[Binary artifact: tar archive "var/home/core/zuul-output/" containing "logs/kubelet.log.gz" (gzip-compressed kubelet.log); binary contents not representable as text and omitted.]
ҝWJ}+MɌr/LQ0Zw`/l=3Jҥ3K$0!ɂ3 ,G)4888| '},t׍nE& O~כ~ /֙Ag&+`$P)e%5؟g)PpG !q- 20Iu"QxFԆzזm/i<Vp\aB%K(kO~MeȬa sK [Rg)zX|L6(he_UuL{cbTmbAA& Ff@'D̽:iWpTd gzu~@+|t 4ޢ͡_ƸOĤH1\*@ ӎ*G3HS .,{}fG>h T>9py Ծm*v,W![oKtm-\[jؗkkZIJNL{m;&e'h1sgOOoT1 J+4Ѳ1_4"V6v!ZAq1EnAheZeg0% PgCpmK'E(~4ʴt?IJa3m\RwoGս;?jJdۨ5c`Y6aoYV@ctH-=J[H%HeqHz Ʌ/r& ^xP//$$1T(Aы*0̵|^>ˇ[kHTux.B8I PLImD C *@2ݘqHBux8'$Tx :j>s r DKM#g %t1'ڗ\ˉ?1˻E;wg+y|s;Sg271KݭHؤ9yd낃&YiIDŽb6;wMKpS zG2[5؃P2?$Rh+!E= 8qR)4keR*^S{&ReJ|Zl]_<Ē+7)þ: #R9J HΩ5By# wHq7Ap*U!Ip^&bn q1˛pdT"(%X\iPe6MI V*FIzNwX22nſMnx/ᩦ"7ke 5ĴY@cN2J y[nid0B3MH<2A$t~^ }&zܿʎҘmXY~1tl/t+^.'}?PP5x2J[^P2:cqǟ]Ao^'޼;!NWD:*(M0Upg蟎Yh8xhCFg=ZJ)׌{gqָs+둘~s &zqRk({fv/Th&8Sl.~VYZ]5a &jCD o-@D'u.wHY[u,h|S^, "~ju:əCH4hCDe*Q٪oOmCvBG 6P>~PGЊ:oJ ](,`3`VjM͸jC=ki'L dmWG >H<0lST&C6j;UQ5Y0+dDIU]JQ0ÓQTK] h;Ю hvC"""`٤#sJ+5PCc4Qf'q4SF: *2S|l)Jd%LN"U IڕwAAu'8%z4kp.]RF,/g)Fh({tʣ&G>$ULPCX*%%Bn"SwmT4,qQ6mlsR?#i%ڳ 5}+p[WaYg$0n#<גS*ՠR"HBsDJK"#ְYc䬹vj6,aT|+vn,ynlpL1uY$NR Kjm@[s^CNoeR \0x0H%\ VQ 5xe( IkVIjjVOj[f]u1D@F#D*Q@!"$Pΰ9SIVBh+8;,k'"u騅7*j7{(kzYp2Jm"̈2@#ϓnk"0~_pW6_c,7FjbMnЬ=H/1G--qQcW,yZ IPZ iGsc&waw4!AG#0^&EdR@"`-0.hңjX \8(EO6Y-q _'=qܴ#7HIJ άQ5sXB6=b#oK2O)-x1DμZQ$s.C\ӽ*꓅0p.?6D\ KHy*'p™"98uGZ$Ni(F>XD(3{:%*xbAkR0cb)H6,FRqϠ,ɸ=f]}q0]6p/տ9B y1|Wʡm'| P +/ 7g>덨Ii352IRGSң,YyN͑3/S❃n^~P" Ä/NFxl5Gm Zɲ|6=Q/eHMZ}T~eyvλ'"wu/Fh8M*HPE YЊ3bȖB圝7X*)ܳpů{!) lJcѨ`TGɶcg|RdLpՎE~q01Oqljd=jjFQG-E103Hz!X2\&@$a" 4CȊF,aMLt$RS) Ĭ#C2y`ُQ_:L1`<D"b,c{DQQІ^(D822btV#|bHKscg6w1c ^Jr%qJ3!txڸa10,WXq+#b5r#yWgRNYKEYe=.@RR$͑@$<(`4Z62`"")cҨ⡪8<|[Guk{A%N=UamAp}3k:]?бYz~Lpdܨ855`RrhXlseSd\CP"> <'ğDIV iQiBVZ:]: / 0u{ %U:rBLD`^*a2(A=N&"3@f^] N\PG;b:$c,OWڶB7һg=RgOg;dtʌ&QF<ÈރJIx#[d克 2J0zspIsC3sHRK^#8"5$CrnJ5rKV=pd2]5׾Yzo٠nS,,n,ɲttH_[lQgbGJigq!FFڄE봡pvjGS'y"0+)7N'Q>n'cykPn`3g2Ib̀H"Ck%`E0+I \(Känn7x^h:7,mA_|C`]?owߟ lp5dv*F'xШY`L8922Z="< "^>=08`&rAEd)2k#ȥee'DB S7ș[ME^9ṏ]ʆҤ HRY\b CӑEWgyF<3m Q`ݱ{"f9xf>5g4! 27hamJ *'e\hr(2xd:?=tBO<h!@TAʠRxf("HږR$X|\o2z䩂e![Z7ZFlG0~sfޚu k=zi染xkDyȊ8s5cW`5?d9!sc6rՆx-/w dAevuQS$U0#{BWxy5;]%Wi++ "̻ ߣ ؙZh;>\V{nZW$c%h[Ҹ Ɗv۪FuĚc`kbGZγS^!{wᥴ͗Yλ:!CǓleox}"]%bs9gL ^z73,vP$nܽtm<3{IW 3+IZևZ.X]{O`tOǐYiE0! 
x3҃sLs )EtRvŴZ7qt9+d/3 Yf6hVڥ*8if0^g]@2 6&"3}hcN8iHBzGUz'%c23zs<yւ3*3x h O(#U&fz;!&,sY޲=ο.mxw8zwCҽW/%ØxU,N ?6iҼ (3kQ'0榜A(}- Iv=d]u+#uL5Խ>'Zk Ds>T3eHŧH6a>梌3mL :}EoHtr9OJOb|HѤqpuƃ9W+bjOO_ 7Mܢ }_ո8о-h7/ I~~$Oe׳ ;R??Fi޵B m:ԭ-mjuoL`Vzen|!&#$g>=z!ov>3dO Y<&u uFp&@6q: Z(5 H\fe_I |92zE@%or9%| 3@`)k7E!,[\b.Vz܌߼KU mWSG̈́QVr`o*0 uSgWT,O\6(P5(Pfc99"FIoj9:y@$\RZs}^ һ{7^7->~w4x5~[iR=Jc]q<[JĈKXÓi˲!*6z崗W~֢'ArԊ=VO?/5hoQ%emExGJMlz\HiY&l&,Ɋ2Q0{f5\;9 *]qV(4k2=O'H%꯻|*3ZK2VqջR2/H@B(qOHߣb!'HHj% l9w,GXМkť1KHJ>Q^|g[].i_:Z*bvY\Iv{MD.0dox"iLI>'b}]>KY8k,] 8t\lRXɗ\:K!2'鑬2Bw_y\uNӇ4.gۅ7(Q(Y3nt%6A A#M063^DyؿOk>fVi&KkLAMUR> p"H,iwJyL04 Uz7ևW9a/ >,܄7;'B(A^tRqpH^UOlImE7Xå]qW.[H;T9T͊3񇶋?, &]\BܙMiҩUz{5L1%5Wm cEBj;חo6ڭm0բ'8C(i<_tN-zZs{iPYlբ6uիƝ+Q]u:dmvz&*dݼ$okDž'bg]g^K_xT-R˫ mGV~f\U4qVWڭӤOsW<֋^2+P-qwkWxP\ ʡGB~vn.^zX%ӵ9{lؠݧ~}wI\~>YW6Û>=9w.>,5~ O?nv>O^ KӜ>xrlZqyyY`掠ܝT9};4u:cL˜2yE6aC3Hb.S!#-R+ (O"906Qލ6&uMoz21~\eFLqˊv7o<*koX#VY2n}s.Q߽IDǰL]t: ߑ!0@Qavy'כii![V&`Z'lRIeVNuQFTr5t5{)A θ*mk>ݭu5Ĭϻ[ǒm3l7m=V{SKm?.if]m+m`iAZ-LGDހa2ʹmʌڨ6z,/ J7R6E9)em uNAv8ԥ!ƚN8rHKE)tPDJdCSBYx8{F #<(2yGz=4bP.Iol#1lѐP4?OKȳd)!yRSEͺ oIZրc6)SY+ 29--2 +j3qzStK6KzոZ͓ 'Uqs߉fUyN^AU=iwMl.V۩=p>0ߑ7G-YsbqgE-s\2:R@g%EFXR!uǛT]t) K9Y6Ԭ_0HFf>̍k_( : 4V,vOE}9WĜ-ڃKRS ^BKieeobp,*eIEp%`UD 8kv |\,.Pdg_~k&s5|&f[3弻Uq?G҄G* 9"iH;jUNz1oG1ga|tTԂl ?~ fZ+bBaKdIϪ Tl+ Zx!:N_^yvZ|<ؖ͠ߝ|WHQW~yrB~/Õ>`t`HiL#Mn T.MHiiDWz-/L`"u(:k5j2I AB!!%[G(1D;OY,/ަ zgB<M4)EF$kqFjِ ؚX1$\f`o&K4Aj8͝Н}ό_֓A԰b"e6dybb9f%3(m :12i}3>)6-tv_PzQԚٝr:īo ݟ8wK"1qʚ(!0J>E!!!&/QRPIL0x?ғ**D1Gtc.%I-^ *FdɄKOrKO;N''Y0g~[EF*-V 4[H/~=%SuK>࿝-'_VN Z2b7.絭G}?~^ɟ'exgg,Ffqz6g 괄}{{m^N}F}p~3wpC">ڡ6Xkl~^;5}Fk{n2֯Cwڐ!ֆ٧ +۸17Tkx"{͔{^`~?TOҝ*9TQ=zk~z+fN#m<f6́kk%?|͂yV;Pb5N+S//i6(A~Y *ƃgGC,eaC cĵ0DZ6l۩/vOZ=QRx]"sCUzyh*LvvϨ偭z[x"]%Ra~v:L;f,E޼aCN]x=NB@Gijd`6JˤwSczu@3: W;nxiK~c bczx?tf% >'97h$%JAA"2@FzmTQ1%ot1롁7%{H{ K$A2d5\_0wxFrMJtK]P}k!\g,nd%,MYaM8*rѩtXٮMAg߹r90QfG=ӆI;>Lsh6v@66Ir[uoن*UkA-B@͌2WE:oa61_ t&}N%Q2I9.b=6ұ,tq`skit?@Ǯ܀hwݯtu?.uU,_`^ي3sn0闟ĥ ?ԣA&йwѫ佷o4_h6jdpy2X0?_} _X!hF8 e`>pQӏӬp!QquRJpsW}ALJkyJl~p ]}kK "D]_GM,O`:g,xйHST[::$v{toғ=W':<pԔp8r^h{pdr 2Zi]<96Kt05ʢ!JX ;(:}Q 5%?ƲˍկBT`Vd(2DY%R CT]#Jq!A:"]_ƹ \g@G! DCц\;Zs6!}Wvim;!~g}4 _3V#JЭpOԤ>bK#0fԧElmne+qzP$i!9s1~}6w'l_y@پ ʉjQ45qY[cDf8:_ԀFwTVy]r.{⚜XU:1rdl"6ZyU#xPS,E-se*X9۔q~/Ѝ{X[i^(iTMLE+i\!XA]oy&i|u_㫢ST#GL ѿTuS/f'^ IEn%b ~IՃFr0{n.l߃?끞K=[VH䒕2ӓ^Mu aHt|ِ_~Qzcө"ėQu>嬴t8/wM.*n+ռ}{{vh;wjK}124Ȑ${ VKu2!i4Uwe$602,Ř|8 Bg.V&jӁY3sj{dyw_=?=yKe0|cnOV~[ٕ>Y] E„e4_ç05 ϤKބd$#Z)N>z'4ȝR? RKg >KL 5SBX\˄aU.g3*n%$xl)x>9hn+ězBk{ه9[&OJ?y|՞>R&rr7" \)'DQ8AR]uUF8tgoS[j*&E1B'ʁ^C֜ :$;1p2*Iͫc߬}rʇ}گh yUnGkGJicb,`Yh~wG#ppߪ_gPo=4jY1 FRBc%aE0#K." 
4y~s?)yi\_z{.l6\nT:cen7Xөѧ}ꖙHb.+T>sm`v*JF6VJøQgtAnSxƓهؠg%4ymȣ#25/_II-AKy ~Ԑ ð;ҝ Fo5o1P7B,XE s C*D^|nV_}o ݖ  mjԣHQm77N,ėjx1I/A8kwf$VA"ʑ7LI EFƹBMyڙ`8zY.\Vr'41bV,;GΒfd"dNBķfx2}>$?!}?*ҾQ\Ahf{FnJlrmpU2I&$wS.>ؒ#qCdԖ([㑪c H :Lkx>M{qM?^ӼP!ͦCzTuE1GWW飪`FwW|&_q<,>(?NC2''ѤA5jzzA>+Ȳ~Rӛ?,tJ] \F~"3#Jl]귮 'Wƅ>p>v}>}:2 $y<oʊ"HɄdף*d8!hnFkXOZyx6/{TNo7$[]g ] 5$Qʳ="*bwlUF _ ד5U 5ęCd 6iK+o&;Fզ.阠z^+e ~@LjR%腹[Qm:@Ht, @"뛫}IϘ649q2{/> Rx0!s%P0.*a`r=;ƙ2l2rHL0yiI@ #mr\&l,d4ݣt/r%2#$]P{@1IBc*ƚj!{ FE7WhUfh[![B7zf!% @s4A >*L˫VUϼcjާT` rA?b9!lB2(AJS)sJA_}&}+dv}]i/JRz(+9p3hT6 X@ ʔV0brbh;H}l[,ƭ@ 3D˔ 8plR˰5ý#Gf@=IiVU t!cwY4a#A29HySy;%KfVղCU[H/ n=R'NSih@sg6wڴUTN=Uк=v$0IW}ԈL @ +^'>\0L2ɋ"?|Wʏ*70n/@|y\Tɧ  ?_ % ^[j-b4@"r>CMYZfJ) #q_7|%H/U?_3P5yu"ߚɻtb|#?7=7aKwL mdjf40fm }&Yt>W ?[(/lE2m0-uQ ALx[jl)FT<ځځ{Z;q#`>P\@ Y*d)_T$*V9)MV' x$ )[XAD佖 W&Z@hLZ"2;Y3ySW`T;WXf]b=C?ibW0f׀nvGr%ju]yeOq%tЀ.&wۊ%Ժ[gV^l>|vngBiRnxW&fhD?Ǟ 5 ߃u?^Mۦ+:j w{zꍵt5Xncg:Jk>d墡 OC'^P̞df0PJ1W_NjY0ÆH]:Qpʗc%S׹..ۥ2l?\*؎\*ߥ0npT2k B2%ST?Vֺ.nR@hu/?=YYv4&7W-394Au֖j/wRݍJ:m*MUw*=U'VE.gbxߝ(mz2?ȳk:"]#FYk)_ÃF6b7>+gbsб 6TO`"k"`Z *y*DĨ6xΨ`|:^.I>qrs ڽ1Ǩ!@WSL0aLO}0,m  bqdR)Kc+<T1(wQHm.Ryp`|Z';6f1aotFH01ZAbpaP Y1girhc U6MCu+¥&Q&aJ;f2Rʃp-a`5,B6tzNS:ߣSrd8I 0yf B3w" 2=Z#ޱ,s lS}> ѨTYUz9lg87@#ʀץ:ubzn P(LDJNcRYePUJXG$cEP)Z\7iMg>/T{´aJ5{*<2[g- +dDk݁'wCwښ06qD℀j&q,0H@`(& Z1]P9q`̌Q\k8Ẁp޿ bTk$uLox8pәONN⻛?ob?: 3Zt3i6 3o_)zp _`8b5INp2I(F֏ 7:/?ٛ߼}~oz{՛wg_{_ .j_V( -x"[_5UG:먛 _ۯ@rGO#$o7\jU*$F+cm׳jf4WgΤlUax]H?GnQW_M^gNO*T$^:y]%NO b0fùqs +>θ0Iݤ_eu)rOzM90_1bܓo9 VjYkb54~0`xQ;X`I0cvZ=!u}%)+à z#+&_vfrk1~曏#6`/5*# VV>B=/숰V 6o` fAHS.4mюJ8#Z*vvqS㦽*蘍&6,2tDti/5%b赇Ȫ牬"DUfkLQT4(,edARˀKf@i(g2$=ʐ2]qC#$,/w3V'4Vq7;mA2DU XFpXH t 'ȬsF>pn6->ىV8Q" 0KBaS-IE#b{iJPCgEC%Dh0mJR~`H %(ˀ\1`GR aEF| |^;`}sKQQk"˸yx%dѨQ:",ou9/ 8ŠoiJH %A a8= $RXVt|8I;ime4BNv@X2@m,@}93 ͩ"(`y|Y^erX֭j}頸2(9I8Ǩ"eD! x8p[>,R@Xɾ0_6ِfr0M] >mDt`LZ"67#I-DwnF2ofT9(P.jHY)ү3șWUu ?^A٭$}>Z᳻$tt-ox17<]'N"|6sW놱z|TUFΏO~8~ڟ 0yQe3{To'hnCTe{@?-NxxP!7VWSR?j .GzAW?cb!o"02-}'s##A&%^zK2^'ۈ}A>.?)ޣ-{?eǞ"6DqM ͔!<g.fR"{\6Zֽ٥qN8jIYYrHf7\!} n* ,В-{ʕFF]!}QWZv]]*gV=RW`F]rurPM>Gue1hqhju鮆-aMp|}x5ASqH \N&PsxwV'Q,x'RH{vˬ!;3< Hs`i*ӫ_^DA$%ZLSH)e4B)c{\yVK;2HÓu̎%̦3. 657 WUugr_,/a~r,vsx{șe!6`=ϣڊм>S82yOF⼍ H*p2$.,Jwњۉ/'5)z𭘻Eqr*v/l)tt.qd3>&EVWqHUƹL/-7[ݣvޛy;ańNE]>aGs~~n68ZD7c:'(hR"f=qjVRBS gb||&svq~QWG6`v]~`%jk'Uno~@uq[4@|?j¾hdb1<#^#wMyIP$qI`M%n"2-Y\K:e < .Rs%ݣfr,B V%+h@{b>fWZn&*u.єͱCGτZmC.чO %8J; f&:]{N[1 gb’qσkJlx6z/3C4e]PRR2sC7̦=0cp[4FQ\H6ZJak-^(쏮>IӴrUc 3[[rNc#4 R7gmǻ].GuK+ I u\IpR]\Vw]z=3$`&غiM֫tf+W[lIܺ~Vϳw>Z繖j0X9~^,,t==7nʼ1B9%Id[ӳ6Oi}vE`Q1\ִŨzL!hy7 %Z['r+Gw=,];rbY&).flvMh[)U b 1%@<ǣ׉u`zsBdלGF3/qλi8ƢehfLD$8% ~;u'{< >9 +Guš(Jd`,,2 9=MfU^x%W: -caRrښ$4H*#4ӉH`OT9IHƽ2PM@d iٱÅ{.]g.˅R7YUx+_.Rߪ?~ӏߞC&t<,3\"c'_Nypu\9VaEM/2RNg:gZ/(ߪdy\yw7wߜOoNy89ɻ7 }QE=e$X1 ?nB|n _߯?jmho20жYq)/YTǽJ\:y4z7)]cMч昪p'ӎzԯӜq*~m~zaPs*[_\$9H(}~wϴC<:h˯;1>8T7'iK0TZ4:c8! sViFKga s.}E1M)^Sl.b?ԡ'7>!x$UL?NDks&GG˅j'I#$iIesju>-p2ploI&VZCD oR@}uN}sDdHcVFjMmWK7k[YRu ToQ;%E#ML|pBg: FjQ{|!CO6I5) hE7)z"c)e!uN0nr6mn9ؐqꎛ9ntz 5Xpе t.^9reAQe:|<'ƤKhz*JaV,X+EfM5<3:1beHOE٥vC=$jN M^̲LX Tzpm7Ƣ!>v:(5K!"Q T HJ21\m/RZ0kεEp]2Fx+4~*ߛhS_oKr}Ma1HJɉi͊ /4J!zfC<$4m'I$!˓NaKP MdfGK).pK->h9p*ý7)+gf'4\Y4}IPszX"gDց+OY'm/NJz =R**R0[V}vG'x2:)m@!3ڃZatItmeƢ8PyD*YLĘ3\謸RY8ZQ;uXOò`M'#e!T;kMt.ѫ˚* 2r=<[I[{Y"|#o ޠYUAZ*sH ϬMA0p.f@ډG{1c$ .?t!ֲt z=,kpwG؄MȸK@D ;$t&l"%ߞD^J!q.y_~h! 
%j XeG=sB3*PAm`O\RhOhCFR ,l xYg5wB1S4-mٷ:J]zV#B;?KrdyyO[s q?]Iwn.i'` v;LG=(}p;ӤH}Ů4' g} hHF:FL PKd.NCC/R$NiJH9BVPDgYF e:g&!h5e]oGW}J2V..ng._bԒeqU I5$E5gU{geIC1G#\q@YU)us뚧Ȝ6g-tIͭCYz)vA{&QZLuQ?H M-)CaR\ nkQ9XMPeHoe'l/Z&d)H)f!,4&ffH 9NdTLF SP\!P (,%o GAC %x\f+c'a?;y'P6X %/2f)JdP.Q$q ^ $\ʨY[ &p:(>f]5Ry* X!tNqt $ʌZ/Nff3%/:h@E$*(8x$'$CTNJ˸Hɔ(LgiN[Cm5H*H46ӌ/-Fs ?QY qLNgF)m#⸵<`p[1KB*\pəոa}`f-ZtG mD=H(ٝbGYI^NO|񭫍1Df%Qd!T,R6yN s@l 0F8U<.sb' t0B,qErnFv_wl̦G\7qLAlRy6?.@\x['Z>VtK_ aNO:'R?22dvCi̻="BZܔ1!5ԃKUs%wEKsc|=oy7]-aDDвJ1@^ۗ\ 77>i7]ޑbE-RpA+}^;u*k=QÒ1Xj,Gnsַ; e'Ռ3=PD%G'dg Ast2*`bc8Ch~}tLVOPo9~ƱF)+jA:J՞m,FUR6~q*G+0f/.~KG$Q(8VD!_ړg[[=2YfG &Ui<)C.;V)#9w$1]F<^GXUHsEg,ilR{(҇r  Bl]wȊO(Ȭ"!@"xg(e30iI)vjC]j\qc,DY7M8 ,<٢AxWMLӽNyp"BY;RJ"Y^< "{ "e *;z1%!g.KRY5%Qp:F(K,+_`ANȜ(c9 e9*JQ[$,§ < UAaBf]*j:L̸Y"w"6k2K#?b1'-6^E'n!xE|.f~=1^8EN9\1A^ .˩@d6^C'*PI%Wf9a7k/:Q"b뒶 4(59p5) N&8gqii\i'=yQ(QH)C%0N(C{lRo6(=8e޽ }m)~T0LdNڞCL‡~ĆUz?Yf}oyh[Q{$g$B.i@{r6]s)?CJl9B% "ə!iΐ+! y}Oΐeΐe/)0ALu$tI)09h5ܘ-N+nU#_@x:tQK\G)tJ(Bdׂdd]ܸ9d>kqxzf[\4/mS 5Ce .)`3ȫ$:)8?ZDfSK5Pj N0QZ/wZjy@p+q8pEj~(pEZw"*ꛁ+W9vAp0jyz}+\;3cUK?Bj-}+rMj~@p;~8pEʃZWD:ˁ+ɤk!r>Z.=᚜v+,l<:P#_kѿ3ͦ GsF7#!^ EZ˕ܬ]{kk87ON5Vg-*]V2׉9'u^߽˲Ac nWI#7&^뿎L^m{}\?/h=@[-L2?Dce䲨T,URgY5d'{Ԛ{O+qk)QM뇌o3-x1.sirGn1qL0R5kqΧRfY'a ތOdBxXwQrٻ6n$W%ݎUwN&˗u`)Hd[߯13((hy\-q@A7i R)(gx8'MV aX. lɒ'Yҝ++=3<|,Ĵ!DM<X)U'hDnQPY:RqKz=yMGf9֒'0-@'hENۜM#GDP@G & q[hg6[̬l20a0Yj Չe">r:xI){ŕW:l>gH3RYFZΦ" 0)W/s >LN.m՚2!VxF꧳O/ᇣqtZ/a3DW1A+U[{0] Uܠk@xy!5vo}anQo{o^߾<|8xo^& 8:YHzc~݄^Ъ8thn CۢqiyJMyɸ7f_gxG*j㵦YUmilr}j^]8ܦj/;N|sЌw??;ahͣ*/qﷅZ;zTwHuNLߟ}δL{\6ypPsf(hS5appٸՠ.QoR_g^\(_DR"ͱ8 |%7{7o$.0_k ¹I.pTj0a]Gʻp)dor<ޅZ}Wv2 |-{ͣ[U#RPD($ ʺ({ԫY@P$RBH"uDm[Cu>ɾ0M y &*#B$8e)gk`VB)sUuy5uy׭AE(vq9Ybv $Y!Zg3N&MtM:VGZ[­H"oIKy&1=עȑjFqL^a["õU@F%bYuN{ h-(%T HJ4u ˘fQX^ s:s* ԹRܭeM`Ax%| ޞC/7Gx?c֤H aH Z)T^#ﺨi`4_z "=Q68=khbR(! 
` jGم$0&Yu6|Hr\3ΔEcFhOXBPLe%i1xGmgya>+FΊ|}V;έ!f 3^2/$HHxEi$<gx'g,p\'WOURJ([}rY4:*am`Z @EsуrL{G?eGP2cT&*&,RcsbFKH-^p[uYVlO<˲*1@Rhg ʸ!kfD[F' 'zn+gAڲE|ŶȚ7TfQ)+ $:V!sOy79 I\,aҳG9)c5 O/;u5A`w;&uӤ<Fn'S䉜M$s)K9,|){ ?GIl6DZDkϙ%LOЗ^rgYwf6rGxfZG$!%቏) F(7j'kµd Y1Lz*SY3b tL,# ,AkΤ3%sxT1g)ߘ+#neh] Uؚqb!<jt&l|?|] X I6"FF=bTyJZ*3!D  =aN{&5룗n6 XbHorVAan)vYŶߗ=y"|M8rG06 3.lB'T8j)j09-N8gҽ\HkY$msލJdzAQD`,PGQ ?6(b ,;"s:}3 ]Rugohtvc-8P0 ˧G!6&ݪm\Fw[(>0~,!gEoW7l:iK& L[uZZG H ͳ 5gVn_1zE(U'RPUBqASP0 F\^+U-H5P6s#-0kr1shÇŴ[i bnf37 J~nIꈜ$@YZYl#SX¢ˆup))HX1脲$ HHFzj1 JJ݊s %t}EB?[4أ%eb_v/nV鼩|*S/p˩+ON&!5e { a26\j^{+FYNȍUˈz{XUNG/kA t}ﺝ6 EB=' yUȝPOΎ0ݻm{7-rq_eI7MA5faRݘotڦY7SxPŀD|TG.WRhW{Mt[}'N]kV:FϦla=>#,!,޵ϻ9s;0G8NC-Pб:gw^HzCzo)Fh8D1,'GUFSy2N愪`sT"xG,B yP .L DL"傩DU̡dqGjQ@sл9+FyhoD-{󎈛n&[cЕN 46s,4~D5lR*%$։i F$,3\V(2yj_R#Sp&Z >R 0Z3&AqL%*FPzlp̞}[n-5\[՞ƪ ZGA=7umQ9kj>*x:熹G~8ns:{{M粈YZr{^kM$#r&x% Q kkd͡ ?c܏i釣ח nq?"NS|p8W1!gVRa ^,%4}cpŽ5[M"|ZwI~w4HI߹h\$" !OĂ0#OV\$Cٲ; Ҡڌuqji~"1刑1n[-j ҆-?g}%OQ<e8_힭ҽ5vWs=~nYTs/c=^֑dļ~$İe]Znl.me.5M}r+BVqKJ&AJ뇎wTQ2NN7n|oz6,v4H?R"SKq61㦋 -M{7`ŽNYDdu-5)ۨU2D)h+$8mPs%/fZ`` L( (Hv|]B??Bdx@P i!k債9+FKi75$ R B;zk?dnD^ rM{Rdf.]g:ȋyFff]kᨱPؼ(`mb's߸۳KM?œb#-{x"~E{<Q+R; V݌J5=N/HYsg7.5]nY5;m0ߺY &5-Ю(moȳez.IrNvvigW7!z4 g_Qݩ "8>'r}@jr0z go##Fz(ٯ%o{7$s##A&%^zK2^'3xC/jk::]:m&hiM BV%Gl%:˒6BLh93 YD2%-rl3}q/`i{Uk3(|ח/J(1}Z [{W ?z'raz>7˿K"K]3d93q4JZA #_iH!{woBބtoi{{u(R,I*MMQU·JZds9ͨ ;j!#b2j  =~:σ!(56Ȩ TӲR̂qL0R?G(NQ1 .#HFjtgYW4@,',Wxvx[>-?lߤb<>W GltA|@ea>ulje9g H<3K=xQnRa8s l  jnM`U)#ځ'ُ~:-Ԯ=AmA<؝B$R4}-/{.1癵~IM<.x# CFϐ F G(1Hꤝُ _-D""l@wbe^W9AA)"(*㚣mҡ2cJy۔=a* hxI`6+榡M G 6E2LZ8 ʈXM]©u5~uVҒCqQTE1​;dC9Bme4t %-(s"ža582axxS"jYY#1_Q4AE?j+TR۫9Do 7_!ޱj&H˒x(Q\0jن!8mowG63ekt1zs*h̹D@r`l9 :q[؆Һgd@1ɪ\2A謏@Jr+9Mt-q;+\~e>[?wǻixdʑ>90hcu{,򴝘_=jϐ4BI=oF@d:^,Pbq+`d҂}ʅlLiHZzaR׃RM/n.d < ݓ'F+މO;wd)˃|,xh%p]iUUT檉?Ի\5{;:wdRQeGS˳X\*;iAcR+h0qS/| *Db@$RA@EO,OtiP431יc6e5Fz*Yp b " /ٔB{!( s,[=z7ӴYjjt[!w6/=M$r oti__ȅnߺ >''l۰xD84iY]b\$ 6jg.Xǵ%YJeM|6D4`C#w`f@ށ$K}N)n R hA k.<Rd`pQE2|Y I䄮9p/*wp 2CɈ`|L.2VqW}P5ej+jtd3M;\߬sb˽9+|xPQ.g˥Rx :ps`Zo*xv^eѡݳgpBAL#*nzZp$x=8@PPQ5qZ_W<>c.)jǭɧ-tUt_53, C[z"*ݒc+fvøF]pCH*p2$.,j ~,ʏU 8ncs:swT|}T8Jv2=9Ōzc:'(hR""\ Oap٧(A;kHrs :>Bjx]@aNCa,Bfgɏg_mU}y$VB#J/&:N=>P' lN Wp(i-=ţIT W0S)zI@f %`Ң,?~A1]4i5le-LKSO(q9( I18 t귏od%/e2a~ ϧY/ YF&DTi?_.Ÿǧ$,%٤ ߈.;?Wa-^LDf`~}fԿ7Cx+׼x4(<߬Xob4]8 -jь)3ߏ>鏒_L!ʦp,.f7#^~\u'"ϓ5SjmgٕvRv{+7fEUEf?eQZt[JRbh4ͣeTHhi|)f[ƒƙyZeW!$1KoI$^'C 17pm \mz՛oJf|?8[4ztԗ5h{y}ȿY kU}JY*Cm]~ uV[z!D>tSYt75(}" #"gCpl.Jk)+$0.đ|} xolԒ' h$VK d$[q3$] 1$"bv=-;w:ʽWnv'͝=unLM/zBdxWmDdY,0]=`V0w.R_%L cGd .fGs7~eq5>-Caehm#ȡGI:(ʓzЪAs e[q~=zӱOX:+w ʰWc$tg_]_wCk^b^ Er3 wSKONK[0LXNgOd:K& |±NpOeގf|Hf=dv 3Ql{~6f'H+zOb-q9(ɯQUp.pU5pU\Jcu2\ Jkz[H M=/bJ#7lI_`Xa?0eQxN<4/s3Uj)r%S{MpBpirbʈ$3-(A}b^(8s#c) RZY-PRԃKԵ]reAhXa׈fhA'̍-Ռ ӮzL-x# | 5%g>뗧S{nmS1Jt=hFc{1dClt)KQ c "1F.xSUρ>8}4}2:9&MIkNֹ[cOTsqu~1 mƂڄ֒LH4'XKQլ З*?[76Вjn,I{mgjS-+5*dlzaCKU+dnYB Q|n` &`֙Jb95ZZGuiENewhBJv l$S օڢ i,h**u(:ݡ-!x<|i@yۧG$㒲a0P.;~Tb|eǎ bʋu8ǖȭlLE"$l S ixu Ό,ec`t" \?`yc֣6JQ| 9ԠMEX#6P[ Fe3@! 
qʰI&P}E׊X{B}f ڛY,KT1K=5!pBB%`񈝏v3诟MQV@v]&!jhX DŽ 1=*4lLPLd]mr%@ۈقj2AVa1 -O(v5!L -tA\xV24ET4i͡,CŸ0\4XFt4/1 JWLd:[nmG᭐C@8/]]UBNu~T}:MOUL l;VC_֫C(NCO5 CvX^EUKq VLA+tme,#z ԥ$b>MzC*jKB*q`\GQ적ДAaJP)䌎Ah[Ru !z 1 bZڝXDjF(+oVQrFL")يG+ca,AJ3-;kʐ 6"xtDΓiz3;Z!~iwtsX3v3۷1Ƹv;[G w wiڸpdWs[uyl':'~n N I@O r8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@O VӒ@r@/ȹ@@k;_I@O H@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 t@^킜@T\vbb.):4H@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 t@"జw1DqБ{'DzN Q $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@z:N֧)5QԿ /ݯtEK2.pJ1.qY 1.-7.b\z ƥw%;cCW /k6}+F‚ Z]1\BWb$HWӂ [lnK+xbV#)Wk]= t0~z;p/@W$tKɩ$`bưZ;7u%tEʐO6.pЋܴbI;] J)'HWʚnaWtU(~=Z}{ϯΑq+EGd]Iq=۷^m?0'Xߡ"JGZ)sBOnIiA4 = 7KiF4Ih Ҵ&* b~9{v^ ]1ZQz't銼WDWr`Kv)thwJH ҕ1}-ҕZZ ]WJNWÓ䵾5r!O+@$ޜrgzݜ+ksr_Cϗ_Goώw8X^"V/<3sMuݓkK/} 69 qzI{+a]ӯ#I;E^}ow]jS꽁<;8J~hB?1Cwnv\M8x:߮]*}̯mӿ\#WLgL%'w,3v3}vy?9sHY]tSx9^;\{'ʴD7Ϯ죲 #-*'m#YTc($Z E 3\ F r)?޵,]0ibf1"fd$?t?r'0k[k/-kP=k ]}o[]1CW 7RѦtA+'HW&" `K+FKzQz#t*3'Y+Zsdч!M:0nd7P y>mef{|#c )*caڜNQ\=n|{\oTԟz}>Ǻ־&~qϭgOc8B,oN_Z_:RqnTGi/7`/oZ2;3HW[1ݙ5?ׯ5_wq%_k)aY;:r1 Ik'ayWrZZ[k)6*|m#ZQp<f1crXQL%j[Vޅ`͹JvD6(7(*ݔBKɺvUU,Xc*)t`èttWWHW\KpDW\+Dpc*mMB+j+MX55ZNW %-]Bc JEs*YtjQw閮^!]i$P +,8i ]%7ZNW|SKWM+c34S˽S_S^()R+]ɖmz1'M+m ]\˜ NW|?YKWf5[]%j/떮^!]Q$k ]qH-]{75X`C `MTSR4ix +'8eVUB)YKW86I]%5\EBW pRr^7I]% bHp5n ] butvKKW$7I]%9t8 } P [ztJ70kP=)tЪǮJMZzt)I +ubJhYp$s[j:]^!(9^S_\}buZFN it8>AƕeNF NltFf\| hȩMw7TɩT)&ARLR?ߤ?r/0?P .;O{Q-cjws<dh?_$pÇ6&\A|]l͓FoC *S&J*yU5`(W9q/vϹu`47:–)ρǠGƤO $NsIg"o} =1̇e e:\E7@)&yb+t@Hm s)f{?]Ǽ=,fAnKsgk3X3{:9Ls3RϽ4/*[E q"Uy6|gj6&za ,D;i䵮^ZDFKtWo|?=.񰟍f2ɴB02үb8mԙ3e G٨|9X2EV<;awTcz! (x`f Uʔ OG]$*+ #z`(#BXYL^jʈhA   1銽轏qhOBI`;/3s忱k[}􌍚pk9\*$#3B6H[퐅QYaG"t=Vvb#{&CD)1I D@XZY,@Hŝ28;Buo&:s%M ~\JKܮqM hAw1it}՞LL}Coi} QΟm^yN=]X3yd/=Jj窖V>ȾEMtzӫ W{uWGp担hr^/+sjď[*uMSli靦x_U][{`0g` 0ԃk w< (iW9͹g8@o I-}$ ^u`9J&R"%1̏Sfim t (O=@2=V pܠp PV~Y:./Q*=a0%a*##EhpR+BF&޵O RC. Q9oT-aXWґfQJs9MP#Uۑ֭ԆHu%q,0 ,PCW,hŘvA .-5"ZjttpgXj\ !RL;sOANO7_KL GiRN!GׁA0zqe *$τفy(Ps:;7 _>Ƿ>^B߂tZP\y)9O-*+ʛMۡh][*|kWVk}4*n%o` 9/$j~e$;y8..R|{3},kWW2"{mFΤo^bLSqϝwbTsr4M9^톯$V7:M蹜w+_W Hbqav]I~YȠJ(kZKvWR!_cl\2kcdOg$Z|8-.;[n {V0='wҡrFD鴚3S/~E֬Z^M7:fq3yރF+w|/ Լ]M<8EsN>b!uʈEtla.QԞX~ݡga!~dI JpM\:i(&*pFT2:"N5S80ik7V& h*-Y~27[x~/D[T>z鴺rVlWfq"V,heQGd( 2fZiD5]ӞU=-=-܊@V$ R!#T#e+*JipdȺPV:d `"Z(@ ʂЄHrogsU.!x0bXѦ7fOCCUӠߺ!CZŀa 'pBF\Ñ {TuNmϭffzpε‘p&WzQa-xF 5hT0(60u3P"486rN5QAJˀDR0"i28;Eq .""1,'xK%EqcK QEtDX.PeVF;_D!qAoiJH %A a8= $RXtNRuRMIu,9Q’i(`dB)3$5"ϣmo."2twm$_!]pc YŢV4%%{ {S4fiTt}Uuuy_9P6j(T>ļ ZBoh!2! Jާ`et 1kRd֑k2l6C^Y;[t^`]4muWv HM'D3;Z[/OϳH#&l8f\p&,C߰FyY\ @3"+drRDÓ\:- -V At 3J%Ys]7- K8˶}[ڲz,sqU?]]g'uvކYH57Ql!GЃVQ%{íhZcÑj8i0ygQ[,VY{`<}1R ʲiϦ.>j RQi0 MiM*7*0ߠFd%&7:Xzƪ%d؝!8fJI*&LHTt|{ge~x}ԡؽaq؊>`ͭq!?!fܺޗgǶXLTOJws' q&)0o2^ Nq%QVA.Xf^ e`vo};6*M$K}h0yEAJ5CĘ,4&$f٣hɅU=6FgC}J`umD49d_R)q=3Vp=>{zWr=&W(ւt{m3d%(YNQ6Gh 3D\I5 rC}H:Y[L I!)RK7#\#jfDKӬD_qVɁqVr;M׮#މӜjK@uF>U9v׏c8Ah}P04} XdH;yЄEm u8RYx_YrAГ N£D]& mR2Tmd&ndUaaq هG\ X%*c.̸c{qux _WDAPdgc-rNYF:JJ,2 *bK'P=ID!EMil0 Vh3v,OʛR&qog7bt7L̾vԖl@n`K:訥(c!6zd ^&#%.q(fx$em,t+rEVtĈ+%!Jx,Q,>=UMx}AjPD|vJDʢ_Q҉]hȏi @kƠg6}ΌUxI+}6* `Tq+ J\VGYWf\uV%⢬r"W)gȋId5<Ϥ D',Tt(BJ<x \MZPSRBяGաN|!}N9L0(IɡI`Cs (dKac@% fA![=zEI K Ӣ#2aVZ:]_\N!0u+ %PFS ڠU:0&0Z/0 o'w@f2&Ξa3\Φ(w⎴r\?/~`N=[} hq )Ig5ϟ0Ձ2LbZ'Ѥ!ha4QHA$D4YyL<7snTcvA[+F)UJ5qvk<$>MW+3ڋկ>d~baaveLdv-q<_V(2/tHC4(_xݢaiG0:ӣ(equt?NI/~,׳8qzq:\iW'HH/Z $YSճv!%𿭱OK>h ek"\-6ҵ4t-vx|P=--6,۸~}\|~C-:ayօE>l2_tџ?mkp(써ݬkzb#\_HluG*QċKJ=ӍC4"W%lln7}.$0+) g8͎ wqȦųac!ZfE4RBk%N0+ DDWe. 
o, 2GF$\eP1&H1ോjN˲IJ"R 3N:y;SjqSO>ωɇ= #NOKw vP,30wHDW &e$15j C$(8=$'6*'e\h2 r`٧4d(߹)LnEq~ !UD8pA~ZEE1<4VOce$ ~} @cS榞0Z &q➑(iF~EH\i]qf{OC{t/]wX6/KՑU)y"Brvu]vygRt1>-~/jpl,QY.i#LXEȯ <~KW XcgL g9^I9B|Fs )-!mbs=9Y2-fN",M͐--Mύֻ{f>o9HW`"вEZ?Zx@V\ ΨMOy=u`&npcr龼aŃN]ʆa_<5zj6 #/N>l_x9-+}̓Ĝ$1]<^GXUJ9ӂUIuc}#SD^"B($h놳}$ 4C4@x"găsL@SF4(x`٣.z^j^\qc`z"yFz ,a˻Ewߢ,]5Ryˮ+#3 gpZbђ$+D"&Xsu #{rH Tޚ|PJIh2;h"kl zo^Э}[uBRv`Zô fq(Lk]̰yDY}V#1ՠ+$ŰA@Ѱ % Yk"A‡Q[Yl>Dl6ZE(w e +@Eo %|&%"BJ AzHh>+nF) w΅,-/LڱtB} &y:uޟOfkjDi'UAEp>D}87NW>-(ϯf7' /7w|fI(ȺAXc#w><qdd/*0Q($3PLPeHصN˾nY&'VUzG8FKFK1*P:b` kR42W'k˯8էsoCƒAօSPs1W4:mm>_]S׿bEZ!^H)P ,?ApeaV"q/Cm7 W(& "/JN&_ \iϮ p*?{gHnEp Y7̇/fc;'M~%͋vԶfu-!Y|Xl͔p1JimӬ/FWiϦOg\u5 X>/jnj3G <-]J&jHW6+ ܊@L]WJ9$ue7+]kC3R\gZѕ0u]2nMg]b7\m^}zX6\|բu /6 3ʂܹso[|_|__ڣ6/8׸A#H_cYDgJNL|꧗;$6!M+hZq}jEJyV$OP-eij(h)6sS1+P֍YW+ؐlҕrlEWJSJ)< >E]!])3T\=; וRYWǦ'^@\zu}:D~S-&^g|@Yzbmvbo.qtXo<[ :ιץdW]2apwכn~gjy^]|{x/}̈N0)luB>_|q_- |ŋMgЍ)o}zU~pZyeÏVQ8ZEˁBEt~t|nxۋҧkЫJ. njbpmūљ$bW(wϋx˵9oo6?nu;]P=m~=tzsK\K1ͤjkm+rznvZLLhHW R3RvfT覮+|+uu*J{j=cjFXJȵ+ v^k ^MAgJغȓ8kmGRiݷwκzJӣ2te-ft\>C e 9 !ftY>KWMGZ"?ғ𬫓U&iϭs83 Y]*U9i틻|w?=>\c:퉯~U}auErxrm?M/YejQݿ0/+ ˛ş36r5LZ}X~\NjksCbGuS-D".-?+zװs~}懐g3!Z =P%lȰBiٹ.SvElj|V+V~N+<6^ <%Z}3w׹V,(mYӹ)6+Ѷs#4+M~ 6+o鹺R`JqC3R4m@[_Ϻ:]83P3RZ+L~ *Zҕ 5nlEWJeR0uMeW dPqC3R45Ǡd]=ޜ |s 渇#MQ2#tef]=}ŇftQZh٩J)̺:A]Yw\]4+e׊֙J)=Ϻ:A]!Φk܂&$~疮f.5iiGӊ{g\nVfMsKRѕЊ@+4R)IIJS R3R\kZѕҊ)JYsu 7+A)ͺ:A] η+{Jqm3߈*-O~RJuu| 3T\̽+߻RJgg]c+!])plgftZ64u])'_Moj#'5q|(ebO#teg]=KNZg{8Z+4ueԒQ3ZVt+<ujrDrt.|fLbb6ajd1 cfFK9UwOΟd2$M4fg j䰵mnhĹ'}@ˈ;k[!5skAqiho-(5 > JunJiS@iYWEW.M`8Jqɮ@kM]WJ9 >|ط3T>0?u])%YW'\J!])39#W)bd!])pjgp؇ *弊$u'jV;׌7+4u])e]Mg2 cj.qDhu8J?1]]񬫧6I\hHW6lѕ:׊6 ЬԕM.ؐߍÍԊ@+BYWEWlP4Fƥ&(ij$c#ʝBzqIfJ&㦮iq jE=;Mc3R\nFWJua*OQWpK`Ԍ753 ߳SJ;߳;E] L-}Šѕ{(O~ru~ήNQWѐmJW ܐ763-O]WJid'hv])OG;J¬/FWg?F7+G8cm>G&[FJf]=I6t،Vt|42YW'+kԐiFWˡ])SוR&uubôbɟ[i .Yc#m%L`(h*W\+VZi2k5͑Sh樢QsƶrԨr-S<L[ɳ4r"OjpQ-kQ-;OB-M?s5V\JySlLqb1Ij;OJq+zJif]0>0>b+^?+=rPAE=D+ ղN3p E ^\5_H|}B#;B\BAyio_^u卷 wu׿~}]n3Z[ow?dyjX2uqG3Vj:x7/&W15~ YN6mm %]^\\}$5]RvlK;g29=:Y;Ij'}~컐l;12 Uhra{κg;0. zS?mg62 }&:P6ɤ>KԷw::e΂ ;i%(\+"F]4]_ɈTcQ>Л H7w"U : XB}ԡXW 9Y yЧb=7g1("pК5d!bt$D{S!KߙvaK9`[d4 J 6:I`CJ0#ѷK qqj:aءG}v}WT_e)[v',A11uwчzdѶ J,х~#jRq!Rv}!!az&WH5cꌐMfCiBﴁVb4AIG L6w4-V:db=R!b唳ڌd'2a3zG&+3;|gz،T ]2Ȍ1M9޺bbh H$,DAd"X*8clyk! > v0TT#DJ)R%tL IއYD VaG#x/HJT̽TTg爐Qb.1(c Y B!AkV(@ QS6/LT:LHe]Cqw1#Qf]HJ 7>&y]]"` DJ'6Lٙ":Pl `ebCE0 dc c/b~Lq}ɻ3.}$ ʭ޿\mhkC\ROI 081r>1f(HvN`"'$2K`z~`,zۚ|s?悏Y/PV.` bГ𚑌W! v{,*׻9r. +C ^g i`<ǔ'z vcwmKrw;V7w70qMHq==(%J۔UNGhf:=vڂ|A cA1J# I2HeuhR(T)pvc ZHmx|Q@VǃVx nEf06"Yԏ/X?ON_żӬ(al2JRp `|yOf ~^m xi9TX] M("f_+'G ]rK x1( 7Db6y.e}tbQV]k)d2k{:&oCUQXÎn~ 9!W53K:vhs^u@XK4iQip|Ӂ Ҋ298k5vil#: gmND|d)ebzc"Õ,5Ƃy7( s.}p^8,*VRXO'R% fcI; 0]1 O4LkTRp0U U-eVn5+An$Lzt`_5jI޲0!K-hLn8HcvN1n,`ѪI;ɤut+h6{=Zғ8bxd=vFvs~8bq/c ] ;7.;?f)_z1lr|߮`o?<&f8uH)xsdO.O6ܭ.XЖӷn>pr0veX.1"Oru\x+FrgMkJ" /Z-envqCq+ӲDy4Y7&.oZt\g|P=Jg\WȊ`5 '&[Y^Dy~?Uje"1ɸƿ!+0>2/UjCD^i@l݄ ɥt(NJGi؉crf< g~`f29-F=UkхŠvxOŵ`Xc-ŔB:GO#KfIN7K =Jk8N'Wq|9Bj.'!jՔ ?.?hMX{6jY|_Ъt/G.|5j)볟Ҥ\; ~[9M_ex.&aIC~> 8+M3=c΢֦qy[7u\;,&'i^*#R3\f" H[#`D!kPɹ%1DA 1O]Z8CC]?{ܧ_M lJ/ppap{y|_;g[bM竃\aܻA#Iq-~(EdCS|6-"! 
0+88= Wm ą۹/ϷIΫ:gg93|cy<,CBm#BPvZ>^w6:_6li\ K}H ʝK+6z j kr ZfT3Uթ/jȹOQCpӐs'[EC.Ղ\:R|\z*d'79"cg1ak 2f8b>1G~WXoo{3}ZP,ςژt{d_ yvB+KmUP붝p0:XY0߼RKv Ӏl0svgW\z_6*51(dQ餓B(,mP.8YmjpCN_q3MoҒo-l(r"YUR^K"~HpP+-&ē'}P62 D[`p`"2ŻX.Fp%e6`jS$pb`>,mx4]NSۻ.%-]!%%s4ŸP-327d5274ۆfQo; |_&`.rɷ4[Rv›Ej1TXZE2rK&.!C'h]ۃpc ƺ4;~/c0XSᐮ?[%4@5$Pgj6(7xɕrpۓ?4x3S%.J:Ùuސri2CǵM;v9vpuΝlN3wнQ}gW=5q4cATխEM6ʥej!|{S_™Б:laן3fdx2ۯT9xȡv'y`v7R]v/fK:)jk{R;{%*y7K=aDvߖ7^_ wZ8rtq"xFMlVn aLă,;i2yWm殻;{YMO&7P0&]D=LX&oSDe5wtڧ;A%SH5|}(چdC|JLvۍ.TuWuBGQ <}@e*Q8ew%'C~ 嶡_Oٻr*A %(w/׿[n밁3|qv]Hd0AyPe@6 um*%H B!U\6A?! I`;ËʝLFLGyR'bImq~~ -j./ۻMuހꨶGh ,8F׬L`+Zs!aGµJfNPKb~&ۻu?{j.`;͇(#Ys>NUn:^KonӅ Τpv'[FF9-^^X(k1{c1$p?0S1gŋCT~@897ųgqz`Yٻ6r$in: Xf/[ p<%$;,bw˖zDm9LID]d>~E%cJv@N+R"ZO$+2JXsdeY=Ϳ,Ow)Ƞ))2Ť2D\Nze)(I霊A[/QCZ<#PoGkuhwB ˊo#F+sL"E:r uPO)z59L 3Qb s<v`dgp⣙vы;>FJ_vcn4Xng ܻw"]}:7m<7ۧg/ou31-΅WL|2v8]/dbyۣM;ڟoSR;'84%tv0}tMI'>'()r$d ~=K:0)J h%3Ld)LV58W?[qWG?>hi }P-H_xݧ}՛l}n]v|5,?,Wq*^}ihG~>j]F]o,l2 =`㻟d]QQarR W|v%jwM< Wf.^-ʂ8o`L0fO aT ;P8'"d hXhCObAObmhrkGS(#O 7و̄D cYm:tg]fN3lMީxc!=W,|tWܗ `kݿ3YjYr0r?͹}tqAiO9)7ܨ|}@oKd%!8f$'>HsZe][o+aWή l/c}EyNW:&߻ʎ1KysP6a[݄a3Q=匩pz> ĎZ ~ ̛r.ŕF&[!bnE ,320Pw5/sw@- R "Ƥe()ަ={`v"UM R2: :SsmD49d/L%[=jkiWPC}9J(ւt{m3d%(YQyl 4kw.VUԤJR r6}H:;I׼-& sԑdd.TVj:xn-MZGJ Rk@DTmwbvF[WOzm>t9hצe.-cs:_N5wӌCj\ȄX\CV%&@ҿi΋`&t.j+T@pƑJŌʎ7%=)-\Bv٦?!?i />vuV{?%)0_,cg >%f4rgvME{P) /l>MV^( SnruFMz2$uGbXB󻜀[+F)UJ5qkz ^sN9:h BN*X"< "O`x`,&rA ẽ.:FK|NH%L]'gnU&Ĩl,Xr('<MѤdEjw-r]JA2sVg;0L0#}gu/*->91qǥ5! vPm_/66tjr?f>I g 27ha-f%jͨdIi1> xtY =|kȣPJL[4Ӭ( i[bkb#H)sEV?X a\Нό/^ї jb­In%66$Yp~ p-C*D m`U֛b?jB UeweBJ|+m 4|Jol SFJP `R(225 l Po2ƿT=݌6'֟S+;^Ql4KybzH>'ѿ]Wq6(ϦO՛0.INl:CeiҝzĴ"( B(pEE1{GI9XO:2Mb>ҧ0B+a>$;-&m>ʻ2TG䚥=ǙݞS^.{ov楴Wͯ f}nu Yu~)"VuMzoMޚ\jR\2: ~/hplH>5ӜGGLX.ҷwn}z6 8,qO s:=r>}Z nx$ rl''#tIzSs%wDR{{Tm2:xW|d"˻k}zo\Չ)obY4q]bJrÃ{;uUV "Dafߝ~+g>Y"fl{hq Y*a>`>z&Q(4^eP!Tg|~XB:dJd9.-_&2B;q,}JY=HWb'rA _@ q3OCUmnJ.Q ^][R"}w?iL7NT݂=|1|ݾM.EP.0xN# Eb3åǹ s1*(Ǩ^ ,VZIxpb#4hVN'+X=,v8{'էjAK"•T.jܖdʌae!:cV@ΔuLS@:RFOUHe,^#1 @Lgv 'kv^ZV2z 5Nk}*8Os5*ݙv-ڶ8slъЫIoЛ3[k)fJ۾BY]IKW'?aG ː{\*yƒV2$IֺAc Ү7Ĺoْ7<'A>(DK9&hMc*7y aaLeWrDK`=_ͨwn=.uY:o)ZXeBY&nyCQouރX&Q[|v%lzAl?ޒmW?IuZ|6 0&e"6^(llVrRD&C"Md4XoTL$x2>ҢU%Iv";53X6Ȃ3$OFgC}J zT"%\ɑCFj6lWg}"ov_볂>mb얦Ux%'rhݕ"]s^Z.q|fynvch޹رulW4wZ\ٲ啚CEe8a>ogT׷'Jۣ-Ssei6|.7=w_T{˧q<4?KnrPi U O*gݜa|(4q Yx fV9^zW05a5bd%K}1RJ)"IVBgj{8_!e7]]f|vn@8w/bYr$9}/$RɖDZ8!93UTWÿ$('#WtFd2.2l 5'|4(_}6~N=6fpU/P񠉴"m6%@'dj{,|IT]G]ǟ9r&Qw`OAS}tqv2`G®ʣv5Iۗ:Ԇhuo47c۴gR k5|QpeCc|M_eorA2.Ri='| ^!6l I4I֝.%:'hSlhs٭|\ _'f\Xv~uQjWgw|-n;+-9d<v%Rׁ@ QEZbOtp(+/q}~^PE+}AҪa7R쐻wUvWU¶*ՓtW(aN=чF{I~F=4+y~7֌_r#Y<;_/;UE`^Ypr~/_fzF^oYvyn~ *c>c̼+.::|*L}步0uua=.l-Y/4JtjɚZjPDYB.(9(%'-FQvߚ62TX2/*r>")I %eI^$l\L3oE-R_%9O)|ǓfgD4z17h:;[^Fsqq_>ZU:Wpp䨟dV>UY߸l09ri:1|^5 Tօ2-W oc^$_$1M%^ |H;㺲073wtM 3yq^k^äZG[>~0y)ZQ3>+}rEÚKX ,߄~w'R)Z@2>d <%J_-ynW.# ):bZʂYSF0$`-ME'z?o1&@$[^ATdgDXdD8_@RP+^8iIۺX8"<[agIe)5S9 #$qlbY3N˖*tv:z2z#?6xR1JdqXaTd!|x3X[V,jhb|TdM_Fja|M)t&ju10HzU%zJx!-# 2c"hg`lF} LCֵ:SL~:k ˙ -]wPdZ0zKoy8' .. 
+ķy|cڹnM: NKj&IrA"F.>8ݩ+C%R.:Ags69چuHځ$R{Rrj@J (E>Jcq&yv6eYgdu򺳍/M}:NC|2Ov))gOEzr٭ߛ?9dIʾhC;oQ?9?r\&g 5Vߨ;BiJ uT:TRI4B7 Pԅ}p1X坑t(8J~b}k6߽~bF{17˙{~W:ho`6Z$$e-mu(%(#r}IAi TGQιXB`Puڭ8wCwW<FKNcȉ3 K6{(' +20 I(Jɺ6S@)>$W1IQR -{ i f o Q?/I#ovյ*zD?`Ե͗_I IJx@otZ {|oa/ʅy7Zy͑&q6A1HbHjpR˅deh:_{^MB.%X_>Gs~7-^qAxob?xQ]yXakOa,AsA' &XBj2iβtd }Ls+aBڞ@I%[)_۫E,EPhޤڢ4ZM!:5-j:tyzwMlgk08$8[Z[qfR{rKݸsKDwy %AvYb0 ߑ&@lvC4]eVo*'U i!'+S'LR%/"0eRƢC/$MU% 6ԛRMaݕOLWWO\ǿ=KĬǻ+Ƿ%;lf!{Pi?|:?]H"wh]E %4(b:̩tA$ݑQ׀& l6m2[AlE d(R "2NNO6&dXm!g 8Is"R D^"f EmhP(xzd#k-RBΥ oHֵc)~e}r(cFb#r!XFbcCm& 9-rzrZɚL9Иm^'g_K*ٵ^}/:N^r)iTTty|WV3z8(ِr k#?gh@wH.i )Xg$ƱIB$jƛT!fS vv,1e/;丝,Je.;q*uEC+zR*Sx>EB&k= |&áWzƁX(+;,N[͜x؊M֠CAjPDc#"~/(hB/W{8[HD~KLyb +"p5:&yi4]ֆ0H,6ndLFz!n9{;IpqtkXkzɡ(+qqŭhCʙFsEi<4&h OFiYvg+D"šaq(Be<v]ẖWU:kȝbg8XhAqnl҂:6 'H*M+h;}mr*xKbXۜ<7F4r`P{?ȳ4nn'7 7cz=]uM ii&^e`ײC+}tLWI d'0QŐ1a!]L`dcFD8 "uF`x`0,"'!u= ẽS6ZFK|VhH躇,uL,X|h(n;*Q V3g700#}݋uٗ%1p{PT]OTb{5e[DW[jyf_I[g$1h*OB$\[1/QLF%1VJøQgtA8BO/֐GG@)Q1ePk"{i'%近y@K!t[r-q$k966 _'5obQ'=o];-wytG6 (uiù2*]eX]G)Rz꯶h#ݻU<]V!5V2ƕCWj̜/DBv8eK|+E!#sAxc<*@E.x&BuΗ:!thP!MVa;auAg8##ers%*Gkr^8 c~a*Wz`ȟeZ]%W1\.OP^T\'/-֯Qk/²J~[ EB^Vۗ^*[,/e?|q$o'@ cә*z~6up9naHVQJ7S?K>8/kwM&*n+:ިwV7õ CbGRR_3F7 )Aj~p@!-c\j风RsR*1[O m=YW8sQPJ2Fg]`HAd_d&r,i:Z5^,96{;į:tkeCضQvu7f3wq+u6 *$VCN xLrI\ꑔ9SIUl*v9LZ]Wչ[B7'n{ iܻ<x{{絨tzVWYKFOL@tBkIP“V:_ST $5Hʻ{>FݶЎe$e¨gp:=_l%-TFyNSkYI2aEò/iCpyMz IBOVWqM "TEvE[(-^T]Jq!A:"կx) \g@G! DCц\{A9Y]+ѽȓ}5 9=< 煨` ѨM4mq[G:RcB6\)~֟*!&q T,M=wkŻz.u5,AMA2䍒Xw۾W~7=<_zٿnu=nJqtQ "r4ruNS]ƞu]\cKQ=+^QY2{5g^KbRC>[4b>Fgoǫq9u\ϲ9X'<\ksYT r2sJ>XH5Xz@h a@yR "Ƥa(!1ErZYl@XcCPAO'~5G'FNoks9gΛM^+x>:{SC3N0WBT.$KLlc"7[%QEP;a-svi/NXҩZ?JJҩJAy^kUTh i c< LZ(BhKhV"4bFdeR̒ JH{Sx>EB.A9W3TmdfndUaaq J6v#v:F0=2-p 4~:_NO?3#6Q2 *r !6VZI4Èˢ 1dNuܖVff(bG$&h@ M&mǬ\sHp>'W˜݈Y!1z@P[Gk 6" `0FVe2"w%w% \"EughR 92šq$lD 1dٍD`(8D?ED^#"n_ʢ_9Bq^ut]moIr+?%ͪߪ_Xw8&߲8T5%H^;O(Y|)i^K4{jf멩G9Ώ YZ B/ΰe0Zb3T;C75&>V褉55E^+q[/2?jb8[v=XgU]ԍ`7Zor)IVHdu^Vv,D'Y!m&.Bβ vvTaձ=45)s=呐eb\S3J&8Rk'ΰGQtTp 8pr8)M`qE6~Q=n9筑S19a'D1򱚰ka6q͇ R?紙wgp9n3~;WhRŷߞk+ ) d Ha 3:ӭxWm<͂}pD|[q >G H4XsT֮$-X RBT%bicx&i0=kM`y^H iM x:,fp0+z6I!i%d4XVżuݿix0A#[j5חHd;f`rZDGdnJ:6 &v1b_w]5:q6Fv]@X#l/0UtI_.bKB f؟y &,3Ԗk>@(>EO:R @ ʵyJAt6}אBBJ R| .:IxI\d7|,ͳUZs(Z_G\Oj͡ѿ}?mF"f:*&k_Ly:;+X'm\͈E0gVPen.Aq?j3v2yW2n{C,OXč02[2%_?wb\|1uvwz;az+Ղ]y`_$u9J_G"VuSS[Rj\S2Uzq860 "S 3 @;yH楻isSyo>]80n5uD X2zDCѓ֐m ve!eIAMc.Ƞ)): e*S!D Nh4HMPɝ%/I96pl<nQ`gOBniByIATi)J_0J2^װ4 Y#;ʻWTNX I`{U0xR:&ױG.4" " ()00 ue *Û`W& :vB:6ͱO{-uVn #N&8[g4 J>w`:uY€;hpzadyah1b Pg)dA)T۸WmX<Fdኴٓ*) mQ \KMXd y4FDK:2ZEʢ}J>y!D L*8Qe 1yGBQ!(%:vtrB{IJj59yBYN< U_Yo\ `*^CM!vtȨ $rfp_cE+W{=xj왵(g-޼{%[gm #Tns:ԛ99̯wzWi+_wh[ӻZ/&ӯOڃ O糏cc64Oׅg럞$^wyZ;ןO0+ ?M^ѨMU#^ވz5|f<-6?z@{H:#ٲ]*~X=U&k\(ZSg0ԌTZuR()cl˒4avKyp!|k/%u ̲fy>aEAԖh(\sQF !YN-#A0M!rytɧqʣ񤺬iC]arg~~oroʺG?6}rr @?[f{\;4 Nhݵ,G>:lԾf,s<67keΟTFC  ۾zlhߟϮ_g+_?_> w`TUZ@J_u$EMBj բFI(O0hz()pRkbIZd-)} mC R&I$D62>[j$ H@.Cم˯7oZ]hVo~ߖjg M=Ğ3Lr{y]ubHj_47%.-D yv< LG-;uH<ߺ*aû4"_}9^X`?.(N-Xw.wЍ{m8ϛyer%cʤթE T"h 9a4 6QSu%yW1e.sgN!v0G0+KnX֗O&.-cJh'zRW> ٴdXO۷Nҁ Ȳ$tB*b0p-2 , ;oQ&ܾ4}(<7n1Z9QKRCx?/C'E+7C!tguA\ XQ挘>\ur=qxǒy.X l`F9C֦k2Adg1ыbIDoCZa3l(/꺟s- wOȍ͹tVGh;Nee1XoWGw w[{7czoWt{vp +#7da\V[l5ꖉ[j\>7Ksyˡ;XOв+yb{8~eM*r&ln͕{X1^Ι.?[PE 3O]3^Cz?P:b*vMR,Nc!x^Wx>[͟ZKzoz;/% ~3| ^zH%( Mp.*JF)#Q Q^FUlY^zt7^ׂxmU7DTu425boV͸eG®w@{u'm()ݩu@fh?p!g~}~kҬ#0SaݑVY)r4نgDU1$(lMVP.84Q;(%*-Z%9&0m§SJٗO!D\b|=8v7 /5\, w5K3 THJ'RBC+.+_\M]1Z|7m b$?#c@#tx^Юɍ5D]O-j!ٜD]QYuF+Aph<_K2iݹ 휇v[9[gC|e7JH. 
ݚTr6\)|~/[#Dt;+(c*u.e A0 ՓCVO-,CSI } @V$U%#wmJ_n'&g4N@M3vmK强}GKdұ-9:3Ùp~ ki؍IHtj+ErfUX[cLསfuFa7͜痡=uùaSvi:_SUy㣚dbh;71'2V (fScO ER0cllw>g lM1uGaSl-ZQz< YֻX, (󒓻7,Ij 6"Ӑ.jJCCKJk$qry{tbw{Ke>=-:G],$=g̔ΐ VJU<4$Ȏu%BRL:}0=1Ų=vk27 t[spD6<"k4$"9ȍ.in϶cPg$(tr~ 뜰(Jea0Yn2.9=KGPy3 B rC-MC%sAiTg,bӺR|*FK"M ]2 Rl;9N 0 @bqPr>K>XF$k!łRs{ =;g&\^7gC `Zg_{x#_ m_?}ߏ/d}> 6Rb7-+^|wσa/R5S%ЗUvۃ9ma 2=vtϏEo_~?_x_ÃDEj=Me$X1 lBE؀kMp՛V5MMF#VhvڮbvɟeLM۽H5tqxe 6 U⭿Gfڅ0(̝WmY&ތ^ϦY^Y_Lޅrh n=wX+Nߟ~wfȑ9)`!'(K Ұ54SZҨtk(|SW6aʱ!9&Kr8\qa$SL?֚0plY+G @Y)5ݒEKS& !mv&_W9۳gB iFYBJOS4%e;ZKKrrXvAYr}u- fɩ&c(,`VR>!s;YQζ/jE n`T-qn2RE+጗]y! "=L+l(x.EcE~dWY)-NII' m#hA٘^;I O~am,C%K֔"%H@1Ypt;hS*NVmu_fr`^eNFC7L;kMt.ѫ˚)2cϞ]VUi."B4Rɔ&z-)|H]P*ZV Uyi4GϭMAcG49곋YB[G ѝxL)6HGgt? aYX;]N4-S 5$J$!&wdáAcj R[ZX̗6YiOr{o>M@4:cdYs4/{~*d~Yݯ$ӮL裣\ʳp}eQӇ>8u= Q-7<9>?w񉬌d!F =|F u7Up:ޣs&YeRRB鍴kSz;Z>E ) yU@ss5eSJhYԪƺ(r,T(@lJ<Iے4Fz!r6zM,ˮ1oS9 #oKoi ـ}GKB,rJdPk>ZF`d ة@hxuUE+P;PtUWOP]INm3QW\ܙmB4ۮ 3ҪR3;`w,uU[\| VŰ%]l ~VJ>:Q db>۶BXeO0"jnl!w)4XJkj{Wa95 OBg-Yfv\EcQ7?>}^l8yOKt<ͤxftMg%J,J-4REmU9Hd+ym y&ˑ48dA;B~i08H.6?{z-w^ct>x$o vv:|HS` >hDRH)e4Ā^ggx˜eu;lGzTod-pן%6ÔiDrx(@_ʸiWFLDFEK 7~MvBsQ]zQ\wz|}fʭ |[SD"ǣ]}euH?6כ6 r_F(Mhn~KbȅEo?f#3VЅɝkvm#ŏH_ 5VM-2:gHݸVL*!TO&T1sZΠ)t28M~yxR`bmIFZ`kr,Z ,ݢMp4?ZgQ:i:h0Y*)b9D=2G!e`^w{DGRdЙtI\eP5Ƅ k(5fuFa7͜a.o/MJ=h0+?e[C>3o8nE ׍jb>)QqC `;\3Z RITj*HJ2m:W7^oO7\[w#mt Z%xV,s0DZy^d@" 8i,xRJ Y\ƨ- PUᘸLfv 茜k^xZH?}n 0csːrrgOKqi§nx?.):)Vrp!Y.Wx g 9)[*;C"h+U#܋F(}bx:A9:]KU";K^3Vhi,JesX6)HMnG_5b @YKWK-!Yh5PRz⧵1%fEYz e֑)HAԌC*{ TU<(Qg꩚k[ʽ/.s_,)yz~[6-s:#!dxxUG^޶s盿ϸ.?z%%o.U(K?^OgcIY[w(o0{x]{2% u8cCN?^oFԮ|y@ars F[:.|V g'Mޣ5?qIjVO`VlOSwԞT>ZXlqL_= K6eʏa`觟X6v~bDm,qޱdm p>;»m'zI1@m{ugKg&즼mΛibu96: _Ŷ_XwqS6NBM}C 89ڼ\/]τ%xXL[, %8Ir/ vBJK,P w0<\符 Vq⭵sꌖ7O^{BTh`<}zW]g VCx~ ۪/熣ynS hRBqk+wdqҁ8w1mvc c*yc]LbѶvgaj~u;ڦHqb; 'BZpVl6< &b<'%Jj #bpXUHvXXvjm~>}>H"MXF8hD@3X"L0A"\q))\bI[UI$%x5Ĝi^Y*}ȵ8cF7PN|ޞ=Qy4+"Xß<aT3{0|:奪_D.'m.W !0hHVN:[%&9̀ǚ<җfDϥ GFl!NDH<a U[k4Oa3 3ݞq_A oysC|v/v֣Z;{"38; B-`\Sg6S`^%>t *UTI3VR$QF&lvO2@$#4Q="4QN2<,șS.ZEsfezb>I |2NiPP4,e!F%38Cm ]BD DhRH,2iJ'zgDύ&OQ=IޝI,uMV{tWIAx?SԂNA{*jD\uѼ72?K > JAe$VblWnVl,Gty'}a4{fO˺ZGvwC6o:\ tVb9s<ȷ)E*Z[/(*2\D,&l> ^#w+%f;FQW2**s$G/)CFꌣXLB u JFe,&hŰJ1[Xlfi 9 S` w}ȸ޸Y?_b<At-6B qRPb%21sfd&ߴ!A(H,QK,e%ϤJ>|0Fsl6^ Dз#&&lV%S:8-Nš]lvjs2XnUDe p4:wz#8`-TȐ =mMRd+hd  blS c_,bE-",`7GWuB npgD':00D APPا@ŋRpjҨnZz9F5g ASs[aXLD||Y7_Bd_ E>.n$G1eζ@KIyP&(5Z*wg & D..CQLؖbMz "bgMECܐDُB{G'hWG}o\ϦJ|3U/9*0T tJpE2A~iwuA%K4HsVKA㠈d҄ZFm@eF2(䀀,KsH.G~ت!by]̇ .'Mv{lPA}JtT$RFK!8ւYC_P%a 2tʖAT&gX- BL҃AQ].3)՚1TI=bl;qVxttH]<{_~}t:wW yGզbvyTU yx+Kq$$Y9+X< ˓tw'xN^6!2IBpDS k\Ϩ#b9u2Le]H\Ethd򡳆hX.28 d$S魙Rl]q=wugK`<׷oovԿ ¼kcɭ0vĜ 0]eUu!X1^}**%+?2`ެgj&t-MxTqM^<{W~Cq>Asj.)j,>L\xBV%+äNJ ԋZ2E8JepaB]zVYkd~hA^ٱB^h]⽻4mz8vmR5dRx ׽6깎GK_zy;jv3dc_Os=?YyR:H( P'LȫTgB\""YIzS<+/KpO,M/WV֕9?v(1N@S?߾yzH.3DΜVB}"e*шP!*2*-IgH[z&$hD1Lb Z)!|0%6/[`ٻ6dWz.6cuW_[@{ņW1E$8>~Q)QP"C |MkFv[olt-f]jKZ/bNy;b~5iVl|\j1'hAk' OaȤp٧:Llk=gS5qm3aƄ{X6|o\ľ9jzcRvy'\z͚)|ruu/Q'VW#W˧UWȧQW{+զKϵDiH]ި+CuErv]]* TWр#uUF7ꪐk̾+ֲGWJ.:uՕj[p[<Jzn\O+Uo|un2hꋃdhPqDF?|_ Ɠ[#>-;S18+Ì5F#5]6ro4 .ΫB䝚~jZ;PJ^+?>Bd J캺*Tj TW c 썺*jJSW/P]im.l0P.'61O?}5;?O7?\ ^H77C7:_]nOg9WF4 > =mR*: _|}Yuxia`rByKRDX` tbS'$40NedX3!}-beʬBL˓жn;Qqm5-i*f6鉛խOB)`)& ºi\X&򄈍 L a0pCj4#A(-u"!,B@WҗnK?ݒ5rΗ):t phV!/L*BM{z Uz5kUuf2-˥=+/T6)j4ɉ`>NC lb.!҇*a;mMrIF;9;gPRKe&$@rPiCeH`;KKyrrXvA#'nd)  &!X& +#='-Yk)g\}%1۰oz ǐuDCf`2W9Hg(* #]P܃yyb"gD3މ@"s^CO?1묵g>:`Imd BHjjq:;i./z.=wB %q`5-5)<&k|\..0@Py8ZQ48XY{dx<Ɯ.S6"(rzOә|>An6 C &:뤍ȴ1g@׳՝!..JiG"|Ȇ·xԆY u [:VӜC de0^xnm (r> Ý9 %udN<9hC6޲t5[tK2oq%;:4@$tEn+","~ r-t&Kp_߆߾RRV۸ Ӧɉ p ~KVŮ*򒐛Mn;i<$çVG͓\d*,]h\eha˰Kee=1 Ҟ+k $6ȸ7xiPX 
Y6"{El^q9˜;Y4:mEx9&b:ƘXH_Ѽheq^v7BBWMOzŽpw\O 8Ay?`:U&G&<3ڕ0}KwNR5Ie%i,|B [FL1Ȑ ; 7Vxue~y"$]o(t≤OҠ}ewLUU]6Fw{(W-dm>~!^uϿvX\: V,PjanJ;HDJM*B)3}zI_6Adך']KEM*hK84 bn}\̍ iw(Dcw!/B9e5: MVZ)DQtQJf$ciB)4V,IduUX[cL=mZ#ni~q8,+NKu) ά࿨A׻X#z:ob>R obd#[*UN YL-!kbBVR6EŀbJ-i9kXJ28W @oX:LBe$vceBP2!^@V3[1H[hyEvl[yMw)O+j0m:+hAby&iE36;C&W4aR:gy"iLI1!h+xPňƵ]n|6|ȼIDK@Fl,YD H BdFєP 014HBg&;aMK2.pAZ' :-㘅&%ѳ,{{^cfAYcaRښ$&W .hT?}ARKF RI&u((Z-LdieYĒ22Y# hZJt[dx|~_l0Xt5yT`&?j^lPB|q E,ǩqȹMW0 bg_z|6< G4Tr*ԜQqiӫ X,c';?I껟z[}i7}sDUF~ L¿BېC?nmho60rm; xS^1 ښ6>Vf=,ڥ-0$ԝW͈i0n 309{570BuQG.8=:J_鹣/#g84:hىÇii,Gx`8)_&Q*C4pFũY`OҺFW}xO|Œ?5nX46rlHKߊ_٘ĥsSL+3~fZkø؄æj pO5WF()?zֺs kJ+J[;V[DUHnŜKіJ `5D\T[.c<eb \wcfhh+^j*x-ƭIp I!e PA]J ::lh``6dvNpvVkCȆ5Ga)WnL$P݁uڴ;:-g0Hx,ٷ08dB;μՙY`\wY[\`)% ڋ)戦$dƴ&sr&F *`5r+ ] ѠW+ZM6+mRpiVGgft>KNH9 ̀Wdׅ*l.m⎻=91gvVzorܗ~ZԻPiEG '(GJkQZͬBZ:kbKTc齀]Qo)#ͿeڟWWwgվٮ+w6̗~gE70|pl iB2{a齤a-uĊ A;5ׁyi;%#xYQZYlJbJF)}ir#dk'2 Vg [se]V]B;t%6w sx{c% Ooy@zo:n\0V Aێ3k D~`q?dJV\HDOg4pMᖖX,hu8ћ{0W?ϊxK/6o@gJ8_A=>z֎xcϗQ82(eS nH "(,`w:UCmqDIpPUe:嘆ajj$;$QιMMh2 n_pS r'YA靔Ԋ!0-J> IGR/RAp@Mu,@M!A tVeaL$`^*a2(N=N&"39|e8[ANBs6qq@>;oH3^>lo8~[]k$ĶmwM?}nyg;DLbZ'Ѥ!h^z E{P) /8qA. /*UIsCw搤HTL Z!t0Hr`PYoIwt'N&f_}~ȄvN1ռb`,YNQ^YNޞ6K9V#w߼Ux{hEiW%ip <+x]*,~)싿>>ϛ$:x6,sL2D FjYt8f% |ɞTx>xs;)}yc\q_eM\\A9nrw>åaBA+ .|)v*Fē82& 9S`DU^ekD "``,&rA ẽ.:FK~R*R 0uUY)cGKf&tSw-r]CKA2sS-Wg30bnGΊp/:[}NȒ6x4"Tb}3EWKDgk9},(qtaOTċlN4Ix_8;E6FgX${NkGVAޔbIk;/4cs9.AOl[=izO?=Ta^/|N7g_NfK}Vw88͞^fG?=+/iHO.IΎ.9G^.}7zNf- ͟.X$̯mkU '7;MP=|Jmk:9?[::K=\vԇ5!s5ko\]yHibx<_\Tծ+3x|R62ݼƔ|<2ޯ] IPcyyѻiW`$ SJηriX%ԛ;Nq~dm{N׿Im'g^*Gv]-?{vnnldDof߶mU׽fbQY;W[tI]wמu/,5IgqXwR qB8tEۡpz)}8c7^74<:^81%{9BQIO}h+z lںVeA_K?w(4GFJ/P3ӛB7[6߮pO39YK㴮WNIlcsN3۳LP'k$՛[Ngڼ~t1 ,R^d6 5q;[,ˬ&gS.hAM|ةȲD55u]m[̜d=^?l7l4ۉ3:%h"PG$ œ+ ,_hY/TiӬW<]DFh7e^ ɂ%GGg ]Ʉlc2~a*WF`/Yˆݫ8.wHP.NT z2;~ikhƟg7XQ 3?:}B7GJuex_f\{|48OrQoǣz0=oM7!X#XDQƃMYN4vvp{UaU))@vfJcD^FL!xF XX) J+9KxymK1u'2iHQjE#vA݁.͌E@O%ZU ӛsGE*̥2gP%%:Μ4#(AB(be8XU]aNhk9ą`Q*KoMIt>($d9zkg&Ζ17MASGSdId!,% "Ġ-i ! K)Qg3¦:5dl͗F(hB]x-F̄Dsu8 I. 9l"E:r F:,)w֫v`4pP:Wc{Fsg-8HCE`R.2!XCddAHǦ)}p] {if1hؽ ϜgrG auRnbs6X5F͸{<5g`v[nK-=]f{ߩw>{g7R AkW@pB:ƈ*g$Du1$#ȡ֗^^>A׻ү ~51z %j똵A`TB(̣ iVv< %K|b>RאַWwU(TUb;glX[zOU+f1qs}2(jh7QS u)?{V/sX$7ӻ=t7Xt5D=3IţK$Dzee8cêWźmC4vTRݣcpt*fQ]32CguνV8V|S /ƽz`i͈=;+M&Z-89ͼ64%,cTb7-/͐V8ۭ『>2`MJ.R%^R)2/L/J8WhK~:4^Tj)K]= jߥNKIH]U~6ꪒsQWL-ȣWWJ8G5VUWL3]Ur{. ]]U*aPWߢRҬlYs)m~mzO-ܵ'7kO7xA8<:jFϣgI^d s9emջ"_񤟨}4? 
U?L+f# qt>[7ؿϮKf<]-AˋIyt4/$/{/usHQmzgcb?|51):c.TB ޵@xU|ϟGw mqrx!/T-IΥZ!?@HH^gAk~`%pX|r &sk!s{ʕ钱wYb0 ˠt*Ӛ{uX=VydrSqr:7,o v*N)xe]mJW'}I{Emߗ4 ,CGP٫.(,NCAx{-A̛$#+B#1*+(O.uCxq։5m0y{R*ݘa_a;҅H-t&[k`]*AKJPXm@QzD"Ѓ6E ]̦NRaǫM+i7PF4 v-G{Jg܃7{x=&tssj$;M~Kyܳ}It@<jچ_i,wIet<;[!UՇY^﫻GVj4|7?^{Dڶenzqk(7u>q6 KeCձ/PjmRnz"HEڬൂTi:7ެq'~*Z8޲]M1Tcj!-45i_#X+^;æHE$$e{tI 6`]SZb:"(L@Mu)XPADd ۍBBnfvc mh<}[g]=~lgV ӡ#`0s| i & GHwjk|*%ၩ5] J%!Lx@aU-%$_9jZ (|!C)[*=(q[zh}<^lwc4J~]"P66H|*¸Hڤ(!,9#]-ފy5^VgmJkKwE`Mk*-kDJiY"gAF* 33OO_dzX~?9Qszou5"dFɎ44J˷r~~Uq!.>ׯ"WTD>]q]`/<:/ˣ88˗????P锇8}oN{f?k6lCp  ૡw4[w59Yi5-~%N2g5/OwCUw?w oaí1_F_yzU6n5j/oqR]''}f]$ϝUE{d^'޻< mx4Ùgus:W0j0֯!}<˳ràKMiF{zϺ=xkEM<[13kO?W޸qnZMO?q9L}8l;IiT֊ Uwm DlM5q5tgPv-T=oə jHlEpS*2Ʉ- chcr/,[b?BiWo<Qٌ̆ I5βs6Z$8oRΒ-pg>\Ⱦ[;Uh^OAE~27}6gg[@֓v6eyҗ'v$ h4)DEd(l{|`%DKBI{i-ri+g^,(0$%^@aeAфxGϖ Y Tc9kFΎrv|^;uF綱EJDOAC)(D@u *`B򤊍`_()ƘdbC6$ F{$%R(Bc0Ax5N6?2ňP9!-:5wҰ2$Sv HT|YmRKrPMQ5rX#]L::E;GQ`!_0*Ud&Y^Gia2/X)|H-ox3QCd 9VZa ThU.$E{OEa:W cƺx~X+to aV7'xz-]K%@,`' 6A>Xe4U6Kp2g P3HuEЉ߃"jPd^Gj>У4:A]L4ԘHNQ&(0$3zgcr"شtuߎ!d( M ^BGT{⟭Su݌9=h7}֦#+xw`vnD (O.͒42Zle|锞C Z]3rkk DdV%LeD5h6Ȫ6<q/!ϲ =fma0]uMyOj-dcixlMvDPPb{/z_ZI ̺ޕ.H+ÜJE] p}́>2e'l/Z)(NBXENJ RqzC$u0 S,d4\ӽ.@Is (UHчZ"RrHҗ]-L mlFΎU&nU=9$k$6D6ƌ2!{_E%\ Qj*kXp&z6P\̺⟵?!ʘ1XPZtbƂڌ/^[~ъO сS ^]~+^m `zmn?t`ꛢl h?%ka7\.MEE,ȬsPanw2^ h1xKFT*!f'GmQ|NTBaj}mZ +֚9wkvX.l{B8Xs6.M(sP, xw@,^MfO&' AVA$CXFc*Z͒H3j,PA8' cƞO TMel4 & .¶ W]FgJ̅2Kl䬗?j rqR6u\Ld[H3E^. ?g!FfJ$8+LI$4"dy"c/!&8=Vm' RO (N;2M5}=r5G ~adMoFQ9An{?Q h~G>t,dw{.GÒ]K 2J* 2<5z7o rM(> 7HAԙH9CP#'FƩ n+S.S[7kyF,")xdCY@/5eDDL &9(H8Hg rl+ƣoqws?_7D|ǖ /RW$6͞ؠZ'+a@qT@+;qmRjmXDS :"2M'ٍ`N0>H>~ݨ%tԶQQIŭ5$L/*@YzKa j#]"gb?T)K'gvѫBm.mrSFw6MuzNOj8 a/P :J0D(o#^#HRB5ń!z3>|զZ?Amwuq^ ewWM=ِ DpfxL01.hָ#Q oE){÷#$r4z#P1yh#v+HiIPT,!d`"{&CD{fh9);jx= ̙|;#܀"☉X(6jhx)J%{v$:D/xxSk<\rcgRV<#@?.S*``,@$1-OPzɓE$n w2 "3zSGYEa)qV \)ʄK !>*3l\kXYEn?r P[; i@d&C'n_x2raq%="p<<98E$3^jaw J+M8D@ƴhfhr}\/˫?&IZ-ާ7|߳)l:ś7kpy5H#'gMz" ye6mkryog˩O/^ϻp)?մx G-_}׾nx>Ռ/S'F09UG%8/a|jRK&|3. Sd&EK׻['omQN]AρWa\/p3 g}&R͉Q”-~La]eP;%oT&vMGۑi|hJ-~IYY=xƧ`핋Z`3M_4n}E?VZzb^ۻvnWtZw; >ufm;N%jBaeZhpnxJ?ՀՋ!r z8o*'ŇOw_ԿQ7M?D !G0.KCbR{JcE!C[5X4)7Ao۝DW9n;=fht(wGd߸9(y*չ|sU_΀w&{*6碃 Etޡp6vL5 LR M1@M{MfygoƼMi{D鷒b=07+^* .vYDDFonӌ\s{z$v?'iց$˦ç:i' FkJ(!ژH#WqKUYtz-3i'HFRc2<F}56xkcí4[B2sCQ{y!I%it$AhFMKc5_@E|U-rr3tGc}zT!$UHQ+Nz y(#esu$ZrT$@;K1 {\p3P ^ 3>m 1ahPs5DI2kJ;F:,gEOe$/ Ka%er5%s--Qf Wb*0w g솹vRy?!>w6HiwJP5aV0#*)dF'=Ɲsd_c¸+Bl2JlqB?MVGؑ]# 8a^i2lH~;HyslVoTcySPy [RNm<N/*>~b VGPӢX~-kpq؁8%X\\~HBa As 7__ uF[aͶGZBZ ?"Nrjn)G?WirHH0yiI@ #mj?M8ўR|t;hSݎPR 2~ -HBEc#4)wp j4) FE׆!:Իŀ< Z3ø)a4'(xAcfZ6r6)PP M ߳ajO^a_HUIy;vB؄3 )^m(\05x]1=^=^ c^p{n l5-rh-5y r<l塴2׌'Ⱦ,'@u͆ˬT)E@pfսU'ݾNLL ޭr;3?] 
y3X^+S1 0GM,+&9NxQ>:_|Wu tf]j0dp*́Tř{sng{*Oc J cF1AQqx^ak+dd[u Z혨_29lcqdY;}d\uYp`r6,@:u*C)GcjL_&~?^ll3|-ͱGGAG!Ѿ #~8(K-hw&̖2jk$(zߠ`yD{.9$ ޡhك7R Md X,VT< Y˭9)LG eZ30{-ADM@6Z+%ﲑI1[|p8\λpPSTхW7+M'uTsXAkՅ*@k>8l5]7]UM1{tm;D$.+LBY\)w1aurLuYXkPYc9 ҵ=/\ƣъڻw[a,vCMk:n,\oWΗfmM+lZ,}콰IƢ1uFʾKIAKpO3D]8}vR'AKM+?ɛY8K?LF{'u*T>H=~ݨxԶN.Q=)LS)D@(Ko)lp +>Ԏ%rFg*uVO4+\mBRu~ l0{qhtY[n[Ճ<'ܻ Ճ m}wKϪz0{>l_NT{n3x8ՋqU R)AniƷǣkzSO4t" C)R\SgiC.Ѩd8FKϱĒ)ʵꍍ{mldGrۗ&> c#B `!QK%?YrY!exswA`jlܛ'y(#!SP7.{v}Y̗!cj4`5rjT޹ƍ\ 46{}d>,IMfdf"YHdzAni[m yuVYWR#PI+TK@j{|+E] w͡yFǫz"~yܲL3Q[2jc`&6ETj?64ajT[@eC>+kѕ=u] ԢJCFWK,d2E;u] rgӇ=t5 8Kv\] h0J姥++Ztuhѣ}X3ҕ{7\tŴj{REW3ԕR5e+ ʱtŸ|.Z姮+tz ue,gvmx~['o;+*&7ەNUp|S}z[/WחKk3.gl\^Xu8j(t0>}x0`ѴlBK*-!teڳѕZO^WBIKT9G]YMhs+ Xp-+%7u]1EWsԕ3|N+ L&%6L~Ef+o,t%q ] -u%z樫`VD] WJhBIfsqO9=oܕ3w%nJʝGukF93k;AVaJv!Et诹^T uB*ZJ9K{/}dpdwU2uY8`a "|q'd'n2nnNywz-{exh↩7-TB\(c* CPRW.E?Հga;]ҫt!Z@2 ?|Om&,|k'?ZKWvO:O͊Sp~z+GYgmh  Wm0kY(Vm"+nY4l?\rqLTڧ*۫ U~:Y}˭t˩:ݮ LȓU}}$BJtچݾIӛeÕl/|c}Yn ýrCj`ǿozP^+$#' MS (mIE*+JB;QmbXXןV7zc[x~oOn@!n6%ՇVޓߎU;^=|TSi3ڶ!mbGnMP:˜yHhYwZ}Zʃy-kݛ/Oq#K?/svv ?MGێhӞo`՜x#>.R6r kvt"/x9<[SƇ\*{8ReG =(z6}{óxl]sg16E}FXL&U,!M|KhU*PXVfyP@+p'k )ܿ1tkRr'&꾺uy~7 6e<]ƁB<*rHͫ=㪿$ݝZGVEqkϪJ'Nks.i5ר:U`Yt&a=j@Nf׋Jz5ӫ+h;mQ Z=g8݋1VT\*8NEJ[4qiUuRrӣ B>J!qJcJOܔ?^+^kNM- %PL--N~jA(dg9`y~5 .\t%z/w Yt5G] xs#l+EJ(2w5G]銁53\O^WBI {=!8lpana4`p0-]]᢫C9o2ҕձt|.bZ$=u]1EWѕ"HW sҕ*~EW3ԕ; m{ ٮ@cXB &H lFӂ0M3R-lFbSוlBue3Xt5]q+DWѕІ %EW3ԕ*%v׫\t%O x] %-+sUBNpӦ!%`rѕj7u] ]+Bx{5JpEWB`e0tzZ`c0ڇFFzcejԢC!d+U e# Xt5C])J)q\t%zѕP%4TaǶnt܉Uk2{nUiF'\rѴЎJ0`MQ:Σ?iPLO,< Iu7Yh!Քl &}hؠ;;T-Mq *N9u)ΌΓTN-g6I["fBX9[peʹԇئ}abpmeۍ`ѕEWLq 7B]QWN;0R6\orѕВR)j:)>V`Pp1VU4u] f`aN{pJpU6ѕ7u] [skHW l}>AuvH_+ݳ jpc6 ׏ ZđhFNBWzC 23JpEWB;YC(5Zt5C])봥tFlt%+nʰDWsԕ쬲LUiheP}u@-Œ4-7\U״PX4=CMǃ`8A3ggJ(;,c_AFb`FҕzրREW3ԕsHW׀EWB&] eXsԕlNb`!] fiu%*,JNA6T;ZfJhB閹9 23ܱBF'+ Kt|tezYC΍q `cx0 ww%њ殆QNN3@WfաE9PJ=f+ #M]WBݢJkSJpEWB&+j |g=͏ }@&sɠ&}İP6嘞K>zs-G\ 4es̠zݑH tV&B耖aL˩J2!!v&L-WSZN| 9̈́20jw#:;u] EW3ԕ%2 >]Y0;(;/+]uJhM]WLq QW U>O<'fsWBij"tu6\Mt%vAt;GWg5 CWÀiYWhHoyRv좫C\Fb`:] Jh=L]WB̢JVf+`ѕjEWB&+v u7!#]100ܯẓt%c=&:2EWsԕBE}rIhwbzvŽoOAVeivѴl4-VO]B99j(u+vD mSҢYO^[16˰ojfZ*׬6׼\rzv&)fmދm"'}*Vo~x~a[)O'wg+9ivjsګf_6޻?_~~UW:%mA?.9~Y\^pYqs{y͕ewwccmvheXt+^=wW="B|7l}r &v7~6Zz-f'&5]IߴǷ¸W}S_Ve#VQEq}@iӬ8,j ?#g)(; uGOۿ˯?̇ ޤN^]VW;MT֝2 UZ$tAbJteR;m\*Br zER3~m:>}}7V7Vp2.Ӌ:5ѩ&b@ظL M陛CժA;Sb ZR`S:.=2CPS鑳c5e,kUZ5/4jCN6ݗ2ljgaUi@\R-gYo8*6P/њTWh8C^Z `1i}J1%*@(RJ5. ИQoP75Ʀ#RAi3"Wo/8Wڪ.vԪ#)!q䪥:K!rcblrȵcV8otYK T5U#8_{'j4re6N$cBk*5zNcmт6JsmR5PQaY6\Tl' xBY%.hnUVǺio*j|4q_AHT xr\ Y:F,}JWgJ'BUY)m*eM%-Ie.%Ϳ'W^VUT6Ʃ:V15ZdA⮤XsiC)@K!,@x(׹[Ê6\"D;c&D!rn;~ŝ~V[\ Mq-*Oʊ]gHZB)8Z,yT=5Amr{BbJ+CGR:8lb}M5w\z/,ppԜEFqw1!_'] *˒8"6XȰ) k|} pZqE#:L[w1v Sñi2]koG+6-! ]$?,&$_f 1aԒ}%)MI-Y<]ust J$[PyU벫ԕhtyֲ±&6V/tt%GeR\Z,rw+3\iu Qx0nI1 kst>̡  "=wMBA[Y&I,ڥL= V-tC*D4`Ԭ4 3*0 e42, |Der HM${4U2T Xp6L:@@}\E[PB]Q[=ಬR uWh%Wk@ e º7 Da( VCH(PED&TD{#]ozg-JC(ʨ[sV ,,xu0wD `L `iƗB,G-ά :P%lhAA7L:A@GJ LEwf+%RI9n/XT5oڳFxwD"=dPIWm (ĩ(HvU]T{jyUuA"F_R]Lcr^5 5$DjeD ^cMȲZPҰ*4]AՊX>sA ԙy 2 m/fĥ"ҊYo"棊1͋BIJBm>*ymgⅮLn6hYKcک_ok*赫3[ݗU&}6}`-$ >:%@uP< *YiU2Mt%ڇAhRU! 
0阊'8$;ŗ9*XqAyjN$\Pd"U̫ fMʴL5ct\%$0AɣJA&ԭ.HGfp6᥇GUS Y_"bΊf lZA]֊ANE"cFg ~lhD>"C M2f-B&ȈG!A]JrAjyq:EjC$*KtPK|̡P}ֽQ@RГMp2E`Jɣf !nK)5Y$iNJ@K^Bk֐h U3ڄ`1boA+}E̓pD"(YC Zc~?Xd9*I@d(J+1A2~ȃR!*8vGyPgUE תEQeXY"  De#zblUNX)j7h=V Q{H$Q5$YI$eoQڀJM.U{9 2俣 %0lӨoAK Cj)M.1ڹ'Ǯ{`E^QH"NdҠNn )DyŰ aLW(ƈMnƢb${ag=u @ +IUE :ƀ6mӺ`fa+5iQc5A&E3| R5fҼLFALPha3BBvZuZ"w *Gђ?j oZ 05J kJV@Px Vp*-]QZش }BL M F:`G򨵧BSp*qKچkCWQ[1xx pQi!XwԘM5U7 "&b9\Y 5I)8I负Pbc't\- 5uhD;S`]pB&^崟ͳrp7LT:L%v\Gޚ](NS9u(zsg@tUr%x>|/0zE{t-)FRn W&&K˯g7)]>ɇ~ƵM`@mWf%Zz\^/./B͗жk)rm]FǫѯVīݗV[7W&F|y K1quhS[ 3G"t WF \:QAy0 ޞ(; Yv@b'; N v@b'; N v@b'; N v@b'; N v@b'; N v@b'; CI8 978 ѥNփZO?Ni)j{}\P8Z]ݏf|miPC2.`oc\Zm#mtn\ ԱqKzÀ V+;%u'5B陮ΑAvQWF]O2 ~@9U/  +;bƓ+R2]!]E+VZqC+BO$0]i=q݅NYtu`kWy0<]Zt+tC/us+kW PFtut~oT `+Bk J+%޶l iںFwh3&;r"fFl[m[x:L[az~ᛯFr}hEoN08w #xM矷N7dubEn0tEp ]Z (ݩ=+SrNNW>v9{  `+`XJBݩ44Y Yx#]0`0tEp`D%5'OW@9ҕ BI? "v8z7"쀒<ʅ{wc>}`ȕcՃ) sm? e HW>h8'2LWgHW!DWo3uEp ]ڨO2tutTqHtZgv3BN$>0]i*|E?-ߵoA2ioQuwuU/nVW{on#u?rdVjpߛwa=bL6?yBT~7Fd[K7 W*^mҏ7mw,|3}z~;ωS ;߯j}ۖ|ͮ./M>:SNQj]l*J>{}P}"#{9ݛD(Ŵ9~ ~67=ϼMZWۻYޙ !\Nʺ+Zj^rJ9s-)(iR+}π"}0;gp(.h\5NNxPbPS!Z" 6/-ֳiyp]}7) a lr}ntKv.׳g4CN]NO4ygQtB[#u<BM~}M~zӳM.ݼdB[(ӼGr꛰ _zLO7O:臛GwN4=fSr2AnOo廃o|w|8ݾytW?Pi7rGN>r3_ߵM?/R5?{Í9nty8zh-ZA¿Z/-rTdt=po&tOB12 …TBW>>i&']"\;Cwa?|xbZٻڞ%W~1^HUɮDQHwc~5\l[}f`69/]}NUTWW_cmj17]pB ̹9 G)73>`PhON>6]'^mk^4XW[%,wb?p29x^3-(G{Mp6.7 wFG|yi_5[gy4P'MX5QXqta+oWFRZpɚ n Ӂ`R ]`gYxgm8(b޷Iw+k^OlK^g`TJ 9Hb;G߸B"c\颉pb:Iޅ^F$%@R}L.V*a2ya&0t.3[WgC>./;gzzwOdVqkg`VKZ[X #s>[k@J+#";o2K9V5brYYLj()4F;A69xUAX$i8u Ƅ{\c S=[kK'RP'M]~t2˜c񖞻>w5|s˃&)3=SSkNTUU]hrO|49lΆ~MB2/{p"SoEsiv]vy`b㋓ώti{o{qܙ.XZ"gb`p\h2CN.1H 81e61x>SF0Th՞CLyc#AS<5Ouzd>[|ì}ll?s5d]Se`9iБbs*DgϦ&IOuݤ\ڱI$Q ƫ9N,}T[:AˤJ@ o-Vems^ ;7-sZ2J~:ߏs&`i˘dΠd2Ŵ'c*d!PI(L$JBoBHr^+ˀWsVvY\{_2IqABK>e5JKw^MWfM#i%G)&@bNe2N2m˴s xˬ| B-mo,:b,=< d̒[Fg',(LrGڔڙc1XIGUVY +0X&D֨GO$UgC;]23{N>ܤt1liuʡ»%H:H4Bļ<1ѡ&P> O&cJKNxJg>:)B2)l fibOA0zDUPbm#˿V*KI8#URV[DobToPL݁b(!)qlYS(|ssSI U7W 5 ]$ ) dzl$ri>9ZU\ZlX}1/.?\N9#s.njɀ,!Qa&.D֘"#oK WL/:}۞[|3I@#rTFK"pdɑ!$C4vBcP0M w#zX7Zd\J: eBp[!K4萃Y,{aî"^FkcM I5u4XϬ Q<lZ)KUF%fS!hPJ !g%u4 q\%>kz}.Q%Jِа`HB"ݔ0׼9^ʅ`d3Mɗ`}ٗoq(Yt5N]nS 9٣Sl06q~Is4'iѣqfTBq Y }cLna5~=w/~{wPo%%[P&׉`"Z͛6՚MKT4mzomװiZ]^5ECritpQ Cx/3ug~{69}v֮lnI ZLIiN0cj%j |3- J6n Xp}w_ꎤ.Ѝ k3RVvcy|8t4LÒߧY*]@4p%Y`vQ_ҳnS>$_%xCkm%8#1!__ p6+Ѹ<'8 QkYø-u79>E5J$+¿1_Ұupߗ;-TF6tkzF$QY3uI9rɉ~L=j\ŘyD1$dnzFء9 ;^zq)$ IJ w)i$gLZ 9ؐ!38ufpvV5x!3˄\tY$IAeNx0[wRҙeXKmi- ɃQA>]OujR6XEv4Θͅ##}&CȆuSA)W ;0`^b4Dw' ^gL0X! =wm0igޚLtև$W]u+k2=,8J` 1beA2fya}FS@sWvXl^km)9A;/dƔ$_,p\RDzcنt{Gs6ݎ k_TZC{O/hMtX0D9Լ" &hE؜FͬRV9kZbnKaAc)f}݆Qj"-zu/՗ jWźZl`̪b-;i= L@Z20t[ _R8ҧ"l4\t^O;7,ӣ(?m]N&>LCIYa{ȷ+(~]"i/8٩1s~!/7s/N./ynlvo~~]sl#?> _hsk&gx:S۟~弟M6%CsFquC؜m h\53W~|kpwt:)qܺ iԪPV`Sx@oe{Wg`?/ =/k{V\ Q+F/(EeI9 ©hM6"3-se-ruU]vhJsjmnx;&YJ⽏ IsB":NCWҰdB6ZVu˜i%33xCmJ } c. ($F3E{^iKo ү. ?Ļ)>q't֎HT `)` 沀󱚢uYh.Zcbіl n>z1ńFlm~ˈeF#x.Vf8^( <"1@a6@K`NJJyK8-PB xI NmVM g|i'dBs2#7'j8/^gKd_^-"xqf|)cH$RKyA@MP2hT✊ADAEH]fǾ|(ZCֱ_V]Z#oT;TQTN7pQp)yޏ.v,PKǣK'I)>j#ޗ99uer:S:S4 u^D7ȇL A}7Hŭ<̥ qdX L䳐pX*X,"(|8nO&QABf= Fm`*Sfscx&q.۲^Aμ lY|~#ONg ]=lټ!ޝtk=tz'!V6͚ҏS;(JDdQD9I3X;gZ'Rb{TYXf" LV%8X*7Y, L]™depD}6՚1w T9ߡgUN8Ny[}lP~ݦ-v`Yޓe('urqRn$ÂDVc?|ʭؑkj, Zc#i;gG%J~+o6¡>Av3`y4+O[Yt2r2( Pk&l>ԳzޘGj鼋s%+ ϼrIj̭ PM $ <0.q޿s+䞙R-&QU ՚&p)$N?B9ʨbHLA[NWzQ"7?PPVJQۖ o%} Gn](!-s7:2JfH}NJ aZIb5ceff}R>i|b=I p=1'ϞcyyU5g~,:ůFo&1߬~#f;n8͏M#^.g!?x$%G#W? `?e}ձ?ߪŸTW.wZ]"3Xg?3V/tyqh:shY`Q_࿓s!#abUDR+\oϕt)-GŪEq.;t'iV6(^>'n^Ocn9^vf AQ.'%>}i:kۼN2X fa&.ww߷q79A{bF)~o|Y6yYw_>ңw`FVNvnOacz G=X^WgatuaQt4ն {!|H'8VQ8ct;(b,*+~v-upc=_w\򔏋>?i?ȗGyn9:h&8U^ȂUEZ;./Aޅ4炑y ;}债ZY^]y@^s|'򇛸/i_N?shMk*]nq:%uql. 
9K1u$_cxn:^1D7Z:4w\b=j\TԤYEo頦#~}1ho&qylаkn2ViweGmk 64=Z9A{rs` yBFJ,J`iksFKg&鼤VHy_myMe^Ox)Q i!k'ZQ@RZ fBN:䠃1) 0^+30w<-mNdjmntR*vz9}P€IC=IkLzMIJ-KBtN2Z/J*cֹ ef#WLIISm2T?fcL7Ld< {JE1v[6G\3uQ9Ў4!RzP2H;=1BIt)2vܠV_uAG$Oa öbƲ]j)@B\UMBd.cZCN):3͒Q"LQbtA+0&U&dC26K7;}&۴-f3h@t׭o^&2Ϸu X٥@gqJ10dLM#:º*u=9ƮnQc/*0zAW1ڄ_|[gj&@iYHU@R$SU?}T&O#? ]1CH'o(؇j 5.gnpVx62/EZ`(+"S2KaaYB9V pϕf=n88Pe}r ¹Ih⸽mu<^ʇQ7u\U~Zŏ3J}ju od^1ܠՋv_Ϊ"amuɓfcq6]Cgӏ˳QWї^}JD sBB]+D@W B}+DkX QZ3+KּGt 3++zc "ZeNW҈7z{Dw`%nW3] -DWCiނ@W=e#BJ6}+D{譁t1M]`*ho *1NWR ]q&:޷\7Ef*qӆFQ?h8fcɏ /_Mu9i~#K svdARB {*}iD:U"ʮ9p 4MhZpNYJMo ZH Qr=+ɕ}ڳYU/th :]JM@W7lGWX5/thP*2=D҂!U ]!ZycQa!ҕ>XJ[4Ճ++"GtE JBJt;fCOPytu+{7\&ĽЊ{JE|Dn;>CO%T+i_ uBtIAE 3BzCW'o[tHWIEm'AӄcNUY**B -;Hn Ad^/Gr1K,x(JY.=-ҖJj.Ç"ӓEF[G](~DEQvB&λROH~lBVNZBWGuu6{ Ց6+e9չ#S'j+YnT,> aefagЍ?.AO=atܨx]]W}ԕđ辍xE4gT (v>ˢѕ.HWf3VdWBKvJg]G !>dWBku%]bP=at%.+ ]ecҕqځKZ6s(s~Fו~`c"iNAif4]]Mt3['J/zlZ +ѕZ?֩J(Zt230S26^WB9D]=q98)m:q9?WNnRaUxV+4tX5ŗo YgC5wnJwo:8E]4R eAsr8 fk;)kE'j%pËH]_^~_~qTc'xuƗGڮoZbvAˣU6hV(r[s F>=lUoa!C>7'C\kF~E(vwk%CZt%vi2,+a ]8ٕEWBafʳe?t1ƮJat%fbgWB=U`ϻ?hpP>'v"Jh=]WB+VUF x5 xoFWUoQ~}VjehiGަQLЕYtتG׬GWڌ+%/G]ǁt`0]{F$(_tPE;HW6ѕaB&QZ]Lk7gDXM뒹/jA(v좫V*5؎+%?J(Yt2ySI)ѕz+ieX}ԕ5r\t# @(w|ݼkuӳ`ܞ0\FѴr5MHہt%ѕaJ NEW{+4&4FW"+3̜j_IEW{+# ath(-7D+GW9#ba] .Qt%+ ~*@z88p#QtZ.*]X1 t|Uur+ #oV\_\/ߠoQVG7W_ٟߴDbj%C:䍿^mwzK>_E>!!o7BMhj_y}?| N.r}م~{ſ8m ?]>W W! g,|U&VѲP ]Ctի~xHx!˳k zQ/ݖU (ִcyͯ k_xw\}}|Í:^⬷'mm[M}7a}} v0(W8L&ʨx~3=Xy2MTpÎg&FL(3]ѢV4y]i֣JhEW{+wv ]8|iBzusוPҢ}ԕE|O`'e)EnR mztivltvF9guE?sdb8ĸ2s&Ug\j΁QVK-x:դJ^/[*ySMH <{M-0S h sZJ=ZHp4a] Jhrn0/z]Գ>T]Xq+}֧$+8w] [n죮Ƣ)`>0ٕІ?ŷEW{8HWf3CǂkZGsוPmjt138Y}ΗtMÍdWfٕPEW+bHW~+5zKz'[2{,ZboG[d3*7ŻcҾd 1']}wder}wZoaGgj=.%ozE)W{ঠ2'gM z>;E:Λzǚ2جFTU*qƅs5Z-^X5O屟qSak'ki~\h@R+UE@(F\C@By{L*됵V 9 l%ZH~"DsZE-\ *5QQ9tm{թ6>d7Ңhe4547Rmޝ+5CsaTjk@fdRb4TgFDhY{tB؍ьecs1׌4*E%|ϞpODkE4oLju%ЖQJCd7XeXDe9wT r c. Vl (lwEKxĨ4ylB ̄8VLtv<%b*gqx kΈ KrNǜo'! yF-ZMM%jJJݽ:뜌%Jmq>,]G&F$bBGߠO(kcmB&ZҙnK KBp ufm|I@eUi#,ړn_,IՐ.(C2+THllnCZnJ!(R>Ս `%]Ai { Kͺf6*EF R+`#SnBFߢ)QٽdQܐ>+Zh9?m3 aaRܪCXu>*E Ţȳեǚh[]Y%oEC\[d\(ZBkhQ5k.om`Td3tJۍYް֞ц*hS%Bkj*&@Pɔ  )r ۄ('x ѕU!\ "UӤ V'`_0LhFxJҼMX >QcEFtM餻4 *YWڊGp6Йl6"vm\ !b4u%HՐa[#ȸ!30ºu Po"0P,r';uOQEg1n2ƭ9!6 60ڎܐ#Jh)(a$@v(R!aQmux+ r=XGdkp2Xi A؛j,8S %82 mdIț0dC}aX ()e Hn5] Q]ZD5{Y/ҭTуYbf%L#(_ʔ|].SDʚm&eH#ɸ=(tÆMl!VU챵9830(2+ŌTU̘ubtN9U[PER;4'1]} y]aGu*#m6D @;k1C-8As^ πB.ds%jy=(f ƌZJmJ# N`:BDڦ pd*j#:: gh ?Aj!vGxc)EF W7CAasWuCϢba A0ևgZ~MmtБpiY xUeVGae),WX w afs'D|^3M:*2c`Hu[ &b1wy^˭g:]/< ?gӲE&AaYƒk!nj!f=0PSݴ{OQv`)j,uZ kH繨) 8KV  b31d0ݳ 3| gb7%xKś !u>f戇py@a%\vFU.a2aw !0H&P#Vi 'O H@vQ߰^ u&ix z_!+Ep ")طXu6A3 #v*vB!D b !EiQ-IUk`zg |[`pԏA!s6,ѳdDQe`ci ڍ F5T gF (̤X LGjJҸh 'kGHu~=(Ws΋ jho:k /5s%FA5"m@naXpnYɌB3`|e\aG;c|DɭJ45zJH\03ܭ_>p4-i3ΆbU~&" RLX#phMxIaB%^0p9X D* _cy]ZF Ax3!z cTv&7fe*`r{;DkG]vA@j%0矷>btXcK7v ΢6j2ӧO_[n>}?]Beh90hd2ߗf6{;ZveYK7e/>\&5In\q2a6_7BdO<@qk?ZZb gb2n]9׏۷ޜ>hO_S7m!mUcYޅ}:]R gB-a'nO?QG*L^?$&|1ׇ`rm*L Hing@"&1 DL b@"&1 DL b@"&1 DL b@"&1 DL b@"&1 DL b@"&1 DLBH'u؋aat>&{_ I)ٳd$HL b@"&1 DL b@"&1 DL b@"&1 DL b@"&1 DL b@"&1 DL b@"&гe9W 6`@M?ң@MZL H)"&3dr DL b@"&1 DL b@"&1 DL b@"&1 DL b@"&1 DL b@"&1 DL b@zL n=(&op@aAZ ) &sd rKL b@"&1 DL b@"&1 DL b@"&1 DL b@"&1 DL b@"&1 DL b@"&a}Zz+^~T>l'ׯ|N2~f#H 8$@T q:UO\q9>z/۱+`UpUV콻jRJOO޼de=ս5'uWV=QIo]rWzZ]5-?wuPU֛}wW3C+Y+`UCqWMگ\ %3tWRx6lgAlj y5ڈοVܵaۭd|u!:Lhy~bhhN09cN~Z֟vo>~KF/IQ֫h9>w6kRS~PP?y?Max|>˗tNGjRw[1|厝^L`ӗN|]XBN=z/z f_[G 9L;oCKkYílOԼ͑E5]WJwQGUӕ0<JVx /!{HuTv\մeZ_Y' t>ڵ;GB >jf64ٻQ|7өKK](jP>Ɵ1tRuI}KkWv@m-F)rV]rN@}%ʫ7FϽZhZ{([ MZ}kRzF[ qkA)g4? 
wIjrI)gvo 㮚B/{wդJ&w|ܕъC !PfUQ0ݕnJ^iGg|oz-o9:~Ώ,Ϗ?Wm;}o~TcS& ;[} Fe?%Z>zLܠs>Nl omUöyxя6 (7*""EiUN.ϪR8X7?6pWf?~fgAX&iCw'yrzqA|{9bߍ1/ =v1">=>+kK-Wps`G}GRQ"DF-w6g{&X+B ɭO[jN"尹b7\2۵kb:Ul-LzUu*)yy%~8Ų.4LYIpˮC?_* 5˽4MzVNj~d{K$$TkzzY6ba:cR&Ջ+e~Rj 2mʈ.RkLmh y)>F.=SLG279r6Bu?Maz4hI^*͇n@[jzi Xwإִ7N66/>[ɧfju^A麝ϗ7/ ]tt:)]o-o}Cg˷-|Gۛcqj⺡WndlѷRs6(L)#mcdG ,8UŔHjr,?{ǫ(l{#ZqƔ8g5vLZ: q}v68wޮ~'33MotU_I!Gi%J'TGda6dujS i,rwO^M[5ؘbtPN$볰*չ$KP4J I'wf6_|KnK.KY}OmO\ 7;w]@Z8s`*e8y.ʖQ5YmeNc1/G:(i|Ӊ-/oY=tklRV=yqGjQv|e8-ΧV$%ҊzCHIM1.Tˌ "sF&~mvq^] "4c!oާOw0M""}N%!J0QF}IVc+rռ1!^adÙuivq_c0 yu?MЋy[XSH?ҭY#70q&&Z;˷~6t\8~,%Wg{$XǷ* lr_jRJm[+JKat+k"תjq* &>|G9C ZjoAٻi1X;enݾe^yY̏^_ȗ b+/{wx\]_L'_L#V][ɠ|] 'uuަ ٖ#bkVZmmAcZ%|f M;>ٮ{xMLH~X0Z37[o(<ʭG9w9s{Ã-e+ޏoF~)[Tl6bjeC$m StSmLVx A:T3_XWg\9TENL :˥n_wM|6zSX'w豌N$hMxs|_FW?A>宩_ӷ=&kgzv DݷX6Hՙo:w#0gkV^N-Pjkn\J R= r̘-vP*O}EfAC};p)XtUCΡZHp1r16\LG\L?q{./}R B|Rju4Ld.]jeDQ߰9dV!%UqSU: x9?a5ezm0qnvx?"\[]Jz:,P{ nEZ/A ALCKk wׁ`BIS"9CUh>D Ŵ( Yŵa.o4ZRc:VV,jεRX]"S(͕51I턶&$b l"XbYVC%7'{r5.x+Frȕ_Cȕ E^`(-D,-] ]\5>[;Ĥ,ҪȄtu]-ˎy3 >>>Gѝ85NYl9p=0 28dQ]ϚhUFUuvY& };: moQ yՓ?}W᳧_xLA}޴"͓{+ -i%׺6Zw͍ؠk[tԳ _ۯ"rGϒ6q$ #\BEmkvи͓OΦG ϖ6N=d/cI|;xE /?ϿaP]y88h0>G,898:Hu 88e|8`A6JqzGn^*J4K|V}ku(o(m\.IhؠW~ӶcDIh?#9j-spجF=8؟/_4\ 5ٟ=S%늇kKVZCk-"v߹ERe! %9LjʼC H,!$4!zrNTѷv-̵x9ϩ㩷&zь I%rFkbkYP`=&J6:y.NJU'Imq|+Q:7}p#uW,+Ý&Rq]%BHU> )IndůO֞[vừ1'bkZi K3MkJ?>f+⼠v-#yT@ ZX1c8XUH¿ ۫XP8Fٖ;&:GVA A5@#V%aٜ\; $동9fvvȡʷLR9j5 ZU|Q9ܷ᷉4_EW}I?`pj7ڇRDzL%kۍb-5m=ƹ?0i/U,UF:!V{&SF"Ig$s^0鍷GX5^fH+Q -9>JG, BDNJx%$hB/q!>@.nLмޏ{Y- 3߯.GlږezOoTZ3ާ~yAJ*i*CY9H"V2Wx\Z:v'V߲)-98BJOb\),$4(,4,薅94S 1Pcpr<."e`uhRHl>T.^xˢ9F]Đ#Nۄ3MVy4WIAx?4,jA=r-)AƄE.Jr4oe&LgIyp($qɍT9Fb|aA-F=M%TK$Tn,'[a[M'鴎sUW^=Ri,76Wq=gr8?ע I'kdM3t35[+ԧT;(@OE"e4(콤 Dr6ڂ* L`6ה+*Rq`U*BL҃AR].L@fLz*yRz9%F=kxpxɧa RJ)NzI!k+=" "d^q =00hy KqH ')yrz+M0Q#4D cѣHATĕAH!jYjF5EprʝQ9s4S##g=0\p=܌^x7}FjzQR;diVV jc<<ҽ~I&$!$Lk,:98AsM( A%g=Kꁧ 'ӣCO'ؐGj ǹDj*r (!`"{H z/zYMCVUJQiح'DҗJxCכU=øс%rAФdR4mU.eNfLĬ sofYߴ8l{;z|gsß}$%{=~$,m̯x#)y/_\P\k78mht<&kN.͉ó~y`1Q_GqC- k*E)!`NdoOώ i %1r4Nm˧m6 K/s7fA$B}&rv|T>Gma0\O|M\8Mg9qΜ 3:ąiriF fIgݻandd< &׬.:sqAՉ.r_lo|l&0|'EcrR a@>L(NZ~]xhip4k3HO|i*z &]6sN ߨ4?V`(qu rWԃYOyD7b"$| G/ӧHR!jJq+ e^}ꔴUCuS1M;PAtfx.讠hf.=u|lTi3w799})sdS-2P~ggLɈNVL2PPZ\ RNCq.I<9-iK}h t:b, 10Z:N)r6(nqWnF[hzKLɱǑ| uЕgv{}2."U)=kz͒4S9(+R,h^,,Zv,D\c;IQY!y. KB1wzq˨BddP$`I(9;c ED/8B1"xQSID#xXWM|CB5. u1X#I RI)ѡ\g4ftX$gZ-*1|wJ(e[KSJxI2J sDiEe%x|52!yI1#:m"YҶp3wޓ;P04OX܀"q+4tZ2 X;ulw -߅+)Yod ޵$_!y``g힗5XjSYEJ,V۲ͺ0*dFs 1,J1m38Z]R+k`ؖ:zȤrOb&s,r]Ea7Lj:MJX9YDOk$ es4y'-cZ͜gFk &bfC'dq!ZJqpNm.`ȵC!a $%1*W<ޖh&>&Kc |1<[¢p+mA)Bd!yޖ1@rK^*!Gi00UܿRoڢ-9\k옵*g-^NtלsSUr>|+D\fNϪA>3 C+W1(yReRe/ZbjQt(5*4.kr ^㋢]ȵNb2Ȯg$Drvc*Թ$F"F\.)AP\|-s#wPw~z,Xņ7ӪX4h]([ϊj?)Lg<*-k:':Up?n }t:}y4?T.fq7/zFeKYi5JEcX$ʷ&Ke sl=WGꇅG胲9b-Ҷfbכ}m&_ݖP+a-6,hcQ{64N~d::k71ɼ#ѻ20.f(۱+7G4G_k3lpW_x{vzs6ph 4 htM&*n+U+?xy7.g;'޽-y7c4qSАh&H{eYK;(iJeC"y94Kox·Xt;ӉW hyN(D%Qˌ #3.0U /-"s9z|RDu-:Wd~AsUYe3oBmj3bLW B\kj JOd54ԀW)JKTTZQUǕ+8].&gG_J[$L!޾hߗ(|uUQ{Du]6s'{]%c.1n&'2E''tʣa }Ƞ#`ocq|7p?IMvځ<:Y>L?hE&" J}<5޽"1q)b47AUH.Fs*A3^WV@В<-– 6*ZeFÒ'r:U " >ʈ-/ _~`:V9z%#y@f6 dSML£ʱjaYR:u;43E}Y<<7n1Z8QM;eW9 O ' 5N6&;4]ayxx;kw[M Iy4$\BTڑ&P 1ۢwj3C,kdjWW_¤S* ,ZR\b qEf1:Cș#lrQ%sϠ"0C ի oư?0v09I}y?w4P>;W|~I{Cǿ)+~ȕy|]>881}Y XPG#$9MWFw7|w_?Fyz?Zܖ!M4scq(j:C2Jz{qy~?ɼ#wR. 
՝Kxf$OggAdPiVefɒll%nul{XqW×描bꊑ@̿&9$ugV~$_>}j!=(MKhQWɷ1C8=>l|T+ZgZsQb~WUKGA;fֵyZ.#y\Z?SIJQ" !z/.\'L/LJ 4On9eNF0 eC[+61Z m+dDj/;ۈ(Q-G+㗻:|iՀH;c%يzп I Ŵ\*{S>xgOve],Ǔy^dϿDŚ悢d\EdBL&8aJ3iIe}atSug;9W`F=dH::L]~/&vV MCx÷d;7Ct} w:= 0;+ ~9!*=H_@lHƮ`MGYEŇD+QౌD D,ޅC"X7O&4G1J PEU|/{ŚQ?lrvVNW FFB{rf&_"I99NG"6ⓗtبO.+ڜbGS b$c\ y/9*JNMbDL@hʂ)KOOBE[w2{ {Z*o'ٷ-OόKTcf2R.v 8Ң2B< S$ .Q!Zu R̊{L!1[HFjG[IR lF-tZKɊ;";c{PB6+e;e#Puw3”Y !7M9$rE' w\(f7%<{pKAi`0&HDL*CTZH+r.)͢x#z >@1t(9%){ɋ@Ѳ~^Vc/*NƒPCfȹ|ddϛ#xr:mdTNuE~Jt@ȐJmslP׺eΉ{W0"o1xpr`X YܑCG9AVbtcC>D;pGޫ^ 2Eܘ-WTuUe;9"~}t|}4g50\_֫SH`Fޔߍ)&c10Ki& tilAd{81r~ͺޱZUz;{A>!ᆪ]'l`F.úaԓ"P)eT(0*k_"{jHƳFkuٰ@l~\y䷉SpCzÆFx tvO0k#3"VΌ,{(r%$㾸B9g]XQ&:SDdEF@0M W)!MJp헣)v)R:4Nڲ#ݵ|*%/ *_abDƃ(r+ƔT1(Ru9-f-뱻(PU/z`:I<ٗ#C~E3E0]wH np!:^ *ٚl wՃ VJ1Nd!ϕAu" hz~XJ/P\\)]rMDBkI3I &K{"߸T<`:l05(EIL@FS_}ʑ3Gb(+]HbdczN B->@8at z^`BZVc@^/O'yDz-Y*/<4p3>qOS􅀾gn/AZq:,JJַdP+֯tXi"u{699c5e-ӎb'uM{j^dhGJ Ώi1@Z)L@Mq:'SkD)Uˊ)㲰r~-jMd|Ț]V--/N_?ʅ`n܆J%~;ݮI}3|<+Z<7E #(& 2Ot)9V`E!=1/[0 =?TPVLBtO[k7|["lb#q^l[BdyJɜ? ~z\,zk2MK` h%hSL8y]5ƫ<a 6z^̟Rh3Kt,XfNo%AU]CUݥ&A2- HX:\TGkH8, Hh~KP0%6ΈT>k B@W%JdQ\C݉ E}6sK:V[ޯG\Rv={?7> ?X d+hIWSƇhySXfkM5b0G@4|Wzʶ` 3 0a]|ǐ5oƥϾ=,>=S&*ޤ'4e< ;?{۶ ,Ma:~0A׷1 yZʒ"I\#}ϐMVLg$[NH99s7C K&cЦXQJ~=OFԼ=].C ݊]Αb͔֩ -F\(6t /Me8uZ>I]p1ݑ+xϷVJfA"0bHtj8_`M!-ЗL/&15bߴTJ=\f)melu~P-5Տ̷r*"1}󚵑V`ciKM#褾ߖNh/J9H z}Qie ̯~ͷ-_0bh1xon︬Jma5TW;GX0]Eazth*7[ ZlƘPMIEsjEq6Hz }+Hn g/5_n(rg@K߾-@Kτ*EE2L3E7AwF\V&.Fic/Xf*Xw ԷL0(u>‡l?t:U굤&t|a: n{HG!} hqJLh u{*1K<*G$4BH+%b6Hx &:j1W!J6ϛ`A;tP ZiWYĤQfff'VZ B ؾo7}fPc\qgw>^^'zuޏ<-cnW,ZfquYv"Y@, -~,wVo*p䅙8Yj_~5P6>^'cd_LC9Xvd*Ak=7Cpa`ۍƧs#SM4KTkWk?1e`~CJl p68_ l2{i.: 57 iS<+єI1g{94.Ҏv6Yǡ[VD2@aϤLWxd؟6sVtxIy>:?BԚiγѴgZ~8=6`h\ gU{O=p$}bzHm);1\&) )EoA0O ׏^t_>Cc-]"pJПc8*1( ,s\k4:La~&gfvގ&ٿO:;!@\i a&g?痿tͿv0"zR8I ߓ'T= m란-.U= -TyT(=J?EOU? Rs +4ZP5B0CloB>{v0Bwcwe>bIsJb_ȯ\8Yzޟ'Fܟ];9K'BW?qb,Se6K*gj88&'a<^"4 ֻ.9e 88o?Q W T ?^wU\g:M?iZfۼ>]@Jg!9tʽI~ ?,{4Y eű4z+ǚD Ӑn Iy:?,8{n&C}'nug^k@= B`? FϻڟBRS4i 8qPtʭOGux "\}Vׯ"Ds2 )yI6&ǣofN%[~/p/)f54Sw˄>KC^v2[״ 5]_ +od^UYR?,+X.o+uIyQiŲRZ*%)t NY166<)s֑e"52|'QG<4*b'NkwO| ^"*"sntܠnsu΍ki"",jd/c3xHEX0Owkʹ(sy}zv!5 4tru1GuZ Vά:-Rk܁O&2NHQ@9AJ,&zjGnGYy}H.6D:ѥ..L P42}Ҝx8o$P HTivdȝ4C®k>r;ʲ3C)y'Γ I;+$26+1f]&HUBG9`jV 8JB,9҃,XmdX~3{:r;ʲο:0e龭#} c*хt:n_=8d+|LH"BI,*(! y %@M0!}c(:r;zHئآl0pQ @AxjJhH>@^ 7GNx0GRM G}kY&oEc(>r;jڭCU1$ Wf^0UͼJkER \y"΃Q69Jݴ|7$M+w+םGJVf(a'8="jL 82Q8KnGY#6pEްq׽@O>WVhrRmFZOZP"[jvPTYwu889+nӪNKZe$7繉P{9h& tTNO ݾ|lt>W[{0S1ew[o }I -5j}s(6u+uv};Wb|I稬[GO_^6h]bܽFZ;,7'uo{-\7 WkNK̍s㧊ngP!%p tϦH<"lij)<&t\fJUlHܓ"qO=)ܓ&50]`V}WN;if/Վs.? 
4lw+|ϞC9(E@"dcIbxt)C B{(xJ+aHJ1R+# Ԥ"heB'  ݮs4 9rg=s,(y@GXϽB W7 Թocr3b"QDO!NoZ.= 589lː-EbQX$FaQg6,|-M ܂[$nE"unrY)7G^]z{`0tP]d\BzJ l "dAiI X=@1-˄NLZM1|1khTDP-AHբNPm44ҙzkȽ?ؿd Wbϫ?ML*AhfмQLPLy3ucڐJwD+Xin}:hYT'H`nA1ArC&xqs;ʢvզ-LwmFhdu%] WV V/uhD d'h2S:2o-u)?z.yi5iWO}sn^soK7 +n4A F Tj2@D$V)T6kHT`'ZCuu:+-0Qxr0s:!@[Q=(ڈ'' B*k52{:H)wJ[e2 qe`)* P \gjۓvמ,k+0̐|H/k4f%H)2 GwBEXb y M)])0ybpJSy&53pZ"([8 Bi%L_$A bCJ TpL(:P1q6kN0(k\cx+E\AFAAð@XA]I FՑ2o٢l&wtRV <1{ r)EF(WhI)qcCx9;S"nhC#ꈛQ"aF ][V.c&^Bt S!(;:D6+qr^ؙp \>m)~E8Aeۖj9;Fհ eX^ءXyY&!Tح-]U '\5;bub0)V[= ^Ėc)SV;1qԧI+wUҞg?7ښ|?u>9%jhsn'yg7VOqg S0G۫~%y3J~v9zvW>y2`LW}hO: Fթ~/W1Ჯóww, [ SʈH;A&V]HIѹV~Կxi*ij< Mk ~J?hs"1[ʀbūLCoIKƮ9W9< ZKBNePFCMTA'//Ǟ42 Ndzk8BE]|E>Nl%WcD=9F.!^y/?C-/ mphJBH $ݬ׋Ew^6/ 䡀ܒ0ޥwu凫xw<?Pn>`Z nV 0BR)U֙$UNE+i13Ʉf6%Ff`M:U2sB~U:p?5z7-BM)w>*_*΃q8AeBH#:<@2UJ㗗!]N{NvOl{Y3c n^t5A㻁`&C@DT~[0K+z }/m:1|sx3?}5?ػGd iHrg uNh-}J|@OeDCZsDFS h EEKL]IU`}M#ö=<^۱ ᔬ8w?sih|tN b(Dm W~}sI!QR>D~l6ߣ|wY6ccy;5B=MA'?%?i L;ڨ9R1%aSsB>d<O|M!:r;_r"Fq^7f->稬_SGXlYAnrkXm&c^RmpbDQMa:}w5}p4frq#dET/]g&ww<&x}!Sk ~*BE%3z0~'D ה]h #OA.lרy珸U*_覣dr">(Et+I+62E)=0L^[`ùL\t|۹mVVmQ q1PjW.eJuDo `==$ޛ6Z62ɬn)EYP@bF$< VB'`WR=6v3*x"p\E%` ̤ZL:QuDgx֋J 3ZPTVmJ }xct*c(J?.RO mn@檯۞VҘ&ј&ј&ј&EcZz)yjS/Td?e,onmOԔ!$ƇT8. Rzi ï2Zqf&jP:Ok`Mֆq;Aom17Ѫ2 ZJ{ UfL8XਭhȨ=n',u\_(kAHnVb_hc{G4w﹦6zZ8 4a,wdwF30LnZ~pG)՛_(}|J,\mDp]qSﺮ2(1r3(1r]̠]Ww\];q36(#qfTi'Ո_>Ř3ԭol) Yk,&9ir>,I'0uMgvVrz~gAVj"S1!ߊ:t $sqr>5W;{^k)WWwl<ƽL8d*܋9'ܫ4Y,:K؂84b*Pv"ƆWUN =ˆZº %û=8vXۻ&gb7?[q\D&v9-ƶ}qQ+a{x=J]T 8hPFGZ/mRBﷃu`7.joErndj\4|D?{ܶ쿊F=Csݹ㦙ɩ=Ν&t#K(9v=wRIkeb[@p.X,CѩD7705Coj.kGƄІH^W4Jsx]#tH9߬-fY9^& qz:R6z{`oxS۫)N6g^EVm-\Sosb4CȪH}E).[g)?7 >`W|ҫҞ[6^5FTJWݸr`wC-oŀ,P'lf~dsJRiYppjEuμK̤` tۻj$DG$&*0-Gh5?:j![\D[ņ(xqnjϧMnn NATp"t3/f@+Hu,d@+l<];t {ޘ`7MUeR]"g )"6м71B@XQ-H"2Hh)ɺ=tx܋Z _Rɷ ң l| .a,݁}HΥU}嚥^>Pm>PL"\.@a%<hk|֓"xԽp"wh"lF`3U ꄭ]r1=ȿR]z;vDXՐ>9 'g:G7+ uϴl@6 0gKgFYBa۝cYgtN,A~PKk{׎~;#(G)Dt]Kd\p1o@#\2-kή񋟃ORh,{J +e)z뢔w N5^zKv.=\Yse1ʰh<ߘcpm<:h <1'wbMOΆI7A:sPRd4~(Z30q1gc:u[&$4ỉ;u1mcvyQ:8a4(%_ ލf/k 4-Uj-++xxQp$쒶4vF` m)*HV@ CPa'9_$qQC(o; ׶^Uз~97 }m{<8/798]?88>.} b⑀*lq[V [peǁVZqL<-W\ }VCep1( ſ jv۝G<}VP2mc4ciq.Z2jеFd?PP9.33 bW V[XK \+ O2R"-%,t?&SiȲPEިj@, +%tYb{5/ S q+?@#ZE2MX^a1+'zO+¢"jKh\=ܶ9mT=L'iF\Ot4^ 9`aI_j b` k[茙#[ps \7!6X$s./$sר=0`mONZEXV= m}ms]s8W4qؙ Q f z} ]oOn9%$3yYQXqڃ)]If:j/_LـU2飁ޢv{*Vp>]Lڱ BpW' ydnu%5kma#ke- lziC|SEKLCJA@J56Bב%H@{ Dx$2dNzl>1 ZiHÄ(T h\V$=+JZ0 NpkNz A ӥbAT=װ]Zk6Dӓip]V6^m*it\,5GxRK$cP;;Ђ^ Kga`V$lGu{ '0H{MgzZ0ByK+)'HMBs8OW:rIHdV|OlV֙(Q>Vx(E(kA@)?1gHRʢH!D53Q("Ab06rveT(OKeܓp6i/O'S6,Q2 ݵ8HU,d!EjX,L5c7' Uj$ɹ;\dWFT}i$xIbI*Vk*$Ee ɂDZJQQB 7'DU뷡IzfZq#\ o9%_>\eRIu3Yn'p% H\eR?K\_y5ɝe>](bfS%! q)T[&0V y!Zb-3Lk&q A/x`xc}r a8D^p&^͸!`[BMLL  L0pl)pu2mlAP/r0,1q#a Ř눊bRCdLD|X 86Rḥ9j&g4Qe'Z? ͲWX ,bi ?l!+GWJ/;_ DS.B H De%*A? 0r%m@|27˄fei IVX Z\v(zTv E yY<=WF+jYj)P!KRKI¥QGed!A$Z*rFN---rctT ZNhL/Ơ?}oejB7L-9< A};\^06EP!fɝ$ԻIo~ 4f/hp.Y6vNox~>%aN<؏Xԣ2AG',d6u2;wT2$0U{< bfߓߧNj }P3}[%H,dL\q2ٛo[#[ƽ,B/xDJ(˝GDo ɒ)ewK 읙ʮIJufj^X>ks=3o-`>}4Ú$M(]C*yUϑ^[RŽ]Q/ k7A,]W\8ڄ4ڤ8|fkRaׁmUxǓ5a 3oJu.k85 %Jq!qd|i$fЎ]V74+)a^2G{D1'X o;A™ (a2Xvlǘ%CφcҤOl[dwm+REq[$89-Qb˾Ӵ8JLIĢ3;36c6^YR 5PqzKGln.ѳyˡG'}{qFM"]>91 @TeGߍIu1h.X)QͥYhhW%c Ն Sy1j#DREMrWWv ć}qFщ왑kaO).0[wE""6G "TF{R`d'iTD7d؋bJ;2 y JT:Wpg]%ïD9<\T:DFvcFu=Q85%18Y} /d܎pgZ^(cSCCedaBp-1' VRp"[Wl೻[~|v;{֡XF_*9iYM3?#f61.ϖ:dL<jZcqs?|$pE;ZaR:CuU[)ҍl8VoFqB+KLFsɵ6& *˅ÇLr{*4 W|ᰱ*3%ͱ\k! 
-XKx0/Hp1Y} &[%̢j:{ 짼Tԅm|7QT`'` !gA@  !w- Q#o h gBxL18 #$/]\PNeY+ݪ:\8 GJ0I3d8Bp ᮠik^BfߋoJnoB*ky3Y 1OV}I˄@ـ' 1w%6"D"^a볜Y)!v޼3/ݼ~=";G,XJ at9%"wGP܅vˮ}\ ޝE&`*g~w#^T,AonJ.F$#+Ip|ωd&*,2#MɗEv`<\\eO#-GﲔEY8 ID{n;p bM"1D ԧhgUlkaҁQ^ XSqxE;,BHE4Gg`ȝ# 1Ù9ya S[n '(<Հ]A+(Vh $d*'SBu LѥЄK!L+'SiLj3 |bydJ!i=+79c*d4l7VP{W3anJś>K|~i4Ői4Cَ aR=tW:BV]ePxıR`9+EB|' +` AHnS6LQ8`wa8[F,LWN%o7Ao!A\zIC݁󦝢O,-c!kiꝄ~nBBܕBu; ƟU;bWg5 TIG*4|SzxX|olq.Y=lTkҜz}wmŢ@`{l   AU_.LL,>7:j-[ţ `$ywJS-O ~M{f ذkkG`GU RtSuۏ >vu>iÈNE'=:8}2 N!،YH$?dRQӃsX-F_G[1*D3zzZ#0F|J)<114=S.v=l"X#L Ew܈`Kz6/a8vSv]RJgzSzwrY{%8"qfpYpl_z ݋_~O>VJdo>_\eWO sZDq~jT]K!U|WB =xO|>^?TMtT, :& :1ApG&>&&ɢ%3`h)w\=߲dUy72Hi\zp3^tGz=?.Nw1m,Em;rL?' UK.6j튨)isyQd9$ɽTL(K :DY:JcZ,@s4m둖]?TMMϓ )!Yιtal: tLhcG"ZVWL* >nB=ܻ<+f7g7yQW[ >2wdSjH(*=F68n< 4| |`(ִbTBУjbA sjgf U&-x &[)ɧyJ&⚲M@,~dEWK0W?El*Tte{, =P?\ E$#ّQBpOAWSz/vonF@Fu՟zZ2S}k55p}U',eXFDc$ɂx^m#$V|,V)dLipBh7ӻm\$ {H,d8qo,irU:JۓaAa%C;*oL;9Uo3o5 6Sg&5#&6)jI%bn,atE3^u9ئt|?FVSFŇ'yW``5Jc`ࡃod'5VkFpbWohx=h `clߞC[)G7HN_A<Nґr4;jl%|q@y 08_^l~=*K%ָK@C\V0R[[jLL gL))tƥ2 #N YMiP=LEsX>|~FvlJp_#팫b_i84m`xYczZ4.&KsBjd2X3 O m옟&K(էQ:a*Onq?9쨑HK4T6YƐ)&0dQ vˆ Itq>J=/; kt ?\L7%('bb!u劕Vb ۞F/]ws[Bd3)yĔ)DR/ȑ,|;UzѠv-yh($Lql,!bF&HcI-kX,_ҋhQ#"̲ݴ/7Evk_hټ%2Ue]J@/ӥ@iKJ_ Y#^BWM/2J/$+TW0F}ى, P68?-n˅LOgear$MMa'Zݠ/R֎ `7S`fve37i]=LߜYIj+yëu]:!ӎCTcн󠥨_7WngB{(;AFY!zUQgDzZ}uT?ojZ;0= XD(,j*(fYJ(l4Ri ; &9L¼(0"*$&Lz% Nȅ[z koCS$Jrta sg16נ6&`PY#ȻBˁUP0* r }"SUk-*ܣNP 3_=Cjɖe HjC@9XA8E:/+n4w) l>|bgigyvL;D7Z'Z'4Xpa-q0Kad Tc/sEPR#ΩWXZ( 3-EYz~H~.1C>h[-%oH)=y+ކwGL`0׷ 4TI_y{~VLfŚwWW%d|a ?\ϓd%;7k~N `l@EHKϮ/'g!+^IP O'B@q)YJn1jT]c$ Qu+by3Z78V@i U]MRB>k35kBfr`j+^?Ï^r ( wm4eq;!dr '/'0H;=LSojDIdHWU?|,*TS)}X4A>eOePWEJ-5$TNSJ4O%Pm &BcX`_z>uaB]Z-/=h,r:Y|+.bb*b.ԣu=b}~]K=xoI^Z v#pXoqmGY$I)"}hJ=28 KCɵHeh~zUgډtƐ%]"FCtܲ3dxf~EfukEg.)eG,ÕI x[bC kdBpNJ)'yUjŘqیYlU L0q72o19pjeooɗ7 sIm5nT J-TU1"*nS";E{^ViI!E)nߒԛ-;?~c onJw—$m R:!A/޹(^ WT;$ڕU^C$p㿛M^ҔNp)URg7_ Ix&~% Z ( xJ{IFś'$rڞ1kRAx<-FYгA@K}9#DeYJw;OC-PTJJX 3|ԃEKYB.5”[IB0~8OgTcbJR>E9i,耣=qCB)X#;lێ x=%)m9kw$b;3\S*֖}ڧSIwMݑ8ӚMO{StSZsTmNg< ț4l#Mӻˊ$8jHưËlаCGdFIӃѲq~$4f4uF%Wz{hĕg{[K9]Hh rSެx&V qP(-c̘Z7x{XsyĂIO[ޢ#^3iXw&=jP!HF%iLh|xcsHhfD$$#8'F$9'$4&]^ OsؼR_!'0x=+ pTUJGi'qR;7WucT* *kDFz{)p3_8=Mқw/Ӯ @u` fmaBw&\z5wu﵊ð5%L1N_K48BAL4AG,ٸRaR:eغ ɉW٢&\r8t5~H=\f Z*8zTS4prFg;) ('-0€ uf~dpՊ0iwoB.}5A_Ý6ɻU,4Fj&zo&}VabTiJͼ铵ĵ#kWs*t>U<ᶣ/<;f%'s` #f8RfdUJ֋ÛgM^A']g/B1Lr,7yǭ/g3߇޻l7{‰ SJ/OK{ntt@T`޺LFwiL(劁a% a7̴sL$40g;}[Ƙ3Ɂ's47/TMz3*qD~=/G%|ʤT<^Js0CՓcTt:fM6aoӘބeަhrqB|Rr4pɛ0ww:NoW}S*EH/׋ysbw.rUgAx(*J?Jkoetٿw]_,DZ)qx!]r [ݐ億uY|]VtaQoj!x 2VO׮h٨?78-{ +RXO32RP3>b } 럯7VQ&Hö HE0بחW"Ԡyͤ*$ށjʝ7me,NƑ,u=;`"ڕ 3BuJ a3X`Y,G𞻺ދӔ09eayIgˏ0!]οb `oG+hli]Rlnyp^^ŹB'a6KV˕zO-.Fq\/].ʊN('RV.e{x|a5x<k, vk`MY016e7,za!śE  ҇UCJUe(8Gv@Zf@[۸>TNS}SbߟV JVICg]YaP:ZxMehp d|62h*;{MEa=P$2|AmմRυɈjj u۴p 2fd!aݛo%F|R\yWVu12].9Do:2!h[e8*0p`)8iv0?rOց3C./+534W5cC!XA-#˜szhqt غiw@%u S8 ҹ* LI'Ƹ2] Rʈ/49b'/&ۧC2lv-c^3EC3$2|ĢLG"VT|2F\ˉo/-!DhI]XHJg'r617[LFh͚ʔQ@^3 :Z!Lς:,Cr b5ӧ &f)J5hP%!waLs>g%p=\ҝjyiJrcHhui Q+ ((/Aj%rϚv ē^/3u (U?QȣJ|5vC??Ŀ"'dQ u8e>.ܬCn?ߛ]:fi4NfЮ*θJ D0^UN{jUfPh R8MEJ^J{%|~B:YZ!V(6Z--r:Y| /߯bPoz<4\/>{ 5fۧp4nv?pR;-C((e ɽ YI⡔Β JeG!x? 
FUJpYz^Ԋr` Qπ0b ՄaJkQt&ˏ́G56AV:KCHgNitzO{Ӧdib ;1APdTn!j$SPX+XI3$sbJdiuTQufY^3M~ŃQ8u\sб E/JTD5DgNBZBP2&q.xrf$ssGRԖR֠m R,D9RkG"h]z&ʊ+( \S<fGjN% A=pmYE@㓤EMK ˍȒAJ"Z{:w9|f8CZN$ R0`9R0feRiGOa<+o5IguJP,IQmʀV$}VjeUBRbUVIɹ'&[]J#1 $0k1BL&; J,%:Oi\DV l7yXmRȔY|waShX!v8\(xdRq0f|DZ&L*jh"%48f:?i%حp,% dI) _e' m]2~G3}OP㪆+Gtifl1G `jNA(!DqD R;|K 67WE' m2|U.r?@*+ƥggRT&ޤ`Z% Ai!4mJ'}ko\aހyu-J)m]2DIm$b(sIeκl<5} 4@KX1M%DLDF K`e9/wTRmbv(9Mq ~8:"Pp`Qpp/Ax*#)HNHgB0Ԥ720yĤԵ%/3{,;gn9ކ:݉6kcw+MM]6pyIU$bxi09~q3S a KaK20ڻ6IMWi= Ƶ:.ǰ?Vm[b _0b&]goL0'-~/y<*eF KNq[ t79!pSR$G1A\>VPKHǡklvy%,RMJ $K6guAg-.5P)oKP(#eZ`E %|6s@bY낽vB{ )QR*OINdJ4#:IR.1B'F6,cKC41ԥN:̄^ƅ45,@֦)! `\kbFˎq{L#.v3ITWqE() ;]~n) z3)B}Dq~sS~"l1~h~waś(Q\P=c&`jd{6ŭ ٛ@0m 6P -}0xMM3K%uAb.Ⱥl o9* lG:B2|s'S1&غFHރYxd[(s֊y;:}-?"}BwCr* 䴔SnwODtw %\x3}22Zy1iQwnqsd&uF$r88?oS|AΡdrx,Clҭ3s=*rH960Pr/{4G8詃J/F28w04fnKƷh;foK9NlLٽ qNY[ErFhk&]upaĄID >q+%Cz3(rlƝ]񼯸qnNҭiD}QcC{Db5ĨrqZstڱ,xhaG(ݽHW<>N^hI2"q^ѷ"#QūykQ;"Ͼ ul+Z"PsDwC|y\}\ fu@7rUN?}>bX 2_%X:_̝.~}l=Yt* deCk}6)|._fzL2=˙_wL{kiʐ!M=IB&OPpF*Miy:GuNd0X:jE뙙ͳC zlbt|^3:;S_DpT|gDg6:vŕ6cm:=W(NG܀L)LB,Wi•ΧF{GWRN|+ui'`ҿq? ^fZ`^Ov-![Wq3J*1[t@ra[C!Z0LZ! *ĂKm-p |MyUig k%D˂,n#pRkd1NH6N9bFsͩJRsATaaa5"\kݨWn])#!5p^fexopiI9ER̚4%MY!v8p\(x3'-r\AMxUig4וUbYagM9 +HL0X©Q!~ JRFM%qI"xytxUi~nU ei9N*映ny%L$H˸EO٪e,MAk []B]: J^K# o ?+1ZUh8e,)-#mk8S̎m JOw sf4VTQpvD9nxi\RE'MwUjWd} aNX' CA \TJR˩0 {DA+*CO$q[F'&A܂;5!QyUZ|HGRr)k4?֊Jp4"|VUQj1&c7ړϟn[v&#J,t ?.IWޓ#F ݇WI2SZ8A"5SX&4$ R T:擔9"c3 RpP*Ӑ8:p$meЎYnP7x{"`7 c/(@ %  Ý7T"~Ĥ{BSD ArqK<  = wh(vȥbKF¤^9g)a$0rDa8%,DЛ`'tr_;wQ 9xo~bfx8NjJ}{|s㟖Ϗ UIxϲ>烤0>9K ;) leXl69`Za ~ zWށ`D&^8(x`gl<ָS>[F\jM6|ɓ Lgע&~݀1H}{mXd+W/ ؟ d~*sR20>6(X{OC?~4Gg wޔ5,RNm6ЮrXQVdԝ6&hiotK(Uu Z.dW`9naQiQ| ,ۛJ莚E^g>dվt Zî҄| Ȯ=k/i,\2(gk8vͰ=T\ZgfW5rl,vkKtzGV,U{m,ka S?л*ԇ^ˀ|h.Sp^Gw3aa!^⟷%L`WHU3Qye@Rl;Ry~Az"^9B8T.92g`buRfq鿕tw15=󼹾lRǦ}gYū u3&h[ 2v;d#OzLUpJ7"7`OSvԜyC*rE1M(3{>r!G!M/,([A' , o)rwk.n?>uG4$M0 a1Cb["3㏹ƿ.\A~iHfӉ+N|8#p|XH9w" Y y>Ea kd gM\m٩!\<3yk-\a bkD8-O :F8I0"*l&Ⴇp,\]B_roG<׫ͯ]^{3wH> FdO]9gؙa'pL?qߍ@ ×=W }ttO_O_<~/޼~aخ7}q#Hjo4Xy_~|S?O-^~oN~{}E ]^烩S 烳,@ B{/z:CuޚEg#PI^yfwgn|(vۼĢt<1ss"oקNMlpj)AY4EgGn^ѰmyA>Vf?Ŗ-nƣ+*q/~3,Бypip Elv tjrlώg+G 7vPȾ ?("/ۘfa!>by,K#A݅wol0k-O.b1 g/c g>4 P5!Vﻕ_`6m]Ihuˋ'5/s֏>)0x|0bɊG'[@# /+'{? 
)<1.<_jy>r^pv< "lvRʓd 3e/5/@-߼mqC&\@ uȀB܋v:*m>)V6 1<,qQ.o(lκاݒ~?Cr[+WL Lh<2ȋgz_ew1e 6/1ƒEɣ캺Tn{ATb*3R%%# MS:b) …&[@%T6 C%(mi4JYq( T1Z4SK0%ZSk#"\{paq(U3)o\tģ] ʪ(&Ѵ)A6 e{/EsҞ96]u<[B8ܶ7CikOY8ÿO#s\?PIx *sS#͎.,YSN_<]7E/$hl^dǠ89 BiPN"W-crqNRiy@DzzsŇ MʻE@;|4\/QNOBz W-.*["WT2K &U%4L -5I`^J*:4 -}p4@w&!-!`;:[:JIt:x MS-7E TzpR kBm0"z5bqF`[x)J/k@iy*vq<쯏A*I;eJe>?~{?C )cهOIkߦ]?:ޟ0TAhB'!-TӇGc&M@Q/\ E O"d3'+;}ۣGy)\׏ӀomfI"~2}onQV=.i22dy&yOʇakKg6gs >D14cF)L(1ce+|IW^Z3h aKSlƢDO#0^XLzJh-D2.+ :E6@jS&LVEg2L?$-^īe-)QcLPt-C\m˒8g.H:ZT׳eR3lYL,94b:/!mQ鵝~*߅+ _b$kg~ԌwҀjXݳRŖ~^MieSӷSw;ŝ%<;k,S:;*2Kb{|W\ВR ԋrEi־V>o'y8V +MV@ ))0If.*##<-MFtrZnͦJb-çlY?>|[a`]PgSuaS M1Y.Mִ|{n4{YZ3%>;?F눣mmر{=V)yFDj uD0Fi2YZV8bN-4G^=X1Bئ7j1i=,0ZjZ+nb;nF mlk+'e-Z1@ кÕ071fjC]ZqM299%D2`ݑmGׁONov ^NQ[,nLy.*E<QZBTj2ijPLkik>O[yښ|^ݚ45 uZT֕Z`  b^qc%>@hj8?x5לT+/zmq`JeT{2eRPµp4 7 B%di}J^4Ym gi:$:&{l;.:J ErM;Q,<)e%R~=lb˵]k[?/{=b갬/Vh:F1h,4劯]+dؤǻZUATSow-ɴdq!z51z^wNM3aDf@O mcU5EC0%%SQk$%ɝ"0$/%QXzՊfw159#3'A浃&%hI Qh SG+OKs'2`8L#E#hwz횼G+eT~ j$t _ $ۘSZnO7+ٯ&*)B.?vTգv8x(<4[G>WyfStwE)._RmlI>]9hYkm8Tw:;hRiLtש^\Mڵw?ťϲ`|?7SVFB( };+c&i#:h"ݪfwsvYuv jILP6P`ګ$ic~-",PH.c\^_&2gkwW 訖=0RñR85ꍥ&GQoB!CaEE#Nf}fukZk1Z}t#V 4l-q̃kS &5z.UXz@5" !z/Ƴo"Z Zeu+hD<{SӦL Ýo>u5v os^ uvS\3F@?HT)ۥkL";H7--_uUlۏ[ӂ_SꈌQ\0&2WD,H#*0+Wa2͗/5nf0JBRdx}.XI=+wrrX#.TXێ7 Α զs- ]5upW`5dmVÀv];e6;:)@5#`x f!Z,!?0ggkr1߰?c,c!#|-B n&Hl6 !}O/[ &nC'P?IHJqvXJD=@D CdV ŵ_:F%wKAx77T󚉖'b\uGJiK|gn.}>=PBP֋lϹ%;;`luu!IlǑIsqоc$uWE>c"Ҫ3ⱬ8ۉQUO%7M(*'XDFkBmwҍ+;@M5щWYxxFf:1-h_^C%x'Fu`jMZ[a! ^EoUeI[xJVs_쓙6ۛ(ޓa r)0huDoh^$)mU]p+$JP#ZuuΫ!V|ITVJ=ߠVMzۊ/W.u btx{Y[ǚ1y潾,ϱ]Ua9}vlxS?g%kB0q-B}!_YKd_1B'kߔ>#wf{@񻽾jէ]~xMdlsj,+?9#M8OHǴNVE幇X,pvJ4=lFV^={oZ-7jxX=ԕG{֛k1@8JUՃbU]h~kt8GHA{G (ޱqd !Dߐ~|K !R찆٘-ۇ]z,_DpoEg?}V)҃6nyDL M앷I_3WlloT^^'d'w1 _M}w7i$҅?e>sa'~t?y<w^ (%΋yɌЎ[&`rG q2 ɿ8`D/o>N0On}<"&|-Ol4\0sp*vp1yjMWId?<yb \x'zmyֆiQƛPPXeC" 1|` WSpf#Rƃ௙!@.JpVF9u (I(!Wrʗ("HJ5ZG !!ZuqY?EU:>g$v3J%S.-mdI%'^W4HJuA5- nnI aRz=8Tsl GD4F1 c.bDK[z1&,'QE _(تhU`SmmlIq3]LbklG?+2gu;y( ЌŪ{ñXثK%#+SnRli8>l5x4'18ӱm tr6[9>D!;` 5I׃N+~kZOK&P94qz(oc颇J NDY 2_͋S D`@Q69֤u|rnDD 5|z`\f VR y8O`m9W69gvy >5|(|t}ElXaP@Q's[ĀAV"fGAllxEBO wgoPT94`Zc-xM%QGB(bB~ y6\4,LC.`WPW{!'XX+#G وP 0HR`P2ԍQBe`s`i.Ӧ VKn=QCm|DTRw a"ØQL#@ap E`h鈸HP "(AI%ȢF72ݸC P lHplJN[4[kA=.f&̘?'f qll[FP0rp{>}<;hŽq2r*B_I&12g9HW; ! [w'HOraKޕq\W)sd7*5 #]iQpfsG"9j 5aR*pi㸮R0OPd{$۬iI r݅,\a*[=4\h%.u ",뵻? kx1[SGa_[ 9ȷfm{ KgVu/tWv#!Dv&e7s9- L{ *m7+MnF~qB?A5w?W7`)*S_lk#!ۯGe:6qԺOo{E5L %+pTc7ry7^IA3F] :;U:b\}dO͆;R|KD/5#R/ٗ9I_tRnG6;hOlwt`>^h/ͤ ~ 20k*8[p0TO'j2",VyAQ8xu825|EPqqftlMdg3:I8De. |(f@*^%إFX݌֛ftdM=9sIaU^Eɽ~6JpSg,>ʐܛ 4/T'TΩfRJyīNFF]3:̓-G vjʀ1a$ Nk`Ø=cRiݱi|Dj|>3c543RR M쎃 (<_>Wz~u7x atU \\#kϾ)k8 5RKlD bj" X+̢#iƬ\- (d)7/WyƬ6e Јѽek' YيoW2՘o9(ڤ"¡2 0!egRW1[=.p,BAd0 08&br-EAh{Le |ޞLc(.yuOFdwΗ樰Q?M;$C9.r<޵H-g;Oe㓃(R0F֏7N=N yzke \lL߽TKԔ^rנ<$#o2x eVSvΓvS}_R.mzi4hTyWldϓIYZ$\D+Ta`_GU^hNwnhHδ[j:$\D+ԶaZK0np/2)Іb7Y\+IA8rַi5g[QoHrd&ݓ{3>Ǯ^awo/JTbn\saSgVB E'NJKcCCq&0#FB) ll(PAIHbiA؇?z#mԜytZzPp-K(nHD)pn" A$FW=N`&} zXhMصQ]Ra-6Er9#OZ<98D,#{aʓfuD慣*9W͓/ \)< }q+6rjv\8$~enRz f!f˚urm.+z-*~7H(PT}nEM6aidQp(ADvH‹[ m^4< 1q䎏V\}"cI#)ckjPX:UlpE!A p̺M+6nGPdq[!]^PuWk3sI~ޞD׷4od@xƱwWSj1@͝l`7pʓeJe/g z* >57FuvB?Jn^_cZ'~&_NH]н2:$٬}+h~VaLa_kJ54S=Kdr5X0#m'hBap&.3%$*$>3) f@6ᨘ.0wS̹ 2-\Y,u9i/(=8^; ί0mpJ*ja2j 1Lt&^Y4VSů8= Ӊ*zC.ߝnSvHT'_';.J1ILEEsm:Ǵí6 5&):xIЬ/Ǡ`t?o/upɻ޸sx9Os܅ǿp~dƏͷ[;)r ~]ʮ+nFG]]uEWhՍ.eR%v%<Q{otaJꜺ/>|8ixu9g?ߜ~+uuxe3te-}K?zŽġdYGlPHW9mMnCg2ôvә'^h.`4Ho%ÑMG?G+I_BJV-CNSNM *5ɿ+.[j~,; G(J&Ǡ&GxOysV؂e;hvL-tu_K&J|zL~Nˣ-ur?OӼ tӢIw885Ihv-˾Y xҷs<٥#Oaܪ㹃=4XZ<&"X7r~kCAȝƔ`/bHԒ+SΏ? 
~%ܺKY( !BIwgV*ۺ4Vb5(YUcD>16MIZ`Gb-Gwo~^z47#I;n"b=  ׄc4"&PȗK,^/Kb ՔnhFQ"_KM+EU2b ixA;aZW`` xbHs{-E'#, /ਥ(~6Po #u "LbԿ!'}&Y@]2vZ(rqAr_%29ysgaR5# 8s={x`V3YW?ZMa0滤DnGj3_52;˧R,> |#^e+YD*ȱbkZRBR`2qNQ2w8=6f"gp_bQ>ӫ}h˝*߁RZ2[)SeDke-0D(4Ž2Y*U\bT30Jx2!0`*S")UhE(%*JH㨄MI﫯Oo3lvg-^.'Wa1 []eovfڭ ݽ88Evn $ 9IxncRN6)m5,Z(Z 1?Lg j;Am,6W:/Ƣu.HR iނ#*弶Y/Bc!z椐XFb16%}A#c pƩ58{9%9YLbL3(b ÇGvKʱR B}+P#f}bWש}}z; 5L0`F gnL$B(+ JRX<0'2& z6z R-D1PM-7RZ 6ۨ,e1F)UN["A. p<qh9kƶ~d oEn=b*Qda&#Nʠaԁ@"<ȱ5@-=/I .dJ(FqD{`isjokYfA;+:HMhaXVhhrD"1apLM"mM4 kD<)܅o$y vڜ}WM"g326O?g{;_zI?BJ__/Se0⇔>a94(w ȪV5k@y*?cY:D)6Y0TЖupOeuȚ&ЈL6XpcK0,F?$M~:F@ 7Pd Tν7g#0 B3 H̼לJ<,}V c0`k?aƤX0[_/}ej 7XY})_cfIyxʳE*[Wؿ[F{~1M*^i+NzX8B{c0KBJwyMf??,|%atKs OV6͇JH>|jDnl(#"iKǧpJc睲 {1c0^`ci 4UXx 6ÌK%A r K X_(1Qb7x4$.1STzOTEYP0 SgufX-Az\F)XpVKS76+$eHn]T}@ފt:9qu )fU}f+ct)@3AHNg;%&˪}R5MW\.3+GGRg^y[5j[Ib˻}'N^C1wI"ae{zk&jΒeg%˪|e.UlJ}^Y-q)qdnRssK`BN[kvw]εu91=\kIgv2Zi1%Œ9Na..'3~^btZ+6.=6h84 56.z]zUkPcvyR+H*1c# yW-O'oWbʎw3YQ5Hr\X{LS%[r-Α4E'J~ϖ3JR_.Obv^&#$TI۴HUjoƐ”Sr}5?[@񎓆\Vt ]`!Od?kSTvTy[Qfwz2 HW휽Z'c=ŵދ+~pΐ<%J銮 bXWEC2\չ1֔Qݘ5H^4zSp L>qfO#::AHUح0+}*yb] rIabl0y5oc;`Ϛ~ӛڿfMf a .B Cr􇋏3ta {~{g>0w1&QMn.}%_$G]3 O~@+U`ư`Ł+l@ xbHGV~k#x;.xLeud|ӲDƩvfx)HfR#˝@,.iB#4F#,T ,t_RbE/Y]?ߦoBas&y67ɳIMٳ=3{ᩖ[,yNIVMc ov3JqT弚/jWkQW CsS6xUUT躦ȭ:R1ΟK5ɋt?݁]9sf4NEd2+5z׽)0ƇLE2>M_%DZFKiH-5 8c 2IFz J{:4 ))&  Qw$Ё2"y["eF0ŴSMz\%Zhlt\#%hQA5EMP un{ƳEd&glQٻ6WzpMUe֩=q8< E]=DI9:ֿ& }B4̪<*2,\؂u#e'iPs5(eFMV&{Ú[ki@4 W;\t!KR;甗:!8/H臩]~У$cC jLjQ'[^^UjrDL)NK3fMTQ&LY +&hDdƀ)_)hyh" oq H[f_^bnwBXm\ Qr-15W 0s.̪+₡&"sbiZ$ڌ,^I;_-@&5֪wZD/" X}ay> R4qIILoYLt7 .>={3%Pqx,_]l[0T XEScTi'>YLEdAc ;!s( b if='-j]Pp=ei@QѾn)pP)^f5#@zޚդ3%GݍP&CwxT`gVrtkiGiYz׏MjloWyoHAo'M͗[t37_sv\_ .558 _fk_/U 7]Xq;^VdbKnB%(؞4 L,0V.2Ht>fbNy@9ʻ|mtjWQgg!1mj)5ܐmw>|_?*?c0 -Ϙ4仪:{&&gn'N]^ o;ihwMbV2 T+㍤-ƭ?܍Ⱥx؏G4[Fqxnpl[.cmWd4ֶ"cdOSJ4LX!CTrRZ (lڕsm%SS_/@XH6>?"mektŠyOVLdُ9BzsV OK3[)~NoK3ςKOw XǶYM6YpmM)+DBC- dw5<(87ۯ >r y litG-51Ë %8X!0vB( FG7|c52#e4hd<:՚ւӵEf.os:.kpBkB>ڧ4ԐupT"QI!KNMP[y)_c*,II&=F "0,upX7*]~ㄶ~,hnI`u:&qrK ̂b`y~)L,^k" & YIeƫPr0W" k3dQuRY{' x7ޔ"oBFu{SPXsmwjݟjŬٲy/=l߾}fl?OQmÉOp|cݗw4 l= ˠ8t$7u ~(s;Ca?YwJœv3 V'csuf8[uAd0YShCq5g 4》 M%[zWM/s밇bs>\= VՂ1$90-ϸO0h#J$ȹ=7dF}-{H,7xMqs_gBo/].G0:S췧M1Zqƞn|CDZ qJ pZY/]&3.&4kFtJݙL5ӿnܝ=>>:?~8C/c̡&S- p (|.7{b{o󮌅u(N;N[?b;Wdѻi:lma%BY}Ľ2qgBuߍKCh'0( RaPL꽭AQ`"9yTl?wֵS S Vx˫ 9jֆrxJ G9z.8u˒R cֶݶepcݵ ꡑu=_願1]gЀG>Jq#,ZY䨀Z MƂ=a32{1%Tz2`1%J+H=c2[NU W '<=YkuL;sℿ2wF3sofAgS},K'ny~0dJzqM'embϯȦߕ*Ϡ)90V w˵ Vzyb2f Rt9-ÏULLeЉZy 2L]5FcjI=B| &Rry 1 cl_ /?6Zm, mChժj|RQ2l}I^sKr}IkRؑ2/xQ[R'r fI9nՖg< gܣ]uz r FvzʜYY9V,2'"0鰈9xɮº=^t: r~!nU0 (z/G+1ZyמxwMDRf3h/?1Oh<NV-wRѱS*=pBZ3q;W<6''6燥EX#Хhkcl\w k81\0 GakNsk9h+ ;ʚ&'jV΁x IZINEn2K PĎZz7ò{Dh >,˫2,ru}./P%%J[A 'YW?Y $ak uS K*Wj7 M:58ֽsOF))<{: zP:\ՠ>@t? 
8ϝ}T֚>Uv~S8ju%TЕdeč6> V +YO^,̪#(x mŜ-O;sF|%?ş-8!)* o_'T}4Jh Vʚ|d*r)2^e7irf: z[\rˣZ  Nlm2ɇ{qT!zӳTz2%'e"Al8GNʘb-lRT/z`lV~JX:wV%033p|-J3VZWOnQ:XWJ 7"Cwj}jg\+n,*Z+!<$ "Јzq;=/2&!;,~e)[d9[%9ܹ |YSg#^)]}SJ)z<# 4ԂU0fJP濅],[: U"JXpTBflp{V=.[ɹF 53ag#J<~o*zksk RY}/gNͬs*e-85%Ԉ2>6~#q=y7?~֌9X|HO5>֧gY!dα(`#LtvJ-J"JL%" a༜WRn17xrو{”f]T h?鏿t8IC,X'i@Ak^sWB+Hf[bM51-ѬlU9U%\p+mXfngg @.5|ly)JJ9xb̤+8:9Ap> c >6.;,*+tx|t}a&2"!+JYsP 9{؄y^ |.}erChfDI :3 #ŷ'A%|BOņRكdM @˩U5t_^8NhJ ^Z)>l-Zv Zc'iٽٛR dwbZa/`WRѬ`M,X-em\D+5g!juVmZQ1 5ցqe\ A\2'2E}pwZ{ʨu삖6L727&9R\!XleԷrOw,ͻمgN4]7l_7m^b2V>SnSoooowC\^{dཐ$ F̑kG7y-c"ژ@ ُ ٻk*>;7fJ|& ?bӈYp!hV#m&E/0BG$7|\HLRp& s52ykc2Z"s#.F Zr#\}aaNa3I=RIi`3K=5K,:蘐kH``>"Z'R5 h\|9Z'Aʁl]lKOgIcnݏNѮt䮦`3&H 8+$,2$p(tM8vDO(;Οa<3;LC WL6J# jY  IR:IM4z4Q:9glHJڈ"xk&fQ[5)Vځiޘڅ5 -PD:ǭ55YyʙZyEMP"&f xN:Ur49dM(}ϡ|ȷ{R7_^?߾NGr_5޾z(">5͏[0C~16J$ڥ$HА8|\M(в'b9n63Э="Vw}c*b22.}wW0t<, ߿wu|qo豟5 TxwmI%u=/UlgϕV)N& $n(&HeREH$(v" Ott#˻.8ԓt`^wSe:eV30#^;Xі={v?B *$;}ayjjHԊAz biMxw`x.,n; yRp˄TC a.(u*vLXAqۅHEA m?j((dd-A߆cXeheXia14/C/Pz)kj#Am" #b*ޢr&8ϞH&]j%ՠϧT7#8>xR88XEa44 *?ch*OES0!8$ݟrWx+7qE,Ġ-x"gbF?u1j\۩Wko;lyg\ZR|u5C5+ } X1'G-"N&ރ ~k6VcG.!z1#⣟]KfM\5o4k$thIhHϔk$ ; <2Y'%2LybXYBCr-8''v3I('*$XY8;Dhes@֤߉ p\h9`TVW#JШ'XUiHOh QS%qLXs$" VгpYAYiZ[nT52c8W6=m#مj&93)ޒAFa,32$HO$H )G{L3DlKf^n>TDkuN^D ~a:4At3yX8GcwLf 5*#d\1z tJ!Ж$窦x`._[l%)wN(,f:\BS+̨c= F!'c8'`$.|J6|@%bŹŠ3'4bN${0A7K§O>.\"L9t8o #f*큇J1.78-~` 6 rKt`k/+5>=WQ!aAoUӅsN%[ .9uOS#dtsnEX"52]@zVXvXsRXk<_,K;IB /u+p65p)ph(,Mҳ1gP̴PFK.=VG`αu5&A(ֻK$A'1l;4e5_-ݚd L?wǏuktQil`MJk)`齵`}[-0H`Y ل0K VıSK-̊_gKo|*(>l#ee?Ns?O@v:_=^Q+ xEhoe!{N*cIQ _nk}_*oݑW!S=*M{<u63£w sEE=X>ҧo0@ Ϧ?_\/Y?/;w,Y sZ8Fµޅ,h5-A"LQahO&Jrwp4ϧ)E*zRehũ-A&[KR┢gTxtr̻İj1Xo"L!Hzm}z+ >EML}?!U^8&pƎw{0)p8'A ԯ(Ĉrӛ6ұ5 ςOɡom qU'[}c[C^bC3iHstAuS0lMj.R=Zs_ʀ^Ɨ!K"+|*[FmI"~q /J4.6/J {%QFa?Q&%l0` 9?tdVO@k'w}8 &c;vPw~>18Zx,gڐ3^ 8Kҫ5@^_ёWoV9lHa( PW)\hf3),LQag֋Ҹe 5q$`:K#;Gg`vv /7(zw:x^-'V:0:<?E Sm:LϺo HϛaxFsu$8 +]|L2\@_9OW_}z㇟.޾pgk,X~wo}O.>?/˱lv.||0^Uv/LpYL4ΞgS{y2y:bϦѴکDR$\ML:Nv{wW$߄0BY࿾O 70Kov0K~~u܌i&] h0ZG /~̭.^䌿  WFM(6 ؀UfL'h7Itw?ѣs;\`w08(w~R?M'3m7&G?HŷԋUeCD˘ղCjaْl}n3|Z֪VjUkV-fSY* ),߫[34F%B֊?L @"5|JD'G$RSk{mހi`S"ǯ qb=NSBk^] ~hrWXN%Ʀ/}{ ЫGxMDZ R &n"Zicd4sJl\ vqJz3] M˕}~"5&WEkuH+DԄKYbu_}OS3gJœN6+pt-d6 sgO 7:)R7 KG jY(֢S%b] b{q3KKP-֐Ϸz0v!7\ yC|ȗ~햃ҐcGVL 0䨱ZQ9t({m`J<] jSܭ2}{N8& l_L~a7mr;@. rum # ;|VN?-d0Q=s Ơ!>xAM̑(9MDZEeQ*mj'q.p]`L ּJ)5v}dPJEVT\e @N[m6 Bt=}{&.dTgG%E ѷ2ƕDwD&q(g[ysyDYns9mmXH)Y,|~usqޱvHRm?Z1L8JP֧Dt WqL?-ptz}bu,&ͤFa#Y!ƒRN#Di of7ߝ =flusw%0%sVko҇SZKOWVO5|7PuuһQIQy2(%.\$8!gzHS+ 0X0dp\NvZ9ҋ4- G=ҚWTm`g`#n[:eV";6?`'ᝬ> cyǚ qխ}~!u.~ɃI6<_yyUÓdܘa֭s NQ-6\aV#UQ)yкB:)Q_O'%nRΨ5P9F*]I&@9:D戊uBلmu׽Uj44ch6Nuv{Ab('$g1~EX2%";KۚfrZT]7LoM5#%DJ+\ NKs@^邸Z}n$}0x 7뉱Ǟ$}u&cq-ib"c)DILٕ"WD7o$DYkTB2Nxf${F*Nj/b(SB,iDig\ O4 by|79*r i/Ey8ڕʓܙ JTn8s~\ C?[.<8بpO\R Mk%jIjX .u YV1%FES+,+ƾw҄sAkV{vTYmn_j.q%X5i*@]Mhq͗n׶{S!vg!_$R3&L vS/j8zďAɦ2}0QVxxQKf0b5ZwX~n 5#I%Piq&}QKCCXz(kqv8Z©wn9*_5S'uW*smJZ Š=L4[9xvOޕq$B;&!@K6Y yJ&{y, 7ͮYd,)P7Q_DFEDQ$szY5 k|>n5PQ+E_^泑l4tac߆<$ ݟm,p; ; ,I/7 AdH[(F ^5Z[PAj'<R_?_Ltb' }a1x9;1C(54C_y񓅗iĵ<\P>cz0⪥&ɒ2| M+ ̋`3'ėr\*;X[u#V. RXːԭ@̸ P檰NTD][>5s;3:2M<4\IueBW0x{v1le]"^.w7ޏ:ލkrm/C- ,o!V;YvY<\[V<4GzY6YqoP`qP&;Bi@PR^  ;$>8i9jP]ƏCsn#'M#5CpTϿ||>#E2 ޿_%$ (D熗s /s]s?d1Y>QڛR,MbRh{6 0$I q48x F#ZƶO= GAhrWӁSJ+zQ~*&FRsDiJL@Ap<;(8 :`Z$ZtmE3R@+rxҫYXLB.L;9dxGY(g%)g])q \i0k<_ ie< ڌA\,JC"4 8(  8`)q8'֠c FqA>$PI%FM.ܮtYt<=iڳ}z O)gn5|ZO/*+qޥO J$hM稊h\O {6S ǑpIBrK (:;# Ƃ!EB!JX*7MÃ8Jv΁K?,Z˫{~&\>~UFW>C޼~&wA6;`+wn}#n#K6[ |?=^32KOw&K/c>6x"DXo^(0)&3z[+t*s Ά6.,j.a. 
(C6 Uc},q*,Q¢ &/yI9TD͕4w\ U=8â3wrMTõjhLIjN'լAlHc:K/N̚Sm"XNOv&.3K˅g.ޕY/t%RIbՋi 8'Kf%]SF.k]Z'+[9fM3UeΫDž1){9$x{Uf[gv2ꪚ=\ ڥ=()\:# +}gCV*\J”e [oƎZ 8ш( @Dqsf د{{~3 m&ZHƫN ny刂H|y^!/QeYp6y*qa[Qvwѩw:$k$G:-f:Xʤ'Y 9Vf$䙋h%Ҳ:yCaJH[U ʈN5n|Zִ[jIVq!Y" ΢/;No'0!-5:,%0",bMf&Bsr67ahKMıZB b"WUC wY\3@li|v~ѹ%T`ioau8kv7}- ]bH%f)){2l /X>oGT׼'9'K΃TUUl3N?TI G(D]<`5|HƬu7/n\K"y(eTV |bMJQaMz)!Srt}O`F_$U dv&]'KpMLƷkLL'#?٩Վf(esY Q9V4 "|7[oF?9AڹtM5\y{)Ћ- |jfU)wu!,?.I4g )K@6?1$Husj=/^=vL}).XE3f5bo޻K:ڊ?k\8% UAPЊ҆^6֨1} qS`?ӨJ`橘^C(9o/JѼ})bC Ds!ޙm)پǤf,:R熻s~})DYp^-׀ jA&%Fspa+kxڊlCEIRE-[ؾ}-jƤ[Urj܅PX' ˭NKHKe$ T:AIˉӥ(88,ɸm?|czV^VFϓ1#TdsgBZ Nx* EfJmV7F}o2=n兽8\޿y`/FCb􋘰ZGӋڧ?<QRjmyRѩ$MKh}2a sg5N~*_)Ȱ?8Fh?FL^}OzTe)n"Kki%#z_Q) 5He\4򄂹$dhhI ̯<9b 0i,%1(ʾQ 6$A!ވ>s.puhi;:|X[r>IXQ){ a|_f`A̜k\<`PUQEd.?Vk{_>1gr|N~)[N~.#ELFH1Ukmf0U,}`Cr}[%z"0Z93u5"3ijW@_2֙DZ6UyO8|aU`߇F ݍRg鈁Cjg;z:#&0^܌K0퉦]x|PM˥V5S@#?$7rINJP]Oɑ+W'* p\cɁTBI)Tϓ#pkîPPL>dQ\vQV g0"H`h0Sy XP"Qʅ3Ѣ2Hbp6j-lR9޻8ˌi gE`?/d9B޿?ޤ.1t0(~02I~dŎ[[u"((cn:Dn2;)@U!e3͵&F'5 j-Ab&vQi '!Ρ'Z9ijѴ A~<}>E dTKxjzm;rxp4f4Eٚ& $XZUZym4lsӭMZ6A[M$ `׉CX(㝓 #f(౵T°1%ν@035g@-NT1JP`2` R;;$EU*1FT1RbPͬ4YLxL[P'Y/͂s'+h߉q jp)EIb*/Q-'afm3 ˜Vp`c ф#z1GJ@wiiibڻj|i_ݼ*cmIf߇b4., aNo4J26 0b""Z o~T%u\o=/lrʔ lA.PozfZT@z=U>S ktF;FvE `'_ %v:[}p{o%%;dxAx]d zFO5[P퇡ŌpeJ"5oմN^nuFna3$JPd )bac{ؖ~|V*i˗YMɸ7t^!X7aBYF98_4g4&QP1vۏ~c춟ݮ÷{Dwid.BvJ)DU!zH>~z!\A@*Ak8ňOaÔІ,76^2]|Uj.)c F/ F\cX K BF'1Ťx1D<@)}#~>ID<;P͞lMT~JEwKiRgK 8 oy]l Q4ȃw=JNpn`s+anXʾ'Zsֺd%q='VYv^m.gHy mkU)R3s$04鹾F̰Nؖ^\kOM)E0e3Ң."IJպ0%6Q965y 0!A2YO2%Zn%Qt{BX r&(S` τF(D/'+F9xZQysCU8FRr9)1ߋ{L ]3ש2׮3t#;bAl)+}|Wx˷W|{dEu_^C( ̕P׼?wm\luRNe̅zH&U)9SΙÏnF^tՖYSkc$ Sp:>*%9j)x/?seE* 022b*jOQ웋Nqv$TjBX0~chXn O\ ΢0.τB pQ7+nxXy| PBWJwm*_Oaa&e/i.˝ݫ`=bwaY+{@t$۶| ľ+ QbJ4*l.k1b[>W7u##PlǠ!6:(SDѝws%[׆dr:|gk+^ t'Džl/><B)83&dpNx39>௔=kЪ-d>8LK.mh%y}˅꽙=Ruӥ[ Gc{*RXQ[.xv^xa44[(e>UgIn&Cx)`t r|z$S Mt/M)xDDxȧ}0Mㄈ Bo:&4ryӟ`zAy/*xBݧT's8K[!㜟at3ZА'l$ę%4ȖN^$lҕ`9#r<3iV7Z"h݁f嵡փ _fZm"LX wCo$t|!XZ(Uh4!DB`y\`jkqaK[*[bD||9ϰLS N`sa#ti:kU8LP-ۺ[z|„|ʋwxcHco+q{ʑ&%i$%]x~mCJ|p(/"S R惾N:8%$TW&BIM pkD4Wç'A?~0~?)^e]?4UΧbߕIk9ϋtO\(y:]\r_hoN3KkS%3iy±Tt7Krs?SIMA_QF,;7R66=(bbt>w4)[n_4wBsئ#x]ZQ"IB$ Ʌ^_Z=o!/QvʬLQ"y[֌LӝyR7n|ŗ6FV=Xfjxe{5˲MToi#҄֝kU^܇_z{{f71٤d6>f 3QɞقԮђ~ލeUB<jH=>i~Oؽ[ B~5< p o UGsEb"P {w c's/s=f7P婥0FxL;YnN 6l{LҼ(e?=9lgvp6yʵD~6yv״+g!t;`sr#LN6.`a(A63 FF0"K!eV֮e(]O7=BM+r\sU!OՀEcCFPx0 \m= )XF!ImYD\VP0A%#eYhkՑ ab" !a$q\(d< 1pjCd2:Q1k!(GA t@UQD% X[=q5X!'0\#h" 3\5pUb#83iA,Ȝ=r 2KY-sPن.Fb:HSa$R9¤C.rx)QN9X>"(ElPR%!L,.E!®50à 1C2M0U4te篥 ǂ(ij'9!P!!BrD#%F"(S)- !՘(scSoc萠EB'Q?eѐ3e L Yt ǹRE$лq芙c5A:Y%b_ JR3X)=*`A')7~Sxn_pZ[71n[ޭgϟQ,qx,"z1f[d;k5߁'[H>R<-uU8|{F ^NĿ \fFt$E{W ~Xu+IKao3t2 {x lN*IyORvI7ٱZU[$_6an#u=%%)ӃG,2r0Vn}?w'F`,ʄ04#\5C- $z05Tzy~4Heٞ"Wݣ/οK/kqjRt{j}.Ix@QI1q MV)vr> mœdղ._硨B0JȚ2qwc.ԏ:Ԥc8[@ \nA -Z{cZHIC1=`iz8FʳKjD?>C|7Sd5Gb8.|孜yxo5$@m!(9}Mp5}_q'I귱 W>߀lnyƬ#ӒORr&c=hmӶNg@9G֔ ۇȚi-'+(' 4qX/Y3>=̒fL% &l5#ju-ϱ Z3->8g~&O7]~bnhVVψÛF5x?-'S.gϢa"a㝋m\Y]6]J38}хǷ>mm;j3i7l6TiNY5o 4ז3(1jnA2С๙DKʹCH UBJO{8'{u6&766 ~oق9ĠI tn53a>[Ӻ`| A ;I\iB?v[pҴI>m…^9}wAa╓[PW?=a贆m?Wݻ~N/IJop &%>O/~˳_N/qo77J+_K5!oBp-o87B&BWNp#BoAZPSߵzi"Lȉx?Omױ]U(+P1=|A>[Os }ҁ2/yojtkWI-w5}ww㻗n{4Lt߿~1zr?'홒ee_ .9/)ƾun[؝&Z@ZI]^wia`0#!-puڅc=(<7Wߟ)^\{6qyU(&bb/v*嚄K6T{]etޓ2>[,I_Bo/gx?Ia+/$͛˟(~o?>> $wiݡv hODm:]^[3ǣ~z \W77;n"/A5nmP~BOXK*)^AWv۰wWMj}>0"n %/_p"뫁&}޳-֏!$')JG67mOPg| W=w Z8tbqF"!d (ww(x)Ee@ȉW_,0ٻ7$W~V+Cf lXB<${}#YlvN4dI-"Ȍ+##8( 9kNF!6`wH@@N~jY/C C%CL!Ad)n%,=7޳`$رu e)÷51:Պ<8jdp(RF2mXڤ~?W `I)8ɚfkOZPJyi~Bq )ۢS"мOu5ˈL~jFsJ|N$f ԣ=EVx0ztSupH͊]a S*9}`J=T*!6z]V#ڣԙ U|tAԢˣoSc<Β_3ߖge6<7x&.b5Ht[>.~*ܫ5nKl^tc:Qcuy[ᨤéV9y oŬr- [%EţQ+Y oUuXpt>G{*~ ,Ѽ;,t[l9Fեv9D]9DelN {|D\rW[-0A.p̵Zbԯܛ'7bA^#dB%8 a%qk\cKC  -% 
L"`SUj?F߮"X09/ʑ0mСb=5+_ XtTIVxI[_>G`3a%qIZZ!W,{bm5Zt<;u;ksVC$k;LK&m9wĩj kt'ꜟ@\zҿ(a|d# 1fixVIIo յ],wvQؗS)&?_OKbzȖ;1FӦ]Nxϥ|$'o^UjƮglWi;v=[L:=yA3$pF5jPJ-௱Y &h&@{R-׈m!b5-hv1y I/(Pq]{npxт|aRhJ}aGd_y#ٗ#o'l )Deˁr2ڃZCG Fp7GX.N%D|,UZQ0 li2Sc%B9#, ONQ{ 7p7.SoHmbߵc肐F@Ar΢iU@ B9fJն)Qa%9&c(9W=tFVJ=,п lS|* VǼ̵\%o'  &amLwROGQtT1/FB,u$2(a@]r&@QI+ 1RjSyؙ_e^E% 8qR1.IctsX/>_~:JT ?L:-Q'9y Irzk}wtIC^'7g8u n/C.w1o3 ^.0dT' ߷ }A+86oXi Q}R#J8t+푲,@?<Wrh@<':ȸ0}??e7s#eI`' wγ{1i9fIjz/&mwrl۳o]4R Pc1>dgLsR9\9eB ~6ԖT- 86832? P8M AfœW+@ڲN_JU\RaV*N6=9i%Fo><^=ƊoI&yMY)e͝??zZ=9F\rf2A4h<Gx65gDjN%a(׶#;fV'8 GV poM`$nΟZ?43&B F) ViAҰ6b洵 ŀv8 9 v1^p̼Ȭ}9cAPT[0=& CIbAev2$1fX &eX0,W"F`g$hRv6|w3&l&yQP c[3V9SE$5p\ 1q:6kG: K_#/Y9oJ&`=}M|G? $="@x+4x2]h4>_dFg) FH`)ۃ7T[Ho\]ePP'5äQǶCZ 9O551| X=P3eW 64 `pyD-_삳s $]j}ͤ TD*jKj0  KB.* rCc+ iu ܃:TN{ku^48TB{ a(#]7%=1DaT@b8x9kNF41$z1%"ISrxhAA.toU U%`q6 ؇v(g#Srd'o9`tsU:A~U:盕퉷3!}Hsh{UXȂwB/1]DKd\SYL%@K̕q -= 8S5*+65Bxvch1 s'Gy#w?zirƍ$=RRsg`tQOwΦcR9ц9ݫ9̾WS>9ԽQ )kym>ޅ2.eKB{?L&Ow~-ݛ+r".V<-쬨d\8@ff?%jE/ e *s1ia> O'}b`?LqbEW* g4;!+`*߫m3}H*l9,Z0iyՌTRj6$LFkQ) 1TnNj([P &q XlpBhGbFQxGH= EfaT*|Z`.CHip8'BPysVQ"6KGlȜPB dŨmKd{5Ͽb2V)dnq)VmLV.5AZ=`,uTJ;% 0 "1 mS2qBU;yKqN%FtOUTVkZN)LRZ pd%@{ޮId %T% i`ҠeB@Lpp 3&6(܃*A5XQL_@Pa]G;MS@Ͻ}A⬄PrBiFnEŽwTw<}\,7Ji!;9'˂{K.uZ*oW1E}TT[:q:{qBA,ȷP'T hOMz`Uk/^+&tVK>y^U*ԚV9_Z W_V7݄>3h:w Q,Fŋc$CaT_J93g~ZQ?v^]hRKv-svZJϩ)PsJ-O%iP/DAdfuP%נSPcٶX!<m~Jon~ ƃL65c;U<'[Mswr2.1 XR(xg!U LbEc/1L77vl8K.n~ [-ŜWM??7/90p~[H|;mJj?Pf8MSa@Ɠnj ^$Ne'H`1 M?!<eArtq#ca6N)\+RPâ"*e8)ub6JQ*PXzq/9͌#1:4$QmIИYQQº؂!H D&raU=h&vy$ $v<\! ЛGԭ s qG8nTR"`C(e u:OUDmf,W}_v˟$׎ @ ݷ -H* J"cbE&q`iYBDXKs+ZqXE:)JeJL%, I541&FfBkD@ Ė,H5p$:+A6P]g>U: S kp"*5OD*Efڵq2 f$IT< ǘZa'o5u؃~vœNa) m1%7!`#D8p]H9@9򥶿BvPs,꾀_aYT8P^?N6UT\,4#!Rܣ ;R=E*!|2Ohɣ+<㼘"Opj7]:6h^oC;o++,k/ssjSs$TKTG!~ٮl2?/ ̪c4vhMp~@ډh6Lԕ\ï4).v.Υ9nA8W`2Liaz/)εBy^uXtt6vrFDh~NBͥg.JW|lyr" y""S7BRuBb#:(KOoNzj*$䕋2Fq\-Bkc$^c;3|{Ԅw\{8%8sJp'}X|Ewc OR$Еpw;4U3SGr p.y$pyIaAa-̓1qi04Hez 76a &Y^2l3Y?6׫׺ .%v24J~~w"`]/.j8lB]{o}rwS c`Mt#<-  `one-.xS2VR!]KQ{~Sۂ Rh˰ MYQДriv`v=8&R091#8O7^&MЈ:7Q.2ù(~&4E*!\[|B[a/W CU]:rzmyvT[ҷ ߢoѽɗ_]hѽ!Еaonv?Yxg.YaO %8 ݲ'qI"|+5-֧֊T5R=T5mO -SfHCuu:pg@ >RLHpi]KWRZV;9YohUI*0i8 Ofl4%(6kۿ\א>ll}Å39,#HJce0Ib$M3|;x)B3XAD}yh`9@V קjp+*̫L++\au]<5fi Ĉf 5F4J&@^qL:U 6DfEjO׽_?;~;NK'.?TPr*qT30~{=YC/o6&Ye-0t¹y~\u*y4qM d\g FoVLOzڡ%;V ^HJƌ&f+;c8zb(B]̈^[yA9+vE9ॴkta{03x.RkQSrI-^5p;྾q P</?QFz+rn^}`P eQhxy 7d(kI}8--WN䊡JhrgҒ: HNжRhWH$hV٬Q*Y/'U.R DT6ÖVw7kaՉ@-; ~Aƣ!|/yZj*my!(%=K18CSYO#Bp,ř9sTsLXIm\ zpGY}+iU'{gGsfɱF6U\71/$j6/wXh/1:gFHV){oshvv<Ѭ8T@ƓJoږFY3"q <63zrl(d+ZY?v2]]3&'=.\k/yw9mŁ*+?{&Hv%W.\Hst̙t>P>>{4Yq"^54&]ub1'6uKd+Tq*yb)BeCv}o:JD|ΊCJv:dś!3bda.ٝHk%?G5^@ %Is;4/xE6%r htJ䕯;y_\bvpb/jf/ į|\C%֤p9e@5͜V&WQݕhL&Wd:X^.r, ʷvxAs~pkDŀ_C JQKyQ^[xA]EVK }K aRCtA@g:BsYw,>Sq}~w9Mq2ᨿ .%v2WzQ;wFJhTnG0F3 cW$E6,3B+NNb2,2Ui[χ[6AftmTDC^1"8nCi`0͟M?C?A r*Y1oe`FX7f_y jTLD )e1e/[ۯ G_ |2 BG=cc#x`Q +XFfpSIt8 zk4B7dnon2bӘ3dj22bl)KmQplDX&)I v^];NFu{xAN\1liO|fڠXi7RakPjb$ĺa Ie* 2AITBLI)&0W&9F&EP2$V$S, [|1Č\cTfeedatjޯ OK0'潝d]9 gcp$G ~a45ϑsFט[}5?'ק~6JFg̦81Jc)|Р J3cp,33 e6`DJ4@P1pǩ:o}Dk7y;$8I%8Tb8k42!l&eIep`a+羛d`Z8wGl-BxK‡;B˗ۗV8Wg0x(zVU Ozsd,?]?3'OGό==;hs/Fէǻ?a5!jpܑK_PcE*JIRUсɣw_Xn 8[3>~H_7ޡin}xddž+x#8 5rQgOm<~p6kЮ3"RIlcg)u-b&~ S D8 _kpZ|әdk-U1\ ʓ\B(-$ǞyL_'4IE.=si))"|qqۛ=/=fޕ$"e1eUއ?,l}̼tVIeIv`FRGQU"D U_yD$|Dܺfk)TJAP-Blƴmt\H6()/2f5s.N< !% TsgSYa 8l,|3dC`|g?W{{Y(1fBLPA%(!rc[Vı:ü̱|0褭7xS8^[$Xa0STU;)?p8B-0m\pdDVr gVcGE:0@F=2fk\cŇ,^`l^1ۼUXhW ayK1pISu~QDu kpC3t qɎbL{爾H3ewPA@kNEs{ÝoK38昵8uOZ-rV N%a& oTlwnRPl,s אbɊXSBj^K#!TDzsM?#&|bzqlvӌ#𓧠G[IY0g,&W,B0=c0i{Tgth!>jR[+NCpy5yL&N QIFkuF>Z ņlvjqnWM _Tpö 2C3 ꥽UHK\raZϮ V 
?xu(quA|xb[:vY"^ozfxM>2n?g*Py67'x2{tPd/?=97xk<buyٽ?<I^{c1is`/;9Ӷ?yzys0O#5>N^י8 G+kN+ 'XkG͞zܠS1+uM\}All/X@|7.,?\%}}ގv9mZv9Ƒ݃>sMhvRq`cDsm`"*7cw]1voݕoM#%F9 R!'!GQKƭɅccG8^ԁLc ZNI6O)[4TA!BL;EIOɩ͝DYƃ3!yx a$"bcƓ(r* Tq9b98_+[)279@e9׹{QeQ)H9A"əYg! >OCu6,s8mԸ0a!]@MDg׷=sR_<Gd9nk%<)[X#!^e"ty](UͥFDCPm-^5^b;M9w iq;+9ǤMV]>/IDǔLunU 8wR@,8R`uRχawǻXmH aH=Ob An) i>O޿[}b9m|'|3ADdg  ~+saTH/_( z])gdfcL84 &P, Wۃ7T0 ,@̊Cpxv bCeeu թDY'@ʃ+^"o% [hI- !?ga(|JDHxlBYD( x lزy!H# 6BL*H 4ϕ$9BRUd(]&Nd 2 tmf4 @R-s"!q ',؜Yǽ!H d$)Iu#޶#-#?8UlN)F` Rbcnè@b" rlXJs+.3!_To^Mwfb͛W/D( |]_ ')5NA#\h|NV͗Jx rgmݲ}zvIDrFJm,pM(49 $ȎY  A3[EU_Ť 1Lu^T 0-e:5(?Q?7@= ("^]+&(Dqn"_$N-L6&1[ub6Ù"bse I q3="‡ < J),R'9JXL&0SSZnփ6S@VP, m+6w; `fSLkUd{ 2 *7Az]A9\k^ba(Ċ~-r4V0!3A>bɳ 0z ~zwܼp?~IIPE4;]utOdUGGƲ"dVrkuKK)OUpD('} w=9'+h8XI3s4Ϝiσ[Db+n=eZţ'] f!*Z*FFg4@!xigA˾Ɋm8ODOөpll|slt -otHWnݻLte1+Cħ HӴyqOTpN4oa$>5JWd ]Ѯs[[b}XZ}: m3:Y}:g~_ }tbo쿗O'eAm&ͫ[4A7̿!xl:^oW"_#g6HJI&)~e (L,rVgv3o`tI CERqB&EM3l_5]O^q8-9RE]&ׄUMPJrio ٻ6ndWTz9uvkHRiSI㜗r(.$!)rDrD 3|nPX^rƔ9+bp~q^Tt[ C]&(db;ȌfBVw)>G:rrЁ_~:Yϧ2 U„b!hm3 ତT"o.  Slfcfe&$ѲkfPYM'q4A N%og)y_*LQ:WIUY9fi4Z)p̜HiٓHSX)A"%ɽp9ه d OH :qr;^4ǨA~r(:R//'  [OB`.פ`f lGeWIn/7/n|`wļn(9Mj2 q?uhK@iM^y{d6aPc=p;qڤ0[=c]UdIԤVHx'%wxa^ھy]F>ܞo[ڪYY}M{}~E/fb20:/@T `DL=WnE]ZW"PMGŐ~Hkg`@o_?}nf$])C.]d㐭2pUc4֪hR;t<fֳaQmstUxK3o1UiA{U@z/ˤzWb*KAFŠM2^(-I-jW ̱CY!b'`w6îR#QBl5 ̴32  4 OFBYoƸcVfw?F?(DyhF:bwBp8ŃZ ý !IN D*`f'{_/ؤy_g ?.?wz5q;}ARY^q 7p,*]j)47EhVXN DK$ 2Ļ*w+@W+C'dw$MΠu hYr3*l*tqce 2NzG8Y+"0,Q85(q >ݻRP1XY$@E nGHoy[E2`.(ܵ٨C&cv )2*-F&S+FMKITɩݹo&ґΊKUkgjޗ(OZhީ b*>!P-w Kd^~斦T0$^`MAAcpsQ|%OĴ+%_s U4\INQxT?X8X礹Q١tuǒH9[쨸% 11D\C) ZT%l!k7IyYKxOm"^z]ҁdŹ$@IԴzY+i/Ԗ|:U^,D(8ϋjkFFL8ʳM|aM+φg"p t4I|0/lg0U/wK"|@wVVuBoN'|4PQ"*bߧ;GR3r_>E|I1Rl U8[R?X 7gR QP*H!9Z^Vx)Ϩ滙~A!_?A1Hl (Eh,R(k In1;!OyF!?oAu="asck\w,-yIk@!2RJ40-k OBLȗ=y:;+UsF9(ͭۋ5~13QIԈ[x Ss51+^읣3R&~˷yUv+V|K`F Ͷ9q!4pc$.GqX È*V @(jʥ<ʥ5EɀMk$})c ɶݲʶKyڶKjj-o &υn5#x -1hL[̢(&z7Á04M8|1iPGɃ68VcDl8XBk [xe3鰓DZ)M;KT#rxtP˦t„b!h[%aEuତT}fI8S>5Wڂ Ғ)i9w8q8Ji+%m bw'Sr=VI"o LGd/N 5L:g U2qlΉÐ.qdêν9' כlv>_MXp qo⊢3 ؁L3&(˼ÜTw,` XkYj,s&phl*S`#2zGSrJqtON:C:Dwcg^pDN8(2Z.k㣆 Zىn.0!νҹj5i^D+PL8kP\LaGoD -9E!:Aj,^jOF'囕-޴[ ݧ/(YyqlgCp\FPD9N3WNkh5|ZN4+ϝVvyWik Z#i(ΩE;֐'< yR+9AFsS͈lO!6)0׼cҐjȵh\"ҘBM&}p'5ᡰz ok"wi$B ikElZ"AҪ)"AEl^\Ⱥ ! 
MbV^KVN>Rt!D8V:qU[64gՄB*u mZdkW9V6k)_bXޫi߿!QޥSs{s1@zjPp NAA_Wz3(8BNSAQc޷j?$$ C2NG/LG"P2wϯk=M{dmY+5Sj!z6mF ml(tAcĐ JLHNZNXrr@]p>+oIO1XAI.-$e wk%%C o@ʣB iyEVX)I9@1rA\D85ӀvB#(4 5cܩw y9/̼f~N]5<+|(94==8%P12 |Q"=]ի/avջw^ *4cl ĨRT(rKDHq r:-9\Q$!Zؽ>ɰUf1Hm.^G\"uX CH"QAсۨItc"LgNj$׹w;H5ܲo?)-RtW3};fBظ;l]JaevJG3ਁ.c#Zb7|c Zc14k9ǟA>1h8;Q|\r6@NOK:C>t4⤕A1 +"Y2HP8FYj@;`Zh9mIL‹W],QD?v~hՈb?M,v(mDrco>^k/fEvǢ"G~^fm/;ϯ2O ;F&KCwIrh-O8m.?]_E9zWv)•=Po_{St"T"I5jv颕 =ЕaFM+F.EL>)J@DNhR$*F(c}A/Ɣ tOJbI <=K)z:KAfzfe{[yP8ů|_$KQRXMP LڲXe=% X%Os}f%khD۝,moqs/ =Y x!jOd=*3`p[\eN~ONU>d~|J~;O}F*Ӵvi"萎Ar/?6'C|ll0_]«@x;K4ɞނ_e\:F١|^A{v=mqh~sK}t< ~~^ipxě26*~問}}1^*6%gهߜfO㧨X:no߹Yؾpl ' 6>zwD^ bogqCy_$fDf=q TfT4=tڣGpB0hIrلƣg㾇r?]F!P\*֛Hr4%UDKTWޕ*;8[`Xf֯Y8| }'E->ҖXVO˛B#ٻ6nWY,v~ $[,Ҡi_my%I=HYsQM]y9|9XlL aF¿J―SmIUkIA Hq7χףw96!žȇ/F s_̕o'WyhGd1\ }ZjO^a0j+FpF-XbT .@.J>_/1h\33?sK$Zn`_ͶIҽg$uM ȢG|W+ߟ0`1iQ sXF<ψ2 ZKC-gޭxY}>X(_鼀VnjR2HHpu qz&d5k{jWԞ붩S[=EG]=o'DO吹=HЃ;ƁLy5GlDd<QJ@Vj8k5HbN V>ֿѢHA(>E/~Q|.jDDF1WZ Rh$3gX 9)V`$3j*8""EQ/?y5QġI 3HІ0;o4zPm$ ĐN@g>͐4;Ca@-//,i Ma4)2e.[d(-*79X+ߧehFW̺fz Kn7(:~*~5.):fU1.4IC+}HjJCtKBy9 ՚Ft $G֣mWBPr*{x"C2^;/J21A<;_~ q]9H%B ׈PJ~PGhBCK~Qgf<S@R(~-%ujW< 1+ȍN,RݩV23y-H3Tҡ;qZM j9~SFP6BB%̈)٭S"vyVK{/\ßP.)¼djaM{P#0 6Q!pۏ-6MPG=K mG}egN+GJ[ '}O|pܺϮG`?Nlj*O"(H U"=tZ8ecZ,NzSOzRJmDy#(5@a?54\T3ZBTOZ@DeQMYV!%-בm@0q?/L+ /rIXQF!I&ej[`٭`/>q7{gM_P-WlMoݣELN#D<-uճPUfpݰNC5DޟZ#پlߟnoΠ1MXO9V(ZT gZù C3"X[AAbpkvkiR[Բ}u ҘV+~]6;r[H߃1z;gW5+\dwW_=ޮ>?0?V#|7ή(n9 n=*<96[SNT2O1i5Rup=XOS|D%-GNxs N 4ŮO~Xg=ٽ#k=߾z5v:f!GDce<1V[)mlT \o~B:x7^=|w_ʲuU.>>ݘ>^=ݹpՓ}a5$pD' z vr4,$m}JCbᎫ{uμ]:BZ#^a֊ͧ74]W\E4.p;DŽbLR6fL4T0xvBŎo/`:'7A,xʿ.Bv A=,Ss@PXpѧfa֢(U[u؛Fjb5E\v 4A,Xrq"\[HID̤N(Vuܻ k Bd=M_ B 6ah9Q+U|p˟_h*G%~Ae%t_N$v-*jm{g ^іx SxbU X KӃo5I!"5#J*gvCѦߙuN*kll5&!܌0VBLqRy *Xa)n(u>>BW"BgltN%Prd23ĩEBhV$VXgQ5tuP lPex nBчosx* ,WOd(_DRr?LgB2ߑwo~W3%_c~8xADoҡџ㛋Q1/kaz}=W=fM`/Fbur +wl5.F5?X~bucb&II/F_} J5DŽ0u1 %E\ QivT$c HP)X>EK_\ |hSW+G~lկԼZiЅ۫/N<]&|XNS!FBwt0 1 p QT#$KBhȨ`;g2#ַp!N:2g<6fr0%ZI˸53l~S +l13—˔$M1e4Cwg nFZ1&Fcn q%1gaV@L*&7gqhIrfhqZA3 Z8! aʳ<7 La9)d08kS)* -b`yQb,g'Ug@YrVlfExZ杝8MhmWy6h. $Hsƛsň k~>h/5e2foÔ6`Iƣ_OhĬ`osbxW󙺲b:Pڴ^yӒ˛@/Q6FQ`鶧\muxիhUr* Ph$!y6>]*ֱMB #ەaN'sA6Gy2Ew z-;7x}Nof@ M uN)aCrN[@%QUÍP)|FP><

/@}؁T-0m[HۣIT}3èMg9I@ e?7fY67`K^,W7e wǤװ+ n cRM]`S rJͩSR$EE—TJ,>rrVÍ 3 qJr?J ʆàĻf} _F*,+քT"-ɮFPꓯ{݄J١啚1jEL5*Mgr!jDݕQ4f\d K1('S ٠ApMg6 :.lH룅[+9_o?3B3Ă*=vAȇp1~0m߸Зt`Lxπ6:-_)ZmyX5,#_jj̀=;F͖(w ogO\yװs_dN,jsuv|]DI\a_" y"%St{ڍ%S-щ}G_Dim_J&Pg.12E0|Ⱦv㕐>[$AFڭX SEn՟B[ y"%S@^k7Y9Qb":hhδ[|8yvK!!\DO)2p<<{ZcfD] [ru 6t1jK\JBMIkZ@+5ME?`t^/0֚&omL m2F3`e[a@[+1H'LyG/dqttjLd8/hu39˙), !d)R vyfܮ"_. eJ"yXSCFmFh\aQaTDΉkUh\d$%)X7-Y35JփC})`E jU&mL$ӍeKZ3JHWBh^L(aQ,IIAC&8N=Q $0GOUiZCk,Y6b+H6U=[WM0|]"KEbF1J^l> L(O6Lj&Vfg'|WS> TEm zOH+bzRI{b ZoCJ 1YCE/o" 9HuO:'sxD#8N4h醽kg?=c` Z̙̆Yc_Ѫr4'{6`Ъ9~W2ro@WLs@ :W #;|HB'Yc8uSgKruzLHMp<gǹܥTܥT!R;5'kmr|U)\ =S}{dw`ZK13iBU,Eş~uDhٟ>#| #>oέ?Eo_uo,k0QM>[ԛQV oΤ(G_P&2=x룬ݮ.QGßM@p eh0oN=0gH-zZrK'BQ+?¬7}ˣَ!zTwLk߫H%Rjh42o&dpy:њ\c`ٽ@!>_OmK*)OlZUFy^EJbk|9 S\)߅Elyyx˜sBf,ٲ2&{wMdXN̯4u!N0¨˵פuXV;~Uh;?pPˈB4A^lxˉV,حki Pu8+AFd(45Dd$c4- Z)GG hu@"ŴT?$CJ|a's8BrH27V.%'~AN9$k/GPEu>1K^(~@gT.Xƈ@ L1|Q?$}w<90nĬ~ ~й R;vV|K%JuLvw9u(#J :?qhPw~>TLR9y3B, c3"+onjTe%R\ zQlPlJx]qDI/'Y_t:ǿsQ}9 D*J>hWp' &7m!MnI,jibWnG0.6[k?vyiJDԈB@WcJf!Ztj4sH˾/.b˽k; EM #6n.{ދ]ga1luϗV|1y+>PҋkysݛP׷=*/(^/dá"{è?hxքlF;TWJAf ፴t(@(tf5FݱO+qQm#5[[ TQQR" ]wv=>񟮜=Q1眱](}wh-˝]|o+Bzd!g.\rv<~IT_15P`6G ґ,V\4@Df fsxW9ia'J7M{rԹ{,(׸qk jgqzs-YKhͦ*W;<'l]SMq;m΀ "D傎A)iI=[sI_i]mބB9;g'6C-Xċ} ;87%L_6|wV;ZQfak7z+ź l8Krm8^tﶙ! zf!JpϮ_?2FP}Y_|\m]; ay/9^gϻ~in~*(-ib0nx#E&G{u/;y d;anMzN5-\s`lC ox-h[Qm(zT7 H鴅 $lőJ-*U3 P侟s@7 _7֢ExCQJx3@.H1|d6N{%]rR$p¿uP]y }sdJd%Ewŗ=$eB-9K}eu^R#5wkƞ?ReQ^lC ;؃jjRQ|[/pHc]p`*dmQI!l䆴TZ6c+*9b.L(M 6B5ei;$tefv.[3}l3J&Y3y =[IRġ}H>-GiorvW^;Yi='qr~q =f#g=2x-Rn MHHzV߻ 3ѢqZ)"Zy؃vOKOo~ɠM4LF>Z8ƄL~RNr\n$[:i[/G>H !&0PIc*%Ijn=cwH9_^d8Ɵ8v9N&A+0uoގ}t0 ;lՔ}"-DP^w2vъnO@Fm%UǮ(T'z{)ZP][.T *&FX'Jp :Fa/uKif:"ly2A%lD_'DM.[%q +  )IS\&ZceAoe{YE^QXUEK.sgL{Sݢn V񢘶ؿ' /w'zQpNAݎگ@6Vge@H4I-{su/$Z՗p$3Dnh?'Y`֞|v?~xK5|$;NGKCh*"%&8g!Pm`84/P `vkOy޽M$hs\&?Sr݇dF}ׁ'9$f`ASDyT2P2TKTAT>U'+ .|4F: PvIubqGkX1%o}=wg=a ^3lSOCanOĴffw*)P /`"Ո()IV,+`&qV ]VjD`UtAY*ɒ$:.p%%#J32.9W(kv>EoZ2H> bHd) 8tfA# : m `4DVʲD,Fs%Q {;SƌYe(v_wr(`C X5CÂКLH&qDChs VG}wr'{>`&3N&}? MY5{f;϶5~diВcv¤ne::GݞwPUV7%0oE{99 I:SZ_Bgdh<b/W1 \B[\@h5;=Fopߨ,ZPzj?`TNT6+sI_E삺(Sd:%!6N2ω7hP)g!*/[& 잺h`hwܲZ3Bg㒣rMlpS@ -02v4n+? xNsSwGem# AHв h$ғ1TDQƖ$:ik4}{1r&FM bhlg~`N.[iMVhGC,_o37w<@RζZ-Lm|xBITXL n&㟏Sp͚)+! )W# KS )OVp#@Pn3CTp,$miVX! K.!0>88CK8KV!<~gnnV;SJeÉ4qhč e$R#ӈUJn1\oH*W4B7Q혣QcS4PS`,1x` hXal].S\MOn #?z_ jmq2.bEBCɫ+/.  #%NhK  ,;,l3,E&ሑo8' :C*.%S-iSbt&)Xے*pq>6w q>9~ t  kr>56N n75]Xu)ű#Y AӨV8{q KQkM` ;=7.V\PA0 oK1 ^^4v_>(o3O(=.f_ IV㙤3X ߘg:15Wb.f e ȫ@0DF u/kJ}1b&`o{Iu2WRE!m@;|fi %bH&ETqy)WpDHͮӵ>N:+›Fz HSu` RXD_`c/u EO*^ֽvC:)o/h\mVq*0i +q:EЌ2]jöǿ$a[ "ۇ' 4z:QX۶ lQ"doH'dq#Z90Ƙ!d!s?&;ǙۇY @i><:Ox^| {îGM>}_ak_n_<(=]x#dn9ĪomytpFp+$eqp[泙wgsK2*NUur*'w~3~r}3Ӎu[&LdLjP ?YO2D`8"q+_uQ_Iw7fDA9Q0 jYS *Gy$YZJB0(ZR _ՏI?OnCuZ-jM/ǝG?܎ɢٞj. 
WDPrtӯRe%or.snUKg#T|:s}u`N+_WG* $~kvawt.ٴ5s~KWo?ve(c(z_?E'0hhJq*PvQ)[V0k_z cZt{c%=Ŧ`Grґ|""SҠu؁vӸi-I#F6n@օ|"$S^I ց }_@4U)?eOx:\ Tp,laߤ *eU4hL,Nd @KT)1]ݹUܬ|_>B1;w<$ق+uMj opxLhe9<̿TyD9іEG8J& C#7HwC⑦qr͗]on~onO6dXz229CL=:aD2TMXPr/I`Jt˚4/( .Kxh+ ~!;lG}㑉 4q!'1<' rVѐ7)Ie4\oүdEO ӶGV!s |r=ErZ7E;s7\x%a]~ O^ =WsԨ`socq=Avc; ?3333ja\Xm|IKaCbeX&5CKkg:GI0(#3tWO(p୥D(p]6CJPDF @Ԍr"A^_K^x~uƋax]5}4_doo_{~U+0&-l֗Fc-1k)K-Q YP}_dB8OqtieH1j]$*i,6C+0WW`:0=Sʾ~Tu7.2lszF.5M켉ovҊ~@ׁ-=Zy qZuBc!a!fg,t+8%<&/͈oC4zZ ʶ3/ǺBR:~kr"^ɭ*N}wC.;!w7X#(k|!\¶Mv{Tݣ( HwqBQw$.2V²b\5E|]xMQ9 M ܔ?`Âeq tw]ysC *SM!Yn2H&QxQ"SL\sBp5Jʽ|zD4u)ō38֪J ߒ6}^~~ޒz(i-nܲKiodi')6 *82L5IQ1.qWoUS6at`(jqoGXO[z ~os\Nr?(5?ހRpk|67eXN/KZnjus5^UkMZb9]7Gbq!j|Z;³5 j5#õXj2\6 y;*=7>-]|:Yõruc\c9/AJX5X)쑟lXA}~܊Y#jgj"G4}c۱>tKpZaa7qAºyx p[᭚ʧbbnTQ8{X ||Ͳ؟5˂nb]$ڟ݃XO֜vkeoXuoniiݡYϟ=|Iv'תkQ,ԸRnKm 8B[;:>> Q:oƽgϣ'8/1łWbl{lpQ,1a[y0aAp6B)o_0 ]p:}޶߫^gA޳E Vw.4Op$JWG[ w2.Wpۢ'D0 eh>c zNQ^^"LLeÏV!mY}aҌ0B0da%GȀ&= UmLnƘ$'arJU$E&yvɉ>IsQh/7/:q4gG7qnJݤ&ϝ9.@Oh`>\(V35QJ"](@`A9oV5᭩XJli\DM$Is"]G`Dô(D4|,BIr*ԕBcw{T9n*!RLji{U.xW`S.$$bt;ǁvørqF햊APg1bxD'w !߸&Ȕ~~S RIfh(N H j7D5ײaoeͅڛr֗I.ȹS^xȵJ /glR ,$(D]peldF3YPB@+!()`_>kB(a/\KԜZLf(,'c0j3.qL4 B𼐦C`mZ%@@:iD L0? ֽ\4(ЇX=!t{GL@9* i߬f=~ mQߵJy2"~+1 9%b,I!&uJ:x꿄i#_F`R"38:vs2ZÃ$[Gp؀֭tpXOFW՘S=5MZ`+-4 иX._~@u/-Wg no6Da9ZDZ= CE`zBUXRBu#};4 Aif4 GEoG="D^Z]7A ]鶗"}<+~nuݦrۑ NIK2h ™t˹T營#5PQC -ŔI@TLJJ8EabRk8M3m.MTF %Iȫm/dT!R}ʔdS@EdTöS}2G9*0[F$09-`GMv0 cYfA^Ea;kIPtְf;s?}Qwq}|3JdFKsYIh#[lW=v[m_ɏ@wy1Cf*dVro$T"f@kX!Y62 tOqw Jh"'D [XlI_Q9pCȯa D'qE/Nf"v9<̿TyD9іEG Żhأ5EVvU1Lt\Q҃-Emd[H4![ήs2ns`~³lXytK W}i P#yӧp1.-pW,o@<˦C<Wa\a沿XP'vw/fcT!:Xd?`BaQCN>%Ty=9X&eISz>1H!7͵{(h!N\E>/6 JqWؐ5PkO#~#8@^^FœV& {=^)a[ hXLrh,:_|4)ŵɞ-naw:b β),oW^ޮ]yy v~0e:%*(ǹb\Y#r(8猲. b B9Eؙ|>A~3y ewRz3Y4 {aӺ'Ƀ`-ԏA~/˄ʹF;,VeRËqst c229ӇI1hq"6rJEB5| 14{{΍ϐFz,?V@PmX!td478&+PEmWE*W$Tg_71+Jz{VC<{ 1"@湴B!Ly QA:)P!7T)rxg[VB~"XDNj ^ 7T0ȉB(Iiڗ} 08GÑCGjpwbn="Q_Aٮwgj+yc?}+*zX]={gVmޞ1 #4n ?~|w6r͂Ug)ͪg\T*_,]5J?&9X91ToϮ|P+UwqB A w1uRj!6( .6$/"R{ K`-hZ X MW=yQA _7ꝉV`p8'Ϟ P;{|Rd9ZY믟'Qxxǔ!W;~Hn Zu:m5vGCQi[W5c3R٨+MX{xn!ׄr:mcn:m-X*Ur5'uKX͖uKڡ7dExPL^jGѡҨ.`UUMWGL@_yqxPe&GO["!>|0E юu}kK> CΤTVQEcFxQR {HDG(du#9 z>wH31r=jj$.5 J lǷ~}U!H7#oIDAOHD DR dྤR\Vj(es0d8m2FϘM%US B%s; a2G4@=p.EDJPE"(T^s)׾І㒳9BA\*G3fzwJS( Q2d_'/Rƍ`'"C z U[ds8 Z0I="$8ǾrhAd9U;~|] e]wmU9 U-ح9.vCXQ&t/oݢr:&!pCe{7`}:?* {?nQIL+-k\e$!rM)^kJ%TVJ\Vue'~NRo G=5oy `cwmu8Թ8BB;AQx}܀1 wabLTk?5S%n"&ƫn˒ &mY(n)Ӝx ƔL_QH@51ݳE%J,HcQg~"0[ei.k=%U}ɓÖ/0g)*Z:b/*WGmrO?s/7׸?P\=0c|v&1ǚKNgEbR7[?s\~lQx2SLOd>'OCWhR*eC-(N8g2S=ٵJS8N p/mΟwx j>$O.)2%(iþv3LAb#:hݎx$ݒ'?v))>z:K9ΡXZ l]]D?|gZuL =LfS-)\Z-'få{h}dB^;(nx0DQ3妉Rȳ\&D.3Sۜ#N!j w D[#]]ܴb+QxǤ`GC"?aec8+s^zLb =&1s#ۣYČpEkC{|s^W;Wv}dڍ~M+B+XV#0aZ]Q=&_Cp:֡_i C51SS_/7R<ʗA Ɋkzv_T4*]~Mar22+y_TPzL0Ғ.4pzuקLY/KV2(yi.sV*AUNAk  p w ,QR_R_y- "҃6;u`L 4ISNi6"F? :Is!  !jI @-X"1 +OcP+|*p*rB//X ;ቒBΨVhDp4'hN6['-Ӱ 'vT=BPVђ*#(Jp ++<+-T-|l~@ 'n?oюWOo8:iLιŵmՎw crCny3(}lpiYr4<}PPz$4ǔ5JB1EKǦA h+*,RTعVH~UuX>.wȓhhi&I ]+#Jk vM ,qgbLHM3Õ@!̕|;UJś7FzW T< ĂZ*V90TzZ=AtR[<h~N4Z޻ ^7Kwk3 輹']d7{6?5AwH~<:5ߊǝBH@}wۇ_r٧qL^h̿姐Y͞>Qez8|($=,Bt[I UsOO>sJvз4n1249:͜ʼWiyMnp\#g`Mg^Z-UDu)R }dx"}GAR dt,8JRCkfuFsި@(P3Zk@^;4fh{[AA;;ѽVO;ĩuǘc{ϭ\ *͋i+X?"qFc ҥՌ{u8 ;\*" JhEad3r՟C&2vs2C8d”&tFko /z>)1zI<'̡б.WObI KN Å9hV <#Щ(HɘEAQ{>(|B\dy >,"%SN@.HK#U%އ3Bնkz!>Z˓Av5qAmD+j2x# +PV)*X{D6r網Ymh)|A -uAP N+ƴR +m H ,^FmONkW¬!7QZ.sY%E 9}TsC=aO+м7iHl#2ʸ@SOA.r|\}z}G]Es\ADMYUJ8. B9+-/l،NO}QR}˻/]{6/4/wOT|啭7n Jy~jmH{W>?L&" 71YɢMe^cI>8m|A. 
;kdL(ۂRPS$f4Cen 5DZ`$\U PsIp!E|%.^D;ys(!\q8Pi<epD=Ɲ0IWzu8q]i$CF~fEqKqk#}btnm_}ñEǸ!RR?'@>)CnJ.i4޲tQ&Q\AF,W}9E+[==Yj&re-9ʇhtβ?ǤD{&tTjGA6<::hSBm99At3{AQ#ablZ6`s>s}^nރq3Rf#r}A>mm;%9x"m9-} 4RRF8IU+]PWޏ1l }|ɡ攒C1l'!(LN5CU9LmI~f}~fgTߣI^='B`L<,еcNn7r,VLNKsi`)1@j׎é~'$I (j$ZǙŀǙ[OGå޴RW-;.Ôȃ# BȪ Bz)umJg0RN>(wݱ9Tr2bwWE[>+@50eSLK?sa5^ ؊]O:W#0|CpdgC] .s IwL\W[HE trp t)ٮWצUMC0YѪ֨n[]07Wڧܟn-[͉h%1βJ-4g=#E#ON_ CBZ7{oC[|QI,VW EEi5<ԩ mhɉ6.K)d)u-Ro+'-xX'51/@*V;ehQO i?(n<ㅅg8wv:BT вJ(ԩ%ELirǔQ[o, x.Q}ab2Ś8Q$ eqdE\ K#ԡnAUeJQ{"i36 rBKMJ"*%-BJ] R Ye,QK|2j@DZ $tqЯ/AVhBd~!oڕ5(ɸh{:DW/*T9&]0b1.+ YHjI#sbd )&FoN(2n- l'K5zEYx%F+>kQN+&ݿ.ڬ>?yezBDO) ѝ5Wv>ߡx=XŌ]أ 7A^"}&|[iMINwMIW8㦣+娋c,ԔFwDMhBݝre7Eq]/EkErãtc5#Bo$YφZXCV(O[ZCAbJ9GHo%$  )eLgb F9YY.*vl] R2xanjI|Cr!o RnRN$,KIЛR.~y-Əe f3jzOmߋ!߱J ޣ;ޡҴY SAXɢRzx&e=H+=1w4j@G>Eiv%Ecihi_͞{\z4s"YFDh?iOSǽ!۹c$FsU۫@n.Hk4kId *$UAMoVb[z"$-W}jv[2呑:#sQI/5DI}Q%JS5b%|TRD[ -ӕQ+ ` /D#(!(Sb22U U̚SݚXUt"Gt .T'2 )?߯? 1ZC|U;F䩁V N2#z`${[tq3#~ˌOk9B -Ǜ+҆jitoh64!GwHyd*F2&yvH9χSzt;{J0Udh,SX xv"9kYA(&Uw5]dǚ6$G tKn՘+PRjJb,9Q\BHFEL0W,5%ᕖ(=B(WᡆĢ=ѵj(n SܛԔnyU-2 ?|Hnķ|/EI0S}10/O֐v1_ .'-'\^[<r m'cM"&#ĩ`cGk%Zn_Oq]tnB$w3x, B) ZqL0\Vb%M9mY`)W9%y 6t9+o\Ay\Ǫ $ iaLV٬ 9G./y1smyC$*wP |9P*I.ȪRVTjRoUi[baUz8La $VTst," CK  QX*&*]rnLhiMҀ]̉j5#nUHrZ CL_2$Q-XDi44֊b#6x\J|fV]5]Hо#`5(-^aιS,GpAQJ j bV$54F?o@`QwnpI{$#,|d#1:Q(ײ[I3g݂cȺ}Ɓkh%o /Qn5=Lєd3'ԎgU8`^C{[QsX&.ygugII=2OHCazS}W ǭu G]և oxcf )xG̛{s5o[y# PՎ*s֋qHTjT;>\{#Bo 7{)^^ܹkZwMNq+C*LFO[˿I2"瘄 IuUpJZ]:oy9"wt\:Yn-W=l9CJy\Fja:wzb뺡ƣRrNT$XF+[b+(sBZrblMR YpJw6 i&pP8+K~rO_E'K 8ODN>293MAB:L DĢvK9\vT@,^u}u&R4XnO,@`=4S S^&v\?yXemyn9dS/o6g,Oc lX?ao{!t} pfEQ 9Υ z,%ó()(쁒jѓ4GSIz\Z32(㳸kMAP -lx~r%pqOg_$yCnD]IRٓkCp5AR!Ysr47d%hmNݳXa]a*1dJa Qb8* .43ku[F;qqN;m~$+DC1"pجn0J {{EhaԆ]:Ѿ"M 2& G{;)xb)'0%PK횮>ZBZ$^o|T#sx +G8`kjk^x !mEJ[u7gL|ӡ̨ަTI!2zso!ֈgozyK뀦L]3ae@׻4:jm5m6BjE1.cݟMCy(ߞ%qBfK~{o%7T+nbZGMtmtDw+ĩh x }nh۲Tf 2!23[TZחKhsS Tr 2IBHgLn#Zj2!m摆 :HAq u䌼)E_@ 移t/wB^'_Ͽ|t1hr3Ej(UY^%-oefB`/LX_?la~3˪#,ީ +q 7 Lk!}޿ ޕ5q#w\-GG+i&6f&|с!fZ-Y1",Ud`ղu_f"@"6*m^A=xXFR3"L<棡s*&Tc֞yd ذ G.oЗѫ{t3ZN2뢦}1[aacxeIJ:Q]V՗JUV*锚R $Re h)6((|`KL3NR2rp"c V{i,qGJŕO%I]jإ0*&GHaϥbD/hscjRs ^|mY,&/{ֽ`ڌ]]Q&w02XD?'  ҇VJ|9n7hEO)+[Xd? A$LqoJ\Q:]}Ǽr,8a J'3@OJAV| ؾt|Q%v4SJ3'5L=%٠-B n2cJI71h(LJe™ZX +F'q PX&hrf:0brK9r`%0'x7:6O@XBXujzW+QlosiDl>E^|*`ub~J-hHN@Ckӏ/Fa<_,װL`5_!t,_'Sh ,Me)(7=f_S^Y>Xԩ\a_DvE.u+ҩDGuwNv8ZQ"ZTbn`4VKbJv! J| `c4 @-8kV:4*w)׶9Vs)~̏`I.o Ua~j|Vy/Y"ٳ^M65AVchu5-al3CwYBDžڻ2"%6Mjը*Q-L oA']+[e]:Xt2NRiE핇eB删`} *kbe1?n"ia3'թW"=.2zu)i ņ r{\v)*^OAUR]AKK0[N]WﲕZI5][ZI!] { =C|ގa?+)bm$VVՇUB%H.|5ю_in[Y$.twqvd1K6,vճ'k5W. 
\OW,ԋ>GR2CM>r4՚;j9S3ȣ|,>2Y7c8t UP  7g5aw Q}tРJ :ʚ'ep;2BKYK* ULfriZہj]qwY#^_]ףN(#zfrn<ݶ)@Ѫfr-hvYmm,"/7xKQ.y5 lk;ƿ4H?_F` #q]J|Z5/e޹LRi9pZm%^x%^d%_O0g4V7| ^XY !q3Na18;]sN1Xu:}[]D{8[ T6_}bTLvr!I3GEv2ʬWxm˰Zq]bPXe7R/۽mD^ \Qk= q<<`b@z8r,eX' V,x!5ԈqW~sddkʀmwvޭWzs<9zx~TE8{ 3:pLyt LORmXWZa#.p<VMg˕eK,xvgON"Ms]5 JݍȺŝyRf.r՚^`B[ q|Cf@.Jh6Dl~~?]^N~ g`.cȐkٻN9e]kZ\koHKD}J>I@t>`5p:@smmÇme7ۡ]nf\Ҷ4rkۢwnmG201&&.ts_>|HUXLF[58otn*׾ =kŒR>$5THʑ;S j\n|xr??0}&FIfff2ZY |@_cEE ˮYHҙj{2&^W#_=91l1RloäOs2T,yM~q?N9skB0vNɆC匿Y|nA xl*kY#Y/Sm#faW6C^`U؟2Ҩ9QK:Rǘ1ÐFS>5qkdjK+!w=ejx2Qoq5\n϶3k(LX8vR ./RO~ ^1 XvqRj *K20Q~->͸bn?:@F-.lbAqݷ'忪^]@=!#Ԓr_&_H_+s{'DBěF3Gk%UɬC mq* c a1LqB`D8c`3AP/]`sc8*-$ 3ޛ|㜵-!/Mm{)fEK㨷T y_'\aWﶧ/#ؠ/feUz&\ɂI˷lyAw#Kc#Gzԁ?`AX\A6`A]9mM][ka#NqyaN;&R\ʶ 5B01mOr;: |6嬀dFs\6R{k%j' Ϣ%Wk}yԐNЄCuы-?ǒfq0ݍfiL]=\|DJNƿ)/FY2&mBcdO$raS޼3ka]~>|6|\EyXΪ`4aRzh&`S|#Vb%IJG\GbtF~% RD pP^ # DGq?#\} '+m{W Մ|?`ƚiIUW4iRS#Vn'=b?"m'쓇;]O>Ԫ+ջՀ.;lAj}+̷xrd3_~?}(~zdUiէVqx˨>{+چ+6^BW't|6O E@sZߧ<Iq eg"i_2X:#Ie+U\'))t_ɢBl-z>U7 AV>0yqo( Hb2~$ O:Fut6Ng%9ʼn.u+ցa0:DayzJL JN6}^`&[h:h(cĀ=ϣ\l4o٘{a~ V' 3aO6:!ALǺ`#Oe`y!))Ä ­@#AZ&7}4.M&8ܙyo"ȟރJz/ 良\;\u ZMU-^-~-8NQHRD,}*%ep e8"4hnS)DyZ"Zq/eڂ*{ ֤Â>t\aIJx+5?R rTȮʅgCĖ+4j>p=8v"B(gCHhO[Hg.uJ{RCDW|X˪u ,&.58 nΝ,?ֵ,dXjHHuRBugQ3 v=p$@2sBn.g/OI3L$=PAZNk AJO6F[Mi ,Ma\T]Ĥ[q|ksx>-WgTq]`|wHxvM&'ԣ>X?<4p j )DlP.&Eiv u&BZ 0+Li>Jb1n*뭂>TS!Lj`QLR RZ ,@t{kdw) \6&; LjM"ab>LZh2{W@ sIb-8 L`"h K4Sp)%5VQ" l1Xq% $҃aPhAO A gZV#tFO 'jln8O1:H„b1Qgu^PboD `Yd;p9 2`ǂ&#),r2|mu~1XFHL67n#鿢e=Uުjn/f%)cm$y&٭mPLQfcˤ@v?M [Es\:9C$(Ks(z 9XT>0z?R3Sܥ"u.awj$\sa ziG SFp|xƀ!eÿu[: [xc &.tD{=cx { ՓZw&9Ou<L+*v};B8de3':`?Cp XUUP2pJ,SRf]X#T;mN@eri`gt#ZP)[A :?/†{3gr(5?b'N&L*y(Py%72PFHCr'LNu#eJ] :V/K/n;ݠ8Rh)ks ʬZP\I[Q0횖\w d)O=e227!]"ԐwK+ Bk#vPj+.o߹_/g|LKm: 6ݣqQx>}ゎBP;gP=-x`Kۇ3FJMʥH md:[80=۾%P`={t{G*@qҡAnt.:u' 3'_`:=c' $г9.(Kdq\b~{l%C 1ed]pZw@ LRP|.u"ZJ}spXwhQ ߎ֣` p:ʋJgbܼtAppS{17aQ?Ǜlc<;?5৳7zywg<30 g~Z l0#tD].U8]_sj-9%Vsr>zeQ~VR|[B5+&_ݍ]\R{x,թ,.گG@tE?1Urov1Fo=N;^ŽEIFcb2TF=etAIIңϋ@HC̋J~?.}PBb`*ɕލ_ =B&s"w#bNmӣUP|ANDfrRΌ63OSYeESFEZ^BX-shts"hk\TTY(1**W8"t$(KhJJ;.^Cջ(ݺX\XZ)-E5Zh"Lje0j#uxݴ(9K S S M"0׆R&O*D6|݂KiViڮw""%ccp_|xEe,g9MBc*FN r2yR񌕔gJ/cZԡw~< -D6d(yu`)J;#mGG9v T Ti)Þ:MUBW';|n_Q1RHO:?de;U+_AՔNzyZwΉ'o}.| u;`sY`"}/-@k'K?1-Tw^3cF &d_a>1&cZhz?qd|KNHguj8VCwRRy0Op}4(r88Fӏ,QΉOgŀ_}`~: BD[UUPRb?&y>dhBAÖ^lm+&W,YkO1K86VDcjxCͤ`|ʠz);}o9J80 lvk8$`7#b{ۛկ5IA_05ĕRvCareIUduER1@IL?2G0m ,T>F* ;ܫ;BG\Qaۛkt&G9җ+~ł<&h^*:@']A;42 }kiS#%XqUز\Ik$V%c @x&AЊqĞ{΂w"U/doWGLMa:%d#M2ޑ&&t*8V뻖<\r!^ݺԔNGKx#EQgXOVPHd~x=#ՎțCʪ"eUKNeuer"",n5p6 /",)D9c42rJ9ƿjCcXmzH-ɘFix7mRsFKjvabc_v 䚻)OGԁ1iN$uncʜv>HuʔgIS .g$紷Y&K(]ѣ 7h*U/H{TRB[A@C8]K\ C~?>RGwJ*_H+ 4e8x/"67+qLe<ٿ0;rɂDnQD֙4g+oϊ 1cN;0kbJBO!(yb %r M)O!摠Bc5sÃꓺG=]yEs>inW4i8m.(n;~M5 /ٿd^ B4mqchNa9Y32HT zJ>-o,=9\3HB3H $1ԐjA9ZNxV`iLW䄴3(s?~pZ-_xLZ>M6&@{P`g>5񋌑TtQ~+궋N:tJR>-&7FN~(at0KI;ڑv/E ;34gQjWf8bp6κ_ M._уhPDNUдFݧO>ഈ;qBHt +תS NZ1 qv;v͠mMY=aVyU9>D}wXo@LܝH%{?ޓar4_~'s9T.UwIck4T0s/@o&ۤޤ r3>OVrIc1NmFUj×YU,:\Euȓxd?7# n{Pt$7r 8 e&XhB}3"< ppILnstEgyE.k6Γ2 }g:zEI4's؀Ga7}"cBY~ki}-<Ac‰d5'[50,_Ul53^]X:eR-)"!Pi)TRcJݥ*LiP w:k8~ ׸ ǽLN_}Af&Pu6~XTI-T*?Ci ;3giayLj*bq15K x4ƵŋȸVۊKw'LG!BQtL:Lŷ}?7#K2#T|j0ɺW|\\F1H3ss,e^TiR!\Wؘp\LZE/"Wm4)*?m 1˨Ы2_ fVI(ľu:W',q֤ ;`ĉeŜ ^3Ƹ eaN{M*Nb0>feHD02"=2y..eqP¥ {RXLB5zc@>'7`+KƮ[WJR&KMkF_rUE\mmLVV9lҞDwיn* 5FjOsoG%b>'wψNP4eBSs8.Msl0'adf"*#R9!QBjebΑ 9a'-m%5++'CъZըE 6(]AG/ş8^@(S1/&UZ0&=^j YcJ6uҙy ,;Rte{jW(68ޭmujvb΋+ogS ̲ް7r3oE3x)kaxEL3GVTBaifFϙ0xQ3_viJR/Ń5y/;^K՗dL\g}Ȟv:DМ [k'/z;g_]65!ɏkgξմٸ|8Lxp$m%IzD,&黄R$H;K~I^2JvH>#r{z{pr`qKn'%F&GP3ș9kwOfضN{x 4o&o΁IfN1OcaR2$ D]c_fX}y2yxu zbؓ6y<_f$y:v9Z R0x<+:/_9i_~r\RzA._MxFwۻA?_J$z`?yRnØ?ttZ|+o~_+]3wo^z2ysY 
ۓd%#I䫋|rntRm襈g] x%C,G4 >G#q DcYj>ToLaOҐ4ӠpR,XJ*dϦww#_u D 0ackp7S+81NcvZ?OanM$4?`U_'%ڡB;{ޮ5;;<8'OLsiaHo\hX=[{|rz9b: @I{PN;n{xN;fiw#EE)i Sӟ 6t9o'axs[1zy?6GOg@${ iRX4lrQםNۅˍhwŭ$GWM;,s)U fB OWߥQyjy:{PC"T`FŭΆYO7\0Ǧ@k_O?]lywKu7~ݠ}%wڽrv'~od?-ڊdOG@+Vx 6v`L% eZ`rc ݸ烃G`Pӈ\޶XJٍedu8 Oi#o>aGN{pWLdv3kJNY~۷Eʽ5ZSY溯}JG`?: -G+*MO; ʼų=GH_T6Flo'\h*7F| |ƶ+o,]hr7?Lq]u6V#DW8i%τ8wXibJh$Z3L28SIw(':8{`S C {r>8caX2剖3 qk48ax ob8~A}* p30(.g=}jON ]T;Xb $W ?rXzcRJdTH\ЋQ+) FFqp֎X5Q7o-t̛ivvsdտ>8'w(]vȴVب^IL͋'"P\^)3;(fdw3fa)^`c܁FevdyJ5Rĵ[FzW[qfY<~MY.ʁ -`٭]d~7);uQښv6Lg}Ab#]ʔRa2t0P,VΧ{ hE]~DbJYB/GGjoԡ2WSx+̰-ÓeJ/c^&Z"?#dQ &ⶬpb-ucv3pV'^ϒ^$z#&yQ/.0 i͏]]o+Fط(؋Ş]7b]BIPvm'GcvjJ D%b$d `G]G/< 0IbBo[VRi_cīg֗t#P48 D6kތf-vл~Dx-߷{PL0}I%zҪE*nW ovbL RC\5$S% 7si5sfL.%'68dR1n8WUsy±i5EmSIoK]zH;{O YI@v}ߞ_qSf^vح r_w?ds6}F{*U-5]@%^>toA%KcIW(Z-jZp&Ω=e$ؘ]VZ/MMwvöF暬qns>0MS1=HdՉʈ`Xg= _uCXj.eKl \AaG1u~]~Ũg(B‡`Ry~Zulh eW|,Y]7FkѪLA{ ܸs)gH9MW%x]هF& 4*yK/?2gm菩31WA6TKkGMrV+,TRlc<8'VnLH᪕X }||Wʷ&=lL~L Br7@EX%Ŵ֊ͭ2ϔ=t7\_oo֎Ts$cge)F,V.rJ2ϔ/::|/ۨIE)5,10V!M-;5cFꁍJDz,QƓ K/$ 'K9hGr!Y*GS˾r '\UDl렺619Z' '\Ľ?*4XH.\s6ȓcvǣRg ^& +ژB !B:L{Xlc)aG/G\C}`fd`H zņ$!߫>\ڄdBqSS ýksUHGKo0 r5}'ߠ~jvFQGWxiIݙ0.c"(AW҆ \:l ;ʩsu`WE_ tYSQA9^b.thq3exd zE/d>H82ld_8Ș{UU/lA>-ȉb _qLt \LӐk3k!1o^E.wlc$Er RF#ނT^mX$nuܲhlAyEf6 Ќf{3az> pk"](g򖀪rb`GZ,1܍o_/YcØEدo7QEXoZ]VpOnMW][{3/ C=ap-jD!&댏]ǝIS'7'g#gk`hGҪ<SXS5R,8|T|>+?8=v1Ε5q\_\^[}=R9.l+-pROjcJÁ$}3d7{0z=wO{QR&V*oc~03y.6[%>%t@XSMlg~f0Yl[܁cpQ).Z ?+La>? ܓyu@ފN2]]V|4yQIe%I InڭSi~;0y"J V%Y.+`97!AbPZe&\|G"0y&3,L. )!C3 SaK[_ԉQаXm-1'.+`g[½%v*Kd 敂fޅ rZތڏ惛|i5ZTR^8S[5$;d2ۙeGվuEfK, b4B4d};0`3C8KJ+/2HLd]'agc(NiF!f@ii^q~0a6HK^J%ʑ7 e;pQ`T#!0X@027D1~[_,<ߟխc\?~xRaܓ}svSZ[<~l^]cWcsEJTŞT?*ދ7]ަvn;>=ݏDvЇ:ˈGYC|5.p|I|ƵW`DDTe7PD*8 EΩ 9>Cߺ~ {Am+wjZ.VjvV(khs[dwiUMI\xT 4S#7 /o!&c+4CۆE=//b N;ImW *X }h;㻿}C]䇃/.G "C!?l?_]\ͿYAOG߭7 7?9f/᧳b^(m".K+h1K1O|rKn26.nŠ mG?FsHͩס?ݚ3>j'͎!|3͌nN7gܪﮋ֓A.ضxan%[ ~a˽%3yM^xkzӔIy^JP_gB敀[Cf91HA{oٱ{} w*KfB .w (nبl;WgilqHH" U5x9ow&2*V?.O?]#7I.ؒ/u{IGZ+4{3>+0Δ {k%YzҸuK-?y1_}vy}\~o߿y/,XfOnj*1ejIV˷WuD}\]fh{"3]DVN=iUM5ҫ1D6Ϧ9Y Ɔ>vXg(\9n)h+ι0{)x\r k; PȻ(w+EQ&?_rrgY,CK]6Gjiȍ "azh]wj]]4R>N"- ugs;4z4Qxr6;lp]vQhCbC_EeOr'N곉FncՎF$ yh}]jwTNfS~G.kd.9 5/ lv@vip񎹰R1N_~9T +;b ūs`yNϢ_&1%&'P[rjդ%Bqw#O{_]\ܸ?\u\޴J]1UI6Y{FONfTcB/K ݋R&,y-#0C>ϠJfB1,`et vdd.=}ON?QXvc)q%DSKF"AVѲ(ƎDđhO&DxTռ=+6j+Ӄ*=(| cZu#hU;7 *B, &^Z HR)PXFYX"F ]O 'dwX(hkMѶ%)F2eiyʕ-5ぎT2RKVd(1*0q^+xO2E畅 FM iSw&C`Mw24_'qf \ m"V"Ŗe?YKz5s!n,}u.k2(ē#ϰy(n.p03(bk1e; 6PkqZp2- [%48ݝRm*mv !ő }(D9ÁC$'Z C($8$ L%|ME gCj0W$RniakZl2VMF^sebYJ 6ê V̗8OYa{!aa%oh\f`]g͌1Awc;xo3`Y P-Զ Pk[P;DR VpN M WH9ǩ*D $H8*o,Z)U*jY%H4_g @uR m"ShPC1v&@yu%l͂OL(#@)YHqs#ӵ3nlт  x d` fଖK 5 s(N'LZaYQ mql{ )y?1?ovߺuiVz΅J<\vnR2I-RT @hOL36wQ)ap}X QJ93(g y`֙%万Ի1wݕUH9I$Pb!`E#̨1;\hutuehj,Jf+ 5Հ wrHR󁦰aS8d{BC: ̛@x+o(r_Dh > 2#`ԮW]~&&IT@ϣ1%N1x]T Z$DUo`O_~YH5yWr@'>k˲A'_~(n=ddg|.S)9m۳Vg+!6m%F)t0n۲o,~gf#{}N?YipT~sѨ_}'qC Ryk(,E@Un7߿ã7o_98:|k[Ƨ蠫/߬林yQ7Ͼ98zy}CÃ7?/?<{tQyTfGY,ێy_(wxˣFmY4`vr~-rbώOMV1=iM[N%w Wt~}TCv> 쳑eeUXm8ɛ83<Xpst%sj偶<Ж@[hmЖV3n &μ<.Æy`6<0?0~6dj\iK ̘G\iČdJ'm{Ь?RuhFCМFb LS4;oDILTA+/׎*i6xkVh/ޭĂjM5^lrXTy2vGI s(BF"s~H3Wc.Q9#VZg$Vi4R؂8H@) % ZHCL$e$pxVFp}֨6O)wSڰ`oXfL?Lhd$GQhQϺ?Lrzn&XF01yIh6),>Vm,aX8Socmg19}WE`Aq%(I+T m6Ab; <l"vCQ.T1e&}K%q&nZs,16IDJ>ax,Jtt&@0)Ol+  = yZd[Yn[[HT@;K,OGc踒Ri@Cqq`4FQN[pO`B"08ذ`R;(U/ 9B70r\(,))Fڄ9H\IPl%q .R8VBv)` B(AZIy]G 9)tڷ#lbsi$dfK5ATOv3iNe I1(8n tC@.˶GiŦ,i#p:5խ:ḿc(y>%nc=6:ܨϝzKk33 ն S0k/ | sW,Cq0T@/f_ţNG6'0OLاa@?";1pOKSdWylOcY֤8kO=OÅ`TDG=slɒ3tI!2n[q|/l wEo{uvoxb|]^?T:>Ύ0c]8E6_2_=Ͻ(JX 3Ň}?bu?w&Wao|CA.я_Et7ʣՠ۱q+h2 DKz]Wnb1z7sO/m.ϟE/^l }0e-w/rig/<i?߬T jѫe}7/k/U ߩ4v_>d!L7N61PmYq?Y$z Tod7ͱ}M#p"lVO]Q@$aƾ˿/i9uFv)z>U}SaEo E(gnގ2Ldg0Jl QqD@a_zt+R 
[binary data: gzip-compressed contents of kubelet.log.gz — not recoverable as text]
1I6$ODHƞNDh F UBP>#E|jF5W(jO9Պ.y4)K$jL AFIf~0._= J.֔]ɶ:2yJMJ5| N3=LqfG﷫P-ZU TSiuILA;j1țdds#!6El( 0Ő`m#k(FjW7K]T|?'bD'rRMs[Z8c$KaV4Xj܂xGS LJ0=yP| 0i{-]yg]|R^ЊuISUB@ziFa2 _ӊ}{ sܥU+X8L)݀>բ*gX<?4?gK˂E1~~z.L G?>WR`_{a6uwE-KWOR ~WQhdU%~[L/Iyp5Z=cʃK̓.gJ0sc Ik梷sb ! 1Gh`&W%=|@cs+5l9|49 WD 3!<,2S uW #@\D\xz΂5NEFsif8K hPІʙ'<8,彿Zrsx el-f%mi0a҆xRJjjd֯Vti j/KK, }X8f[B 3?|Rk %"b*%^Ғ1rX"GH ۨ=RKe< Ϩݩ&R֤̻T?b!^(̀) "(AEdAiӀj)ޑ 2XHFQNZ'Z kXék%)Ty;0+٭({ðsmdp%t&3 !aS*{͚PP9_j=Ŋ GM v{=e*xx5t;,X DfتO!Ɏ5L*j^t%KZƠL(m\K b $E!P(/kQD X0\ N#oATV+&ƕ@) Ղh#!2P[GpA` >dQlR;+B>]Z߀jߑjjڂNE58H(DqV;P)!JЕ1#!gN5H(דRI!A=SA-&!Be@!8l9^80; !V:I{|M30@ 8m'Yɧ׃rZ׀Kճ|-tx `,- F%Wp<Suœ.U}7Y}$b64rYe+jf ~_fiIu271&QAϛNtgjH~|,מx橚Ŵn,]%*Weܺ}O( +d`*n4QFwaZ Ł XErYk7hTtߥ~P^:xkkB*C\YRo I7p',ƣT!|\MdRlUb613jBNO7:\\"3NQkW(UDs 7Ncf24P 2rEyɬ h*Faiɨ(yX*QG0 mE6y/M};hrC\|)E ĎA'oPt|0:xܓZS Q ku( &=)Uް'/c2QZ=>nDz0A5VL0!(Ai:*J`Z ٖl<Ͷd-h"ߖl]tE꾖H ֊UjY \7,EuX j jY|Z;0"TyDhyޕ#bb 2x40O~ڗht״*1~RٕJL!`dOwp!ak+,dɻث g$٭;֕6,:q{຿88o3|BhW>s0vYMh޾n8کbZq[ˣWTN';å!' 6G?_PnUMzMn^p #]n^9p@*kORM(`dMXB!&x@4֡ :vӏv)?k)N.軄/ic,&X%:4mfXVr{dBk0:׻T :!{|zk@u67Һq F/_%֨[ǤИI:zsVNa&ጶ$̬K`U}s=;=MtоXzzO4BkDv~]G>T/  h&*10d]]zYgRlEaw*a{4ۡIw&O مz4|y?XzM\|j+|~=oxյ癖);Ks]Ds%aSĔN8p-LLJaug^Z6 @EN+r|]ImOZ?suTX=M+[5x?t{wlDTvb*@O~)q%$4>֨ڬ\? <&S>/ N c("b˿tꤤv3C}gf}"~XEbCݷ_>d7ָ'ۣM\8KgDLj3R{ {/pԌvm[_⑖8y$[\(TVo6N5777[pl3Ͼ~ԏfݟwoUtz'֤cXEgɸ7ȣ!o0GF&`5v՛"=27wd4w (56߱eD9k6JϘNEMΖd%3b*Y-{)q5mֵ''c$9'3Aʠf=ĢiGnH(P#ZDMUJ]g KEoB<͙y}#~D?'ί*>91)mpiq_n!ع}jۧqM탭gL׬k]v?d>%NB7`3e(a.t BvK=2&vICMOk˚ Ie"?J???rħSb]k`WLRGec(hٚpWhPnZj-6Q/ ?څK,F[#z8 r Ҏiaa3a`94q*&wU5L/y5 ;r,ONxFӳBN;N-;9,HrvǜKͼ׽iElg1h5k;AGoP2:m2Jւkv^K?JQY(U}ƒ󂢏ń`8Sdj u|B!YmL:~u/3n^7CSW_hϸ;֡՟7?tgo^͑i˝Y3p'bBcܙKqR>u$lLOz Έs_+0N,һ)+ZyS휄J"0|u-4+,##Pܑ}`DX>!J9DSD*)MEUJ U$b :7DC7)ܵ(} zsTM;dJhWǏ~]SFك=4`͛ؑ3RQu 8ܺ㞗BhnӸ \G"{$˞~\1)F1L=漝p>H8X3ۂcC ';$糥\ 9::u ~g<<<};6=+ ǦdY8ig~zvf'cy̩J6a` H}׾Td(Њyƣ_az? Y0~uh~EܭYyr\n~\n'Z#o}%α/Ѐ1a3`O¯vOl4=<ۋl8.l4߈:Y 4\#$n xܨU~zvܨUMI9s}:N,vC=$%ȎNjmP8s.DCRٟUM&̷w;uIAۓYz1cJmȽ8j?ş~w}$Իm,j;֞)(뙠jS3C`3-eEgok(ىpƨ [a{1IKoU1Gg*H Ff]_p?L2eAm~}Z5- z3QjjJ6CXVCӯO~[VJAND"`c.fv:w-x?Aa֋)dJM|;:ПsfvM tѨ i`nv?<.y!qb x9SXeq_xUM>t훖רY}rZ z27{o(T=T{2l_jzІ>dOo>>|euIw LǬH*/`XJD~oo?܅ϟ~[޶,^i7!/0P";a~!)7Y3'SX*tt̖hMCT4(=s*^!GJQGrh(k@ڒB ,*]& RJ׽>生ר~EmUOK ]7zNve()NҔ A!JUTKL|ZHB'N6΀ WuQ<жJqd/dۮiFeioaR_Я^XOpx6VYbH>^m-ןf.A Ou@nOf@2-Xj] h7Fѱ#t'r{;J޴`VAmKRnIck6HXn %>dұ0Ynf.Q߹ @]؜R!Wn=-%b 9/SK竀<7W@~l3+ n\1Rd/:Rǟp 2p~>?'!g \)}fvNu?>3Hm貸m´GfVc۞óWe G L U#l9)QZMkWOzv^u!~N5W7hq. 
%f#ۡMB9djadϏkܘ>|SHH\ZܘxwPw=/uݝ0ZZ~^JϺ`!!6UCg(wG#IJ^E]A$XXdY0 > b-Љp6 œ&&\ \r{V>T[s)(9x_[UYq'*L!> e/X%&r H9xo+hh.֙Sv1Vdl4E6m"6 TI*:TGS{풙Z_LI Eb 3Ye.#-%\Z7d ^,\J7Crg-w~͵QvoBcmߵBd̞垼H &tltG>ćq ܭ:.S nOp}ԯL1yU e\X˜ ΫbfXX-oߚbAP %H,Ș(#kmF/^k.rK^>$WguwaFKhBi "D+ⴣKp!x[y=Mps;S_u+KJfq&AbJ 6⊁GB*Ci#qi9 eXw/x 9wō6ʒEz5:h Tľ\k7m05 YO'Pi'{.PP$Q2(aeG=aTc-:(h6*irԓjXeivVmv,'X+4=nXu`,^vPq:Pbbtj=סiaFgvRgN$D*(weHy|  qJJŢ%>plYҔk~,]ީA ͪ֠AȬBŬ-.%VS0(T1Lـ*\?N֒KaK7;Rə%G*9ߪa=y0]lU"{})gaZ# gYְɄxV17w/Ea\kwоa#0sP'Y0cАϽHsC =ľ82Drqh"]&~)ӈ3RL4024n=)z~zbjݟj^5(ӵfipzu8uVDCݼx\ 3THĿN`}jB B8O] }^?~\"̋bwWL(oAazAΓtǤWp9F pA7WCnse @sDLfDAzT#5NW)UHrBa* hw+v86tDjXsVGEV2ݴ83*pԩԄ+sV_IH%]o{RFTrZVݩQ3qn-6tו}tTB3'PPW8dT!eIse7/ԡ'Chlk2nԀVXS<|O+xj9o2\5;{fA%|3N!~4\%;^:ro~ip0 ;>#{ e07I,HxRTmH^\gXCU?l$g&Y-K څBXz !]g?k[iP)V4\ǛRF׊vY!=~u4J)o|F3Ѧ<1!Tٕh~,0B^c](Uߞ.Ѡ<%f}LmdTi"v=  9|㳗|L@KJǿ>~~+QPUM|GMӕ Ba> I݇ oJ.۾jjix/kKd78le^"@h(>f̤ʸ]nhJ^bJK!6caPep _7wk&nkYx1􆭝,%Rf㇫hFATk(%jyg./}ﮦݲwe] ~{ IilF-+ed*ҠX`p`[m0^@s^|"c 7P940PF0JOh!c4 QEKACI˲ZZB+<ڻTpH)AԳxI.ɼD N2XqJ Ơ5=PQ2" -g5 tl 枣dSy(9"AEZF[Rp)WE?U.EE\>O{[6mx/ll(,զV΋ ͝zLThk6p0:yQcV]E10Dd;/ Bl9AV[7]jnmRʬXfa) ]> A,]Ѐ:E9{e>RmtS=BYm^P(=Q iS`!Xq?q;ŜN,v-=A w8"gM ND(K$4E,KʸHf N7Bx |X'"B6k;35>O}8Qvk*~Cd iNR8\F|jRJ  l aoȶ ԓEkno'= PT{A@y5?E3"}S֠WT4NJ0KC.E-q[B jCGyuF#$PN)Fߎ1|k 9[ )m,E(niS͔L7/dDgkX-Z$0xI?{ŊyU`5lki?܆xI@Hl;m&˂MkᒚVs/gS =^KCٺǿR#r`y7)[ A rwӖ"ƃڧygY4]O7(x @mB 틟A 5+(}Kuaho򽝌 N\t^&7qףlҀt\:jwh<\ kqg[hf49o~}zx(mjǬ;+lGb;Z HA KoC:Rbo$JJFNqfF$$gJ|s;_Mҟfóⶣk58!N }&;?obdn=6Sձ͘U׶tv{ ZӰ8FTAzjT}Mg*p3v//V6{?Nj6`l$ـI Xc*"bbA&[ѠiPR43uɩjjl"2XJҒheܖZK&qـT3aVMoԫh$%XĘP &36:WK^Oq?^T1 O#E#M?pogw_erYܖH^ΰj$=!xjZmT3ӴkO=:RΩ-_ua"",~\IR{^9^N+fIKύ?`w qߥW?PQc$~⃽UFG?:~6-GQYUq |I=ϳ$2] (z 5vzc>MNqWGe?qJddbˏ׽We<H DJg{+7(6w_Ő7CbR [C1*BNlB5@BoeR+);O4U~5Re>nІFJ=k JB\ ?Ǚ0cQO-i'pymJO.kbۈl6:Uaƀ6ejv5Z]rxeoɨ Z sKKL?ݍm(ÉvO[ dؿDMަXc_\V3{$V?R~.VgȓCX\)(D՚2_pC4蘇^AU;iD.];8xA>!;89P*N{ ]t!˭+zRWM'Bq8uߺPá9n.JVW {:&g9ýnFJ?jΩ>zE7hE99yف>HζD~?Rd,ZzނlZ ¬O(i$i$i$ijqbRmgҕڒ T8JgJ9!R;gP_xNնV vP*^[w5.Gij(sS?8^,TܠZOcN2HgJF~+Z8QRwW} i_߰RM(UJ:w@4Kh e(cۡNΧ nwQq.PH p&BCmiEG7w;V5G7uGwAWty礛h;fF}uUb1E;8mFI@O}28aN#+Ul_GvxFUQqM8`}yCl2@Z с,)t#Ц$o"VZ=7)* m?J$I$I$I.w,(f=#;p,:㔷̂Aq #wH lzoQIe:&(I:kֶEAyAA nNNcZ2P eAiϬtQ-y]IgB L7 rgYDpo qv}V !iU//RVlvQ]y-q*:ke$''yV$uNP wm#_mC/o <$c,fv|$x_5%;MDĎ(Z0d0:C-tN( "c#9jV[5TJNsEuJsZ)ȥ9EŮsVZRfy?($h*M71r?v9ZeCCmҼ3))Űn~H;h?bKiwsbC0zn6ο]n>ZD4|[Mo 6vu9BxԄBAC֣R08zK)d|YB[XPU>ג (3T=i~̩drvXqvёx ٨dfR(*cv{_F8m2zI*:[c^ 6sSIvszqꚢe~Lbv'GP믨Ci@*&*oGtʜire%SGelp@\ޜ I~ڶ{zZʇg?%2Т<e~Z_lJ}Dvʘ6.1}9tuO])K(P}֧0br`BfRyYB|s?VJnܐe[t`N+[b[Pn?Ml8wa&24*JX;)3#ˁP)ɖo8xm.#vBsoP"~ɵ̑ǻ;+g,Ɠ.ud tD=vs@u:S*iȆgݡزzQ3JEx煝V+)!ГuLs%%%g $;tr~C匴R#6 DȾtx?PBr"K 5%ڴ#nhǽlb0Cv:!fT *)ġL8Ooxz>kwg_ُo^W9:ׯwNE}YބIτhmh&9( T.RDG,!7D{"xT!HMpv*[`˩8#&*)r.˽jB\bT:c4x}#̨) 00GK f4Ad ~|sQM }EDQPFv#yd.Ǘ@MnI$ 8S*(\0ܛQYxctP|g 7 M'CGo?GTD EVXTIl(jTc!F 3qVT)(}|;#'팤˷1|L|71P㋳hGW3Yaq$)KCzD?<^eb#kXBcv@\`"0#ko]hO2ɍB.Gq-]5 2oԢ͂U.ZrasNyYY)| 5=~KPG߀ MߚIP 0ɩ4푑uN m'dPI f9 x x?Cx?C93,'Gu &xIb4 T{Fr) 0Gz_ Q7:diTd Gc*ᓶ0pi2Z(7~xuqY>.:d%9P.1%ZAbE|PД]9Q 2XuӠփ'8]>ſg`Q+ ASG s~t=EWI06`0NLbh[R`30KK*12%eS5PqJE56t1݇}0#sHRn?+J33xm/?{/:u>|}YIqQGGSt}Z@VމW3dۯc86Os>BwqBVSΕL6~v77x}"p m$Os}{p}W>ߘY Mq\sSjeE+EHra:*4A, LrP"ڼk%bՊL63"M)4%Z:-#fi[fR@gxWp2_|f *g fwanʧsT3Nb\l<\z6=2}<7ƼɁ790}cLwf͟{&!yC fc nn}SF&|N/~oCR)7ע4LОy9b}|F3Hv qz*ae{/ :sv S у^(:9 ˅zq%Eq ˜뼀RFlVtLHW5*r&}I/H w1[-*diŧ0z|+]@ A Dǣ6Njr@Q kFĜHESh!O :*C QDPՌ)J «H)NPreԥr !2Hq:dz"UJ0Z"5u7MSL$f] +9Z&YIA;.2X*MD1`DXtp:T2>6͚8LH 2!>\8'Yq ): m6Eo zj6EۦRʉHP*Q Ǖ\4afsqAjzP%Y탩[:z Mi_ PGxABdN*d(_!JaN80RB[N`4+&+`@Avb|}T|_EgZ۸_a˽x?TW*RRY8 3SBRq\o1OM&,ÙFhx҆|+ |6'5 w6CKI}ÃP o% 7Vl_k@6Cq}ϦY< *5͛gjܜS0 FF 5os"I 
[N$i!+\waAmgYaTqVltbNz0`M**T|>U-Z\&-"ީkk#hKۘX< ޘ8q-n94J$Hj<[iwlW6?mhG8?\W,%g}>DtyDI C4j}DTmcG*Dd Þ6>mᑏ"n [Aiˇ7-.P6 &5ם Fj.(_ $\ ŒA7J@ʒOU*rB N`*m<*]TJjO"b 4pU#*0BT$jlqd$&˥%Uťܠ611݄UR]Gm]2ݭ$ZB?={h2fbǑ{g»R >dZo_~Ƃ#E]kK)#iP9U5hu*)8*ږM%Q/] %r ([L!MȽ >G=dy ܣnC} (>giRw >}A9ۢ2pP|fU>*iIJa/y|t36T;tێ Km$(&ڔzKY03Q:ZBjʝl<[ByVLӳc&[ 餣Cgӫm+YHI9YJu ]e׷4leυTb fs֥>RLAy>y<5A@c;tIbpV۹k8Nb1σÇ[:Jcz΂{*{3]'C-G~IaD\N3hndNv҅tӽN;ivȇ],~R͖1Jj6đn}3{jV%zL028e*TH(F8&ZuRB"VZbtRōd @K%{.'l (őH(OX A4ɤ"A:1߄rRGZcV8#+hWWߛlt~U\=?~f0yB?䳟xsW2"2;qxC?t裝< #|04]"~q)#]:NsQ xs# fbT*0I^(Bj(5Esc6:W8Z2f-_׋}٧57WoίE+c@]Gi!"DRjR ]hfmdǓۤP:~dZ><8~Ճ2T柯߸!hw2?8!M8K@}# 𘇧+DDᮟro| n?Ofy0W;.)RT{}cݒ $ AcX2WrYaD"HH`"ۦT""ZXm`O^{J@"d@}zJXz(C9f ea5x_b V{N{H6ItJ vT8!hAaibEjU`h`HD2!Zm\7 ]r,΍m`F_I(xF_}Y#w;^tPcޭtCC[r&ZƦ(­87ne1K>#WvY;ϰحlCc ,Mtͦ0ңM½grgkT ˵6Q2 )aΐj!w.rF&JGBd]DnEIJ#;t0ϻJ7aS%G%r2JXS)4a p [4TL.N~7u= vbK~aN`,鲄]"ME11D8Q:v^an3RThAkK}\ <LMW~if3N_Qm? ;)"Oݝ{G_j$k1}N2a2f1azH-; _BZ9Z5Ύ{qha23 8k SyEHSvdгlgr"وH@Q- g(,g_̱sDVBEDl YRi<HV_VF-^ ._sgޙ,{tdl Z+v„&Akk>@UkB h(Aػ8n$WV@>(—^66vb}؋ <-eCR= 4)Er Df d~FkXMD4I OLFV!z>'pq;܍K&=w;b}Qh)$! E;JnU2cv20i Pi/6ylh2".h/rI!Cm!ϒ @m:s˗AxsP]ujw7=A\If EG -Kb6w" YV֩He3*pv* H%PY41T[[Tې#uEj)TA‹6veMdJ*"$q^JɤuFd]D%D|!# t.%8Yse.қ:tIK]H/ >Kv4tePD}b7aFd"=Zai+A.+ ^e($!UVB47TomE)~TӋ۲`+Ty_/oWTǙn߿D僇 +V˷Kſ޸wu7J;JZq,nnG~uM YysʉKN1C _yTIW[l'b[^&oj<ˤ~Zك>7!5yܺ Ɛi{ǍnGɘu.(;Nw01CqXŁƻDe jE@9P8c–g缎ʚHpZJ1'YY\ٰ$k ̮\bxM,{Da4ߛu%a1>LPo^jPAتŌ 6wgIWs\r3c &W"SKl-vk P䊆3,mWuuO?)/Mܓ1Qj{iKC<Im!r2 RNJUF iߝŶN1oL7KY)ţ0 )8|J#9HׇC1yiAMdJy߁sN\w7]5&`.#hem{# vv(Bm^W J-b+89)2] ;RE@' KظĺNuCSFӍ'wM>`{ kOTѧ`YDȹe-u d4jIj-pTd̓D;%d-YmF,ږvdђm]@Q^1➢(G9-k$Qk3Q;īew˸t{@7w^iƛiRNutݎHQФͧ2py7N҇OӫJX?`<>>^~j@ @CD2Nd|4:-ωu#˹kfk㐣mUVNܷﺟomM;_qRZ){?Y<56_}RCz֊V犑ԊƳVS 7Y03= Di@\zgo% 3 (6r&X;؞ xblqbq㕃N.|'g~Q')6B/+}}vN[ze2Ud<Q`䵸X.eveHv>ޥ\}YVU[UmU}U!\^u3 !29*#X4E`$#HޫΆ n)C\<-(/ؒ75q>-VOww^ܽK8q5(iV;-cٻZka Z o֨FaeUTZ MdSV|>HYU8ʹܵ YOW!ܧKE߅jS?n PHPtS(KރasTEM͇<}WQ|n7_έ)Jt?¥I\~ 'sr>>Z1g@j w/4w׷>AP ` `N B@UA*npeI: (̧mN~yb<vxn;p!ZG_:HQg:uģ*f!z0+sx7; [sq\>^ӟ] wx@ry/i}Gnl\Wh# |Zpu 룏ױ{۸~4dj{IeQht-C cg}Tɒ|e'(p+Ԕ8vءYyVrJfF5.eL/3OQfj=ghi4HK%QT2D 9gbc@)bgvlz4$3sN aZ޳Ղ m;NBL e'?QV3hkTF "ikPNfqo/okhlǟ?mXԢT4Z0tՅj>^w뿯C~r/~o6g6S'dǼ:LvWK%M35G̉ <KlRzZ޻{hH&i>G-|}:{ߕ5m@njzHjFuP\mwD g=qN9j*}iHu(gv a.(z ||k[DD@41y址XYaWa!4i&@m@!1;?M$'R;:*G &0ft\L2aχ)/սє@/F>MY׿_6> Z3,5Smn_+$UGjMIˮтE`$IuڣaH( g&D\v&ȱN昵Y/+VfC0v3t6J$3ְa]ְ}LدS*GS")0䘊!=GU32fQkb6ل: *=+ꞌގMmUUE <TѤ60K.`-"bذXOHHL[1U\Z(zsk6\7> -ئK%賖l-mQ-D{ypSWj;q̄< }PMxTE_5N?+v#u `B¬TJ逰ea5N&wp-)߬S- ۼ-pa,FuYG@V z$O.HdtsRQEۗjZžN8E9βĚޖ}Y!jfX򟑐r-L@PP^؟j,[B? )0C,L(G!XȖmL%FPM ݓj{Y"dc.zMވ(jTT9)&DNQj5 ^Q1w_l P.CmbaNCtCa=FL},JVrCb"^=Bת*vX-S?2GvV]ZF,$W+W/Lb9RG,`>-︭,K t]yӪeevN.[޳~} q1V4Տu+;sJ?_֕MP%"+n;wvbĂ@FLg4@YSg7HE*;[jdz Gl.8a4l{ Y^8PRX>hh(h&iۢVGVxn<.11kR4"J &Cо]eidᥐ@HUPh<ly`>*"yq*%%JCD%1&oR.SP>8MF/4eFP`OsRICF{x_M1J.?}n%=[#ą!>eJ;Z,A&e QE~"T2i&$Պ2g62oHH RNw wb5FYױ.+M*PYu,SRE*d$D(6Zqڔ!ݞ@ xKS)K|` Ob=5-0 m9K*ZxvQD. Ķ`҅Szx\QX ȁ6(1Ŭ @ }Pb}o_+e)W䏷߾$Fq K ?Kݻ=cN2CϠqi"Zf/4U¢lV }}T\=/n.~,V%]\o?g Qoҏw7u˽p#m治z7MDjmO݈j}o s NWm' GDϙG:p?E6GO jn ;[ yqzhz_^T?ٻ8n% ixS? }f?ն]FvE-9nQ]UHэ[Zϖw5gw?=:ד1HNU[Kc#/"+nSMHmp)ͤg0Z󻯹ќLo\ibQtQ” PulBU #gGkO/C)Z<7YX@)3._3_B~&w63+1h=RCWt⌤{oU,M=,/լ0VFž۱\.o! 
ΫDlMTpJ̌!i<7팆ZA.jtiB`-HԴ\BZf3|AiA[8ʂz6`U)BŲ::sVp=kMt>Jp1Z;pu0A᭮K P3\ߜ(MK4_I$ŽsdIs }N5ߗRjlŽFS21[mx}KM0"9*[ -0%P}8'u/Tα_-KW޴#n.Jch>h"DډN<5CܤA)Ɉ&fKBHj_'Gcl䒘teZ9Y{*# "0ԎZ7&m9Q3韨W%KHQrZ"X>mo)98C Bj|-b R)YKW[0R$]߸( Ĩ"1ibr2)"-5qQ*ox-J xN( 8K5DfF # #gqA Ҙ 5K"x`Mfqi] c4uP!+BѠ<a#r@ԸfS̓U!+!ZB)Z@9~N9)|́qFI imrvܫ{xkͦ(i _۫[63= ֫VA6-6MTR^i<6$2Y긓ѫ`dxjǿ0R~ujƌ,{*Su~Itp uXfOi+ ){RXo}`qCd&R&bjB($uƢMPZ45)Qx4S0Tm~\|[I[a.5UU+ >m '4+ Eh%o  2Lynu-p J)2pu[M2/BWs^)h`RϻR5Оg4URrJ;E457=Ȍd{P j7r#=E9HgFp6z= 6- vS@jnUJ{ϟ{__=dz]=OдiӭB{$ڠnU= r!vYF{J;@A9ZMPOW.,H>?]F?gʸE6Pq{oɏnz:pz|}7I_{zOp (zBioGz2qa9JEza%ύe_0hʈI'(y,}^:w15`qcbF;D1 {uw-|۹銫#<:A/O¡a;Bz(|є<$>}GNQb`ϧ( ɸ>A "yBnat)9xKѥaCiK;at0z y"Z"SNh7[mJ1wtn{6/n9m y"Z"SЃ8W' [)9S.m˹ej!$䅋Le>e^͢N5a]h$+,=7DjG 646ewmGPČ|&DNqllyϟIJ'00Z}U^rA^YYBFJ!t*9bQ7C,mP#u@x!|vvѹABqT;lŝuxCF]N1MP!|;G'BJEs}_WU" o:gPocB1`8l꾱OO5S=ϊ ZQ`ЖK T !nw[C]W J1Ⱥh+|GiS5^_{ɹ=חi|/kLr๻$N7Wqrq3=ї2ENQ5%8/f^/'_Nhv*7I5,.]?Y@ LN6MkN1B5kZPhp+UbuFv[boNg󅎜95KH"WZ挰!ptAOIa9Y{TU>?m.u"킼,#(HDv[dS6* E'gMc2'ߔ%EH)vK7!T~[aVz ԕ^J襤.>t@4ˊDP4L@!J+Ԫ•3bg)^/]Us,s qJ3MF7*μ@'/M&ӯbɯyv^;D;7yy-}/4Y\{\~QڨՒ&Io#dHIJ JHP@VUGR18t?T%׀Au F2J4Td TKUߛqA RK@NHM "F(gp14DƊ0Y3{~rjk9=.[.  Oo%0ņ|xwsZt,Zx-T)GW U(i7ImZrFt|J,x,RjOdʼnu4κʫhIaiQ=o^].[Wu+ee CO=7rtgP ajm26NhGM@mE=_m|`JeE'Ki1'{51Ӻ/iQ;k'vo\'n4'm8S3ԮDKO/zIH>//΋J.~WsEOtFtk\zry?-߫~q)UR[W$is39Z5$8=AyAT$>(: '}:/)'> J[A2*T@)= ^&Σ93vW쮃$Uk*2e!WW׸^^qa[W0gJ*IbX֓u"uUr?Ę#Қ;= ]R.xTG+S)̓ :)bAe-PmiF%@Fgf'| H#$X^h4 [4Eo:=k}[YSYhs`t<|*S\t9bM3cIN-A6~ F%9"hb)b:܏!FiupN-?ѓc'z:_1Sw:D E"3c u%Ěj"i΍T"WrwaJOz#W>'Vy.~oFGWv)K4+:祧S` B\epH:"r˼8R:$D)L'Qԗc \~J@Ҭ#TA``D!=0A4ʃ5j%1h$ ΀ULֹ%&&(皅eQ\:¿t7~)f?= y w]նjH}R`F9mE䖚H?#3r(|-~0%\֛ r CRO/>@R.%SZ79 /?J!D)$a!iX8],nwMWX"8zkki%J}~ӝzkoʅ puH\B\+ Plvz} LC)^4H\_2=n2w{h1_Zxy?oO![fl nx1-4=F@O -RӰj's@=It- $d/.ԫCl"~4GôM& ι6TK9#W*a )/y|'mDq+Hf63(qI˙5OuR9cD) h#&駧*y,.q^~o7~}wwnylij״\9,q*.?zp}f#uÕ,JՇ>\գX/K>jWD:Jx*<HdieÃCtS/u`nF()'LCWbo{l &`(Cf*`{v\3J^=<5T mI.Rz\wjB'ֶMŢ-_Rsuxs@ATܹԜqMՅ\>ZAOy,7cb| ՝@+[Oф(nmڅ@jR,P]uU~+~[ƛ[.~bf2BĔtd?oIEdexjU$h.S}ǠqO5}oJPn3e=YoD;ٔy7AKRлuAt}FvP FnkV[7N6as&MuAt}FvUݺKn}XoD+6%m*i!ymZ_jӒr6-$| xXN"4^Hn2WĞwE,K +}I˪Ԋll-Jj[bJn= ,ʽj}kgFlVy@k17̲LœGpYe׎6S !&ڈ1 j`j2i JC @i>HJ䥖#M!)y> KU{z4{td: L!(39ۤ'74:zwYB~+^ny%[2eIùxsl@H@0m򝷫 2 WPncr)bسg2ƅ麓|& DP3)1NI11Q<"DqFZ" $7y@h[gVQɝϱS"T ńA3DŒe"OQƖ|X=-owQ9`ga* w&_ckqv1T{W8fg8 p6D.8N9iBL s98tkpІ݆ o׀>I^=:b eJiG 5&++ge0MаNp{ߟkXE$* =q,~YWH:-ߛ OɃ-(ts0E%6 $/:88$ XL+ ɸH #]@:|s<3k<8wȹs$a`j/5 aIHM$Y'Yb(^*8$eEjp3ReJ_*5(4ߞTa2 +"Jq1>mdΤJ_"5 4+IXKR_VHJJVO|zbOJVO[)`9aD+eKiqK]kXɯ{:e#?s? PXnM6cPsOWA\\| D+de[G38ˌ6Nj`|tVcC*cS]ۺ}h3|0F4VJ3^ \J荾g.=t!8,`i CUJ<8F`@B{~P}Hӽ W:pA%Cԋ"KFdOSV 44Rډ%qN) !i%(eX-PbsEjjXztMl3IwS;k_]t(_I[w| 1Z'SOQ|c&X$5tɶ;u]ɴju2C]'Rk< -A&M|Wb4m($W7L\7:U# tuʱ&-*DP{QàAúw9%`L 29΃9F*Egd {ڨ9H[OTٻ7rW%!ҐE_ Ӟr,v vVV4 C4֛/u{X{b=dȪzGП-$F>.d.N!̇XjJOɻSӇ_|*y"93|%ኌ~~8Z9/qD&nIwn*z;-uzonfjY.vK{v*u.U~0Oy-n~mwZ6?r.?Ū:Lv8oumEzr];} 3sدXZPhfw>ES攦ߩm hRu2uRc$mwmҳ[OѤ9juSH`Ru2uRc݆>ܛkNkB+3Z9?١H$~LWV)oӡĿ|>9krf}T.6#HhzD& vY@`XGk] iUR`dt!V!C$HZtQC` Nx璶Qaȏoe$fQ$ F<,z^O ~࿿t=>y&tssZ>gֺWcdsu"ah bO˻Uj4_M$yU7ݟsljBۯ)"KPjqAj w|\H`Rط+$:\MD >Ѫ*&HqS[+L^I"o}~݉o|`>[N^]w$Sbi! 
L@\4D䬣nŖ #x+[; J>ƯWG1'rJpuJߗLzn灡mhf0^B ;jz݈2"}'=wemn۰3ҏ 槪^TOMԋcԻ?8d.=3׿ 2_(~ٞ"PL1E"S"sBB"J  ި,.iQV(L YP0XEzaq1n)&15啠EaE]PKVXZ+P~\u )O1Y>[A3ɴԡ- P-a'jʄ/_[F 玫"@* }z hJgn:O}D˖DI[ 'l4mywr!iq6S7Z>z7̭yۂD9Oiz/9/Vwww䔸;%[n377f9_qSG{YczR<ߧ(YC]g KSo߾( O—L$G߼jv5Chv`XvuO(%+*NIi^NX%k ]'^cG5|pyg+*7䵭yǰ0]˓LWq7c*|:n6tO7_f4p}VL yXGb%6hM; Z!gcP !PFjt=0Vx]w 1Hը"I^ T U"XȒ `PPTBP,YR)Dypp %DWD2[rzSXfhZ)q0tRd} ܝLɷ§K `iz7<7UVp2]qCy8sI‚1 QG#͓1J+]yOӦVoIOoœ WYák1rF"}/ ~NS_|9"g9ZF6H Sş,"(ע5 (gS1[T;W(qsNKʚL씪0ytI3EZ1;!P OjVů_{i?NNVM6kup4i;6ѺHf0YU,/a< IF-ooxa9+ZdrhHRHcuˮE9)Z~h42)&PLs-܋ sBaxOVor=G4Z0JGɇn;@9^" ȎܐoN+7㺗0q?W3yM &R=Jֳ fgOGE$t]xaq80Ku[;D Ǖvu!p7TK2)8 4ʦ\sW/nB bVH `+Ëf` nP5hPB) Q*U/OӦt@ڲDJUTeI45XTZ@+_g IP[` %#rMC$$rcRPm#%͔"R>!)pB ;[ő) Tp> n{'NFG_*);4yn gJfкMUJj'4cU n["JNp:&ejSKZ=={y~-M޿Z]n8{ <֋εB@QNo{Ϗnv381LN()A޻=PWH#M/HQT/]23s H3.sAP'1^D \bJsϺg][ 8u3d伮`ݿY+z+b*{}Ł^ak# @ZǠС2P^tK-_Zh+jTKУ:79!,*DM+goUEtF^ )VܧQUoqaO@@lb];&hƇFB;vsbdBCt7gK `Ic)35ZvG(P2PnuǰŎ[K*WT;iz*;UYi1w KIʛoOʐu+yt)sniĐk=4a\FQq@Zj +*9/m`%2JU%11JdKretO%E-յh-LUV+VH'1uL +QV (h%h;IҨ hDl3kV!FO1QrKFsEf9)"m FD"QO6d.1TH;'^R%Šlx9̆+,!% /ZdgË~\+r HԷd0ĔvB*ږ,,joGc {p?fK]fOϿ[ HO|!x$MQzA&\6Ve}̏ O,Ww bW_?>L9Ԯ&4x +*6a8g XWW<3jdA 8 rD:Pt=9/_PU4*ZC%EV@58* 0ɤPՔhwF%2\(*Mm(5Qkk5we%aUq?W#'ajdʩB_Ĭۯ$MtI*kXi >GC%ՊP/ J}]m %+QU-; 1@rAP*q@ZkIؘvAڗ}1i:e2 u@ L.0 D =xR^MG̔zT U.9 tKi'T;!L_FkThQY.* D򂖊xM-K5pDj@5"qt-(^|#[,GK#<,"}@Fb.fW(Y2<:A!Iƈ"&4b% ħ !Jfw~C2dD + q"ʁSbR2/y=oOh罴TY*Z@i&VvRBOJ!j%&^(\e[-30JIX.hf'q 7YGC>DIp;pyH)+=|u[.t]AL:Jt]v!a 0W8Zpaj \Wv*-䧸nM 뚓e@J#Z6@ArD dJzH bBFChPq? "%uD^[A +I#n挺~$SS ]B r;Re%je Sբ 3^5ʂdU]jK2E)QBvS$(R)? \AxY,?"~*ߧ b0߹xp]n?xL Dxh]9p>6ei7YFnHc/eH 92ByEtGD,Et1#p@ y?CG-#E C.k&6cj)5Ue䜀0T73^'"-lZ>i&b/Y\&{zrJ' N:_UQyVj_}~tò]ATj%J,18 92|1YEss6H'1bs;}I8YͲ}bSSmY>UͲתY^GvAK Z A+<bd*@1lr}_L T2Ĭo |&N!X^NgK/xgJYE~<Ÿ} sF aȑ:T[cY6,*_l~VG]H)RVI7|e#@k$QO==_jB/VMkil!4 ǖ@;rjQ^Ad~}P0V\[smuU/4|0ޘ;+ldsi:3V lEh_$w?w/ *EV7E){$T1Ȥb@d"ͺ'X$$i"A&Q*>CQ/~ڥ0;ܹCPUr }|Y'/_Uyb-#h?gT/{2[O"a ]v.X0V!c~a_2q3\i%|oy)țd^ͦ$-ܞQ~1dA_YHO!JV ~֫ dChL {v#AEtBnh-u[8j&$ |$v 偋脎N ֶv vkBBqݑ)po*Fjr]\Q,s=Ǭ8jeg=g{W>8qfsb%k<6psc$Q  =2k:I.xxy'> ި?E'<پy֫Z1΂ m$#xޛ0X)@Rw*Ho6n9 dtSx $7 +㢼BpzzN}S(YO(\QLBT:W2)ڴbHO5Slƭ݌C5l;yl^NP,'>4<R)m*3` :S_UΩzeCR&y9=욪WH1H;漊ZfvVC-3=fHVvr$G N(fP{1-x  D0q"dؿ^ľPq \حׁ16a O׌=u`3:" 6/n)z Oztc*sD]x͛lR큟wD|&D{$1#gW /DNNbCߋfS (Mw8h*uA\8Og#"ldk m/#eno\Mz-nbr"܉]y t1"<8'HX/$[vŝfѴik.SA50%0t$3OޗfI{+`:"1A1b s< Qƙ4>1>xƽgd;K>WIju F]X/-n{x@nۛN83~Hb.ND/Nl)l}^d0I;ځs5G<qG7g)(FD>2?gw?kM4?{A0Jk2lxB@NL"~#| e:ק= ^rwsW βiBXJx&t_ֱ"T$1S:Nd$I)Jzbpe%yt1([@v[]YxuwKNwC821 B21۫ rYm|ڤ#7]mWMlS&sCSE$8O2d~ԂqHR L &PWaC4F.9Q OT"JH,lET!2 )5ofi"a_8.rjȭ_ϯm8]P2zh*RQq|z]Z/û7^2$.?BWo>|" ro:_[c^r281 0 ~כѾz:5ڿ;dJ4ȑ0SNp0U$0&(1f)dݝyW L m,Ⱥl܌s~wk_*%0Cv] #yAۻ#d/Gд&3.  !HYB9)MͷL2L)J'|H"ڰQ+jy.B[dKt4Ӆk"V*(YUd!g$\pZcy)}d$):qqKEpϽup6Oup UY}up6C?WqƇKx1a=^i;-z2*Z냨qD+X;k ۝:GqW$4d%_:~U_`O+umdz1QYɎ"|۹“[=YXܳmDBW,K 7+ͦVx%jCuT#$=R!RQ16nUp㻟ON.Xd"8xm'TK 7}T-_UWʣ=6cgX\B.^zߓzlƫyo f/?)ua_2񆖫T)q(R֢MTv4x~z  Ã?U! 
E4H8~jPN>턾δ[x vkBBq Jd_='ڍ"By":chS"(3sݚo\D)LQ_'\1+NoZYY&쵑gP\n.{O'zlON[L>7m܆^z:͓^el|rTD2."ɂmD2HdzJ{m A =Wrw> oVx ^.Ÿ鴲NtJ'r񇉡rҪ\~Yųe9tܰk@{07V]Ps~[˲وfH+DD)u U3վ$^Bd]@īNp (`JRZM2(/af`_ lL,+ POݧS%V=qH 4B3#CHȘ])L{ф?[Rz^Mzjtj6,b00ޞh`؊ecqT:[|ы\Q`e2߆Nާa(erTi曅ZX*.}gofـނY9{Y/V"wp+m$$sد}}p/ h(ho/;qr (G(& y$'l263G:5ah5N4 k">eI$oyQQpV/ZaC&0^HsT_R 9g)}Ru)#.%4T Tv+_5*te|}de[DvƝ/:7ߨLszxoblT:_`hE[On=^W8'Ow^=$!J1{-^7OtfWŒ>܋ھ;p!#D,|2P}Sm},w|rz!і_ڼ)CX#f+x8Uj&r0[nN:; \@?@:J۷8ElR}bm'>O2)=|]#<^}\Q9 }a{yI Wn4o溥NoQY.'n+^ͬAUQf+!Q ROJSA[23j:Ow?JkSMP8\'LBJ#*2d6:77ZR)$h!x!äzMͶ13Zzk[R~>滪"of!x8f0;mQKjxͻ.o:s$(K\cH~jB:JuU*Jgs{GsQ͵lpsCC dY/&W]̦+[!>AD8e)e," @EI,Yjq83fVn%]Fmۚr̻X2gZNmY;at0utixy ;l񒑐CHVt6H ZlZ\A @8ۑpp*U\l4]jO VlQ+(6 y e7ep@'Fٕ[a[;bG#Vi݋cow GJ0EsT_=U*Sw_dꮻRA8DEZOہl.ѥ5 ֈ$ !k@QuTF-0ph:II,){ P$6 JR$ňCfTHiYY6-; bhKXZ E7 ;w<F CFݺ9#,R;g943R )9`Fܺ8{>8 Bc(9{k_ʅm̎vvt$Y0Ɛ|Emޤ|?pF^bnZc=9vM{́`D,];5v-f~}'}-'H?␭.G3=v}u..SM\2a,}IQq*XTjH_ a5Z VIݡffs j<ɺ=_'sh.@\Ts/ycO\"n;T^U^CF}&> 7ܾ~]gwojZ l24܋FTMڣ .Kϼ`Et$8" ӏf嚝(ӓd NE,8ԋ^%ռ!X9ejS&0M#1bi$Lpq w/U9v6.me(MSSU!T%zWtSz39'D0zYeY=znY!e#ZO+M{^e5.DVbp ym5cҳ5E7I }<\tM9WrO҇*zy|fIʞWSi|qx2mo&ߑ.K) EHJwmHWyY`%0[ `F^ƨn;_̒ZU"+I&v `-s}gR}TPP)k jYLݿoÏs:ͤk @+mSlsfEmT+GjT"׳J 2:.(CT%Qs8%yQZYyHΓz Z"ڗ<1 e};$6o[ nՀ"#qx)kDPѧ8  8e*pՂ9gؒMެ׌'Il :{ZЍn*\ջh{ fc.^F-S4X~5,K`^*ћ֖Z>N>@pibAŷ[PNη ͗{秳w܌HDu˒ZdNf>HSr2C%I%!zAI|-IXr#caԿvۥ w]8%Nͤay0HIm-b9F7pSggnVF~9'J*ZN|#(;AF _Je"0O[^HCh6PٵޝkF;հ02^-Pr@Yd$6KCEEVsYoP$VU XɼMm7Fnjy" Uk4^(2/БezAtnjAf_m ׂ)2y͍a(u Bh4 dbخsRx,Xy}f2uKkk ˵ HJ]v]IfR,uJI ∢6ħS5IMK 7MTM] i&#]Wu ;o 9OWm !fEΥ] U'^螁ҽr~rckflt q%ERBssb9vء B0iN,pNЇ~j`)J/)y_nlPK4Z#3nˁe:_Bdc/Z7RDĭvQ=%I;Bb ? so&UBVXZ)^T (b˄ P6q>{3Av ]:(O?1*Mы6ABByqz~y4 kD6].(4QB~EQidsyޔʰAvoUS4զv3}WϽr^H9 eJ&_>xCNkU(^!2JQ!C7w$%;Pg h/,'ۏK5O=j3.O?_?|~J驯Dd\<<~Mci8}/OΤĀ{@l-f{)sS-HDE3)DEՀխYַmkJ5Lr͟^ Z҄bєt5#6`-# 87{[Y!dǚ6N J_h=!1R/-%RnܺC#*Vav+YīOmtFMZV:ºS w;EH@VuFxlf7N J95KѠ٘Vb vJ{ްdgNd1b$xUu>px#kp|GÐ.l_ү}CM)uźWP0n (z%;{ RP>h|yr2;Zod\Li}"T{hעGWd [%,sy-o]L= tR5. 
E^j݋FMy9ȀRB<~Y$k\^VD# K=GRXa%d.礼H'K8)nXh8 tg e.CZ7-˼"S" ,drJY$hcP"@,J.@@b Nx_S&Q/ N)$8L 3(?ftUuaNq 0 HNNvyҚ[aJT.vI8X՟HOb)PNGO}Wc|–GƓ:;8W"#-,y'-}I*j5qb50$7LBt?w7q wwWi4`2׏gmF ?t.|G+h F%Ep Q~+ $ sHo{}ȵqObዳ?/an_/|N!4cɸW/C2} /Sv$Z|7UiHo^ח+%|[',۴]_qΰW3'h; 0\kRW՛*܀fS7p볚[΃m|o2:Vp v w|X|XOnM4$>$}P-hHT]{3z,oڅba&u*b0hg/$\ ںEK[BJUm&"+h2nJ$;.`Av@XϖحVV^%OiLFNC>8SjQ%5zj+m)؂hZs+m*3Uum3Q(Lj9fk-5+2)t@bߚ[ݱj3 #'5c&֒T 9&Ԯ( mKM *KV)`V h|KMP\9>d-5ÓX7[mpJMν_ʒ^O[$G=ܑlR=j4A_ߘvp1,B[GwqAH kEFYn],h[t%B ^~pA*?%{Uύ{WI)uA2&+=Ψtst7"Ca_g3")!jPc.aߡO1XtSp![05LR$Q3ShdC-INTÙ0awPY]iYoBVIۭd~{:$afҬύ<9VLݺ9#rq9j<3maÓS޽1Ձuv$Le vȅ"FdDmm$/[ An-Ƴ 3j\d, 8_WV,SRlR”{EEZMi 7M]U 6u6=QGz&nV"^ V "!rlf؈s=?Gn 8R|Q;YFPc+ckCm ˕ȵ[ư'D uԋDMqb"bL[(R`ȎAb\^ZAr9ݢA @V,Xщ3%I7p묛Ѐv9iw:[A}Z%Hϗn/FKquO0ArZ[ZEwG>q#![ɲӽm,*:[bʶ%XJhal\c۹g뙞29`n 1/.DhkUoWVs[i-DauA軾fTn5FWLDhTN(LS(C#p/.f fVʄQ`yh֭YQ8 1#JJQ^Bdݣ/}=# fA#&b;DW1@7D0QwVm((U Qi\q(UV35J:J:mݡVY<h nEk糷}.@a)q#z.Q 9*CPg-P)Iv-2Ķ?;=?lqu[ 5 Ҧ対tb.=".ѯRYڝqn sMv]Z|B;[5̚þBfD E4gR^x Q+e0 I$TgoŽpaK3h>@"JʘԈFِ_  E-XlNV:c>|p D3=, ozAj5]M89sPclY,7'U;ҳr+@(& ;4 k?{?ԃc:ɳ[wq㤲r$(>OW-z60hGͿڧ}imF^Do<Ԟ P 8^&.\XDvzŵa{{#J*#k7y^Po?7 spHAzֱ\qk.Ϋ '?~6\-o/<\ m4(Dxh"+RAn,y2*.>SuXtRT 0z@ˋȯ#XN P.p֋V'g {)!UmK'M_Çռ.p]}BݻAwΠ.00xeޝCʧ;Go?t(#\S+{tuXZj8=/\F8ǟ>︘]!si'i8?BwH&N(N-pf!gݼpL"λw!Rޚk-Wt@5輁]OSWg0{qteGG5 D4k<2$ϓo_vqrޑkmH?>6;HgԸI'@N߅w,D\AFmzv=unxav 5)u5qoJ$7Rf7Rd PT9JK4*M#Jk X#"ô؉_W%g5kl KLl^FRQ,1vWTr1y.;0ƕ§o}/}_FGi'0kO_J^QkdaLa-;*t?lavu% ze½ - P (tA9 $h} {v hjf$$BnoW?`ÁFB~xVb Li.~P TB(_YuBXtޑQ!nP7r:IQb+[V?/"ىg'~ b2 P JXBVʔʘa$:#ݏ(syי;QY*/z˧yS6 .VTcPD!Fj`*ԑIfH5F c@dT̅S*18:MBUܔ{v_yx6,-ׅ $ _XҚu7E]wTcA%+ o_'ZH¹ǒn, _Bi9Z+NhQpwKRT얅O*{Q$;T"r(Ī;Up= ns=T?R [JRVBBJҒjC-\I)/UPF{!\IiA5ej[JB+UvxR~RZPMQAJ?RŇ^H1v?`>jӓ~9T.'TbmyP61g W3M(48&vgnx3\e(jM)b;sc*(ռ@FInoD hv.#b$OsZ?o֚άj쉹޼[rj0EyYǒϘQLD \^npʴTAx~iD5,i!i3p#w`^%lgݰ2ux7HBx!x`$50S[*/v^| !$P rl&ŻJb4og' O7;hHiɬY =XRTnǹZa'nst ܨl#`ێaq :1*a O2hBDq*k[I>ʹ&MEN;0-VRIn/nl2eQ1jRIŔRnad,`3to3X9^xmn]➌-aе[֭szbLc澀M~sMqכ>Zpo?ŜBάMw_SVT~VӊჍ;?րuG{PGNuVzsضo76f(kznZ>9hx.B:5EޝsaBu ՜9m qBm൞p[cllMVKgdr|;xNƄK~$@]=|5*D35ZcJCIluIHI1!stԧ]|4[;<8&R)G4\H$6]$b9^tз/>o݌#Ihe1|S!.IqR&;' r ?;?5zfk9Ss{2Bn*kW,^޻Œf|YYZno)Scb;P>xz; N<h£d/vwFKZ.G4ˣd}odM+m[^jpS~L .&k7d̰t 5R!?3\d4IIѹH˕9 "r*e3M2,ߢz^2J2-b-W3k|1h4ZGK}j/_+RRw Es%aAXiŧ4 ]瓮>sFUVR}t'd(EB DFY6)h/c,x}[/XDKD,- ᒲe༫l~xDK^A[<@ ʹՀaX'%/ X̟auc AADAf;ʖ>lNSd"p̰D`Ԙ# q1$HX{%Տ#k^4xC)I3b# 3k)8Ij <'JY$i,x.yA=7sc0S4ISHm icc`FSby$i&bReh6c;,-W?-U ^_ƩSܹ.;'+ȿ@0]]ScTTkʼn _Ŷwj!|WrO)!nnBM&kZw?-`y42%@%ݎmjO鱔+wT)ko'E S:F]&ξ}d9<8_GNA0NwO!owKo;_xOo6J6}J2ZIo(BˎL뮄 VK%9ZMÑtwvPuz*x 1Z2zk;:i#y ]$nvA\@O.i5ojN4ѹMN 2cNAvyuq탮, ޷yMKx^S6ւB2=Je\<(*wzݒDFyJ{W Mo51ܥ)[쭼|*_Gvmr}Fؒ=ϥqߋ3Q,imtrnPuEls8T<򕍏t1z=|x6\Nuukg}9\ŨCwOFKu E_'uӘtA1+z86t8I (ӁCt(e#6/t{ѓi[z~e2(st մ E!yZQWڠޱK*U\RI$ K)'#(*S*ci̒] Zjhy\g%[=.j zxvM&*Q0BTio:LTc?Rf 5ʜ4Dņ%a/>Am 0,g J tE5.{Tfϥ@k ˓OS\K.-i+P*LǟɿQ}wȿ}{e&Ud˸s=Ll_JԋϠiJ]|EgyImi@[9ek)2_^r0̾һ~ΒxH1 sҬZokO{rrLiF`An"#Z@ٹW4JqFm0o52W|0;0u:b%4ZJiKAK#h0W-Rvf/$O!^''yX;;KiUgdApֽnOu"u*d}q8"@és`B؇qRVæ;F<.y 6n\ |, RrJF9oO_IDH+S|v>O'HJ5S䱔VOWz@e`J<ȵ0yJERU0ҌOcJdFJMʩsdk%#%F)I@%]#@kND "Yby)(CAJUh=`oWحR R"?$Kxz˩ӣl]ڧFܵ_-?Ҝi?^>#T-7|Z⣿N; #LFFUb篟nFd\mPq=Z ;\oASƔPK&f4?n.yY)Dw1(&6 id&}MqAwߧȘJ'x>}b&ٱ:|r>&ZQVUxhl HR蓧g,>'V|;x.[aOg9Ss[$_E;O{5iOrQAOQ)gt0z5_odVvZno)Scb;P>xQfhGO/Vt]zv+Q2˾7au,c1=7Q)TW1Pb9#4Z(DFqB!N3]oY].imbrLUYzAc#7rZ۽HbUJD;O9~+~8KzZVbf$YZXh14eg 6iB Y!  
f:!PxV+Ur-[ GNSwzpڰD4L^^-T='4uz_0UvL'!V4r?s(J&kr٘;c`5ԟ/z[{/NX5VejyPa^~z+,%HT4g) &g)#9O\i2ę$FB&ͭbUK{..Ͽ ėw<`9Fk.KH4v' Ҟ02ۣt\=}b;d9+E?ѿzo=EW/0$0; ?:ZϾ#/O>ف_ _Gjo'.HtoRI W w>}^HmOݮf,a=Z'['l ^nv~3z2y'G牺𔒢~*J>djQ)EJἼCRtb-BVD-R r' .BK[dr )Y{cVq~TtR Z(NTߎSt83PX)9$1FGL"`uw*|``:pNW/(DPI, ( DS1%pQ\\WU?e=)E鋖|勤ٕŒTΌ`rh_3 J)RRqBt1/E2iD#͑V)$Rdp q'+Azci-( BHɫY-iiNN:+$FQ&\jEPCU*M4Q`&D㺯H>Z(!0))2 -T-g!Br~wo X%KYXx5^yC)Z#V$"'%FOGJUD8G:sHt%FAvT_Xl%G5-HTI N6%) }Tw+xS9DegD5Z˕xj*!l̾Jy͵%5fݯ.(>Cs㥆WSoVW1Jx;Z` %*DfH {CnWjrsQb9`Xzٞ]2ot=onLc ."dRns ܺ샺m]ۉg74_frGn:\GaX,gEFvƳ 3kϦ^G(P5!_)Q'֍H`b:uۨbN{yN̺> nMhWtxO DuQźrAqŶ[0@ք|*Sq\ )g靲5:?W/ { usC,P\ʔ&J1bS%R%1$ ʼnA$NJ`ǂ1)לb`܋D wX3lLbW^]El>]>L>Ǒq1?,+'Zxh6CIq Z㣻#k.J}n c!65 yK!(PsTif}Ox.?wOÐ>yww~\{I}`ս?&-|yF]Qs]wM"h2L7}NL%"]\PW#˅ G>c/(o у_Of_D^* tw}B~\c4oBVA?/s9T+r̉};9y391BJtFBxRtjRBd KmQ6Or:[T~azD0: `;hϞi'I-[0z@jU)ewNi ]3Pޭ[4W-9e[DjbFwOЈj>nys6Mw͓T0ӊdZ85A߬a_*Sߏm6Qb~z@9:mTV0IL/weThӡn#ZIf`:aLRPT(0>EMu[q>mְsʈ6v;S%bЯ&YD3-#I!X(KsWdpqMt>"T$,OE #SI2dwNV=τ Ѳ볷 gd/Η.wv~D`6e =_m_7\ZEo֡@{w"]74f>}|Js{ha֞E .D6:R* bxR&O}S[Nmv$>k 0lJ~2Rdr )Y{>*X]< i]Z9Y Jά|h*|ksl˶7ٲ+2InabWP;WcS edKiԱsui/"𹖼|k/1-,ǦLmЫ65n46wT*~O`BX*ǸѮZ FjG6w²df&6?5xMO}GzrݵwϠ '+V_hd?n^]^b"+%sxq`a'ŏs}L4Idb\J"nKiϺ~닅]_أ{!{MKbD)#K1T3"ʌy0数?r L肚  ދX\%ȝ]/"qmo%[ۢ%^FHئQ"$rkJ,Dk*R#pᥱD,JcdR*hB^ f`Pу*3&1[,a/mlƏh^`²!ZR-2ed9Z5?xٸ\^۱`!%F*&1LPibt&O"\`"ʏo&]! SHP Vw`e](`C?;&`XdRxsSçm(C6rC48A%;0ZĎ1ߚuXc*r>'P@(p(-C$&"ckVVr" fqii*ScA=- @(R :C˭i=$+&:V_Li(o(CprJ'?oV3cC&1,˓I\*s .߱}6PXJ r}j Av?t ラr(rv2P-uQ3s)d EFiF8S va_"d@ 5Fx;^Uٻ6n$WXݐ;\Y۔]qOr 憢t${~ % G|8/ʓ\44t݄s!T~0z8c4Z4aj5췥(VZI jH9H*pIlmw48#8߭s\MȺdiukQyqxNԼTݽG^_Gz#ibF/"?RG x6F)F(oD֡ B.d3(Sc N{z ֧hZfʬmhź)@ZH9^ k-ٍT8N:N9AZ9|bhJFk!`V Eb_Gž@0ľڋ}q!3_glU?aq7wF>!r'᱑MVx ~'%Kad1X˿!QF5kD ʵfq&!Tdh !FA_:8m_lcd]paaLHكE)i W#n蓵jȾf*q6k]j+G1_ .6 Tn얻XƎRP[vqXߏNoo]hF‡yвt?zR}eyw~6e#~ {s_4b*˟}'P6,s˫\,V3N N܏=Iw$B4q`GNuhfX T+c?  Q\(U4 j>0Hrϐ9A(;ul=2 gPt(@wfY/mIy.™^8{?KW#@K i #Dk0O!@Q` '?hqy|ěY|wkh]ww_=}΋>MfwZlm6vX==RS;RB>ְqqQ5q?\|&JuK*ļߩ۟\OPiT jI@u ;eVMSѫuUZCymFlM@M_^ *ՑGuzRzJIzZ5:{+َKt/OW?5qPaEjr&{zc 3Kwn^2 .^|/f/k^{Ջ+g11t)IHa!t$GKsNKT=R_Uf J䃖RO-\rnV_ث ٨cɬ]ޫ_ܫ {UhX$w,I0+Z-O65#ww#aU:e1a5ul[h)pOEK5]OACBIRgM-J<XA 5 vP]ϯ$^x3Kz ڙrVCJOKoq.p:*G?l)ؗ:%H-{/>KAwRIgc`p}j'_^ ׺VQd"K3C\EJFvuĘeEcDPI색 ZB!`$rC9B˳iEVJM2õr|$#8a&( hsPӄF$ {J?Qy¨&IK1匸RHH)MQ)@$Xd3b4I9ʘ4^ déx,SnwaoVEe4ͭ?-ز2jqY._ՅO}$ĝҜ J XրXY#EY;!%nG~WVֵ:R [Z:~.*T<9hPԺ9$~̿usc9aHPW!5tBA8I!p3UұK i=..Y=(*?IfZR)Yӆ(:e vWOք 1س;1R?nJ J7lR_27xF\%αStWU a!Gb[RKg2O.5>T):g-J-uϹZZ!lWpZBKI}UTCR~ZJ ^h)E?-ͥԯƫXWKETRRK-=k-OK|+ }R_UJ YKKKs.eD_꫊ ܵ^v.)ܚf8xOgSKĕ(5⨓|sͲ,%cc:fNQe4V(RvYREF$BLAZ3KL eJ3%L엧qPĮXi`ʪ苕q/~V[bf3~o<#\K@ c Fb1XEd`ܿ&G`P#gG`8Km$n=%$P*!!@^Lg7vfaۻUiXEc84¬^Lwg6|])Jfs( 8m_Hj=/wϐkht&5=١ʴ-(ႂfA~*H, X,~ni}}FSVWj`5 Wstk?bkۧ$H&/ vؗxC UyþW 1B(mF;Y+> &X)}"oŹ_vNX֮a-1݋= = QdKNV蜾WwϓxF;)I&%l ǒ #':OdpwrΩ%Hnj"2\Ƌ5W;|[>q\H^bm6ǟ`8.F[~LUi{:u}m /+&$X\ܑF6jpyJ.ܭ7r1Gf)!YPeWUۻ̽ K)|>bTBؕiq(~,`U?ioJTm;FqĒ};b^Z]rbožaJ"$u1L#)aZd;c7`dvGR-Q:X't[sq0#Jw4*1ygVo R9xSȹl? 
A4LŰ[%ES 2~-&\ @Q>'E Z (ˎU/ӴH.C dY~ze)4=|t e &kW>DtglXļ쪱3Cq?=~WřBTHh-W؄!)w9`s.F7kwm>*0 om_LiW7 EORS3>G>Tg^߯;:Y-JׇY6-=;-yUw]HH*5"[KQi)^h) ?-ͥ&[K۸cTBKI}Dj 2槥9TjRKI}UÃGUEQA;_ԵT[|=Z_꫊ Di?-%I' -%OKI19إ筥Ti)չ* - Zh L+ 2zdL41<kiɨ@1IP*Ifֱ)#hF'ne]V^!Me*7r֛m^MpIO,3s6$6"agc=m1L_FWxX ں3!,pX ^A#Ȱ%z$>*k Ҧ9ڪ4rF+~ƺъûvǕ`ݵ<~x,b43Q+Yt $RO4ܮ;ZkW̨jG쎵[@d\*^;fi-RuLMOX34HQ~ ՟sVqӪ.A'Jm%XOjc\4(Op$>0aW[& HytL[vXU]M\?-j._Gn~Nno 8 u\vK EO(9RoFJ}j7*(UIWc( W p`J jzMĺ.)i@+kF,JĘMnj5)cB@EVAFyޢ[.Hٔȋ GRċ6ώIwqOQgf .Sy1ˈ  F#!F m^6y%#]Cl 5(6xhip kL>`죋KkU3XDhԉ v% PAKu8ۤop:wj̿Ҽ|ܿL9=AA$&e]/7St}tէ6gq&@T (FvaWvtRӔdZ(T$&&o"̾6h!M5K'8ABB%)7 2Cn0&BΘ&$QʲDnߤ(aTwI_pKLMIn~3YdIYlj=`hY4Ƃ'||Xζ4JDiThv>:8$ďZH5!J"YT;yBAC#s)瞡>$!Rޤ0 %5)qt)KIYϔqTqa5x\m)=[X3T&\ ޟjT=W[a99!RR$';uNRUG\#uU TseO=gă釉!+E2[JʒkmQ @+`s9A59E1uvqHi=XKa^sf}ͻ[ʓOg_䬦N^ޝJy9Y.n*`+~\,nOON0`Sci.yt:rpukt@,](uX*|d MU6?k*5 ;=(68/~W\-/MAX ӫN +**yF*hx w B^~ж fֆ[O?W<;\sc%Gf0(aJ܊ ~TWOoIkտm7(nŮRYMjg;Ԫ_!J<&ܘ<[\]VF}.k.r4BKlx3>Hf?s?՗#yGO:e;B@_@9ͪykwETj%6(V K*˥ 8O2GǂxuT+ F{^YL@Ulaܱƒ^SKT{sQ# ӃMŒF,q ukr);M !-ȗ#y:BI]% O1*.i{G (S#th* YisQ"K"/i$7y*'v+ϥaDjstsfv'*58DCF9*(#URfKǬL%hd~mw0dRcn{sAcs&鞽 a9>X'n\ Iq=s;\n24%h3r6'AWxu)T}]TR]A([WiDZDK#4f/N1AqNF/Kh2qn1DIw/<=0/oa}5_㟬fEh}*++& ZwC^YqXV(>.[ǭZ?_<.{tN5zbz)|G6?o };|ԝ?.6YSn eyF԰Y 3ҪUyPlOS]|*S XCr5hikǸΌn%#09I5EqAD4xY@S[XmU xo x$q ^ ;Sȕ}_N BzUƟB@TeU3V+ SI'rFѓ c_",M8_na39 {kώN>=Y$QY:'w ?C?ȰjemB}NzEVO{p8tfZAd]^r–w\M_lb{޷ n~qSҖa)WyR>sSDpRbFqK92W#m)tv܊Kѩ/:bXJU>c8'|H1LY1 7WVx-9XJ["ni]:8]so@-AU[1d}7_sxqG,Vj/mogGk4Go_.Bϫ[(._37Gw GoqQ!tZi-[] 0>6` A÷gWg-4fGͮcC8n{(,`{;X_ ^K'd8s*']K8+Rp ˼4ᰧw+] ?ZV7Kr̯TOS{6-7nӲ 'O$G[#6SĶaOO$ĻÐCZc%IU]?Wiru0jpl񓆌\O:p\Ƒ~\=*Yȡ*Yt`!'Zi1nnI5j 5-"f-W jGB)vcI0NQß OȼByy;Alj%}Wυ%%cLJ+,q*P\~C'œKŭ<0z#i&! R*dKfSvێE԰ΰJ*g*GoCI!JސGv PMTΚTĬժePI H [e(ݿDKunaoS)Qk7r ^\[**a S;Rj3R`Z!x&Ip=>r/eK(3M- Z*A=`{o/kRHR Z9h[t&f/DM&EuNIh+)U.,w4e{È0aNrH:( q˒'(dthwLr}ތ^"9_3Ԉg4[6]&%BGSMDT bKÝƄHHkKombdhLq6yvch]S1wtnsݒ'rHW.I2ՊztnɇUXM[XjǺ3R#vo'^B 5ۓDߚp[9>͢SHZFx\Fx[pWZϴFc6UnWެ7I's=Dh<'\}տxI3A#v  I] ^dAwdZg>(dqX,1cl9h .ǵ0C#<5y;.w&BϙA 3-x+:f b7:<HİpTfI̢~$\̔HIpvp(po8,g`VU\ Ƌذ2cΡz_v@;~qd`e)*0bT12lVL]N:ZꡅyCwcgS乚j`)Jl ,MU0cQ E¿GO .D=\'cj72/! df#N̰O*ZW+< LF/aC-ߖ_o'k|82Qf.4!} 7y~5"TvO zs;?ܽci7ӫF!!_&ݒKZQ$ڗ:UE)mcX Cў9IAV4L Hp`$Pj$g1 ɸ_wύH_Q]v2W$uL%).c&[_%5ŇhSĖhFw03Չ-+; >WŲE~A2ǼkCNY <tޖ'O"۾:ȋ5`/n|C`/.(w"R]Q۱Z E[.0aDzu5nj r߫QqqiW'D{!!߸V)I;r݊7.Y"A+QπJWD+ !߸ɔ )9D\vr*b%}Ǘ_Zb?N.DS;DŽ"IEj)qiHtZZ!@"IH%a#Y Qtd\qi)?sVT;( !O IʝD5: UT5.%(Gj*@Qf-5yS}{x" !u22bE%b!ʹ8,Bc'wa!28_):xU;Oo^ y W!ϋ JD n)ץW_Dwj%rEײ%(bZge(u&g[ ˧|O)y Rd;&**C;nQD! z"@*EgY;Vt>{~0$ffp{u̯ǣz=uc0)߭ga]671yj|XM> uLíoɇ5CP}cFdg!&|'=A$Mtb@$EHR.8H H )Fc(6ZF'hepcq+lv I Lm6YXJ^JJ DS`ӄjq&_`J&ť8U*`*`PBY2XJIcMITPg5 f.V!%ZBe_wE n*I{%Hs'XN,>V~sJ)Rki ,U*RԥS,GW+ރԌUѭE,$>4NJ1Ua:p͹MCRsb; g$UDLSIP8p&A[HD:b0RRy4'>/mfw yb-E޸f εO~z]@ǿW$7߿#>}Ux`e_<jig ᨘg ~ 5Whr{(wMoof~4-fj2=hh_%J<$I(&Wfi~ ~c/9:Ձ'8gdg}J `_g'v^\З9[OQR+X&E4IYa].44@7 OY ڔV3Ks[9JfaXD*$ wxI 58Ix@+RD WL(Ĥb^9DZv2.cS$M-ÔPG0IZ\+LnLQ*ǍG)cy6^\N XrRpɫ*S%Sp&TdJOUP%\+-Ƀ3× UZBa':]Vppֳz:AbF퍅cj!V)]ĎUQsQд9k3ogm'QNoܧm ~=X]܁ZVn9a\_c4R7>st$*xUnXe6C _|~Yg3. 
L|]%Wzoo9 mPu%EDwP"[M+sCͨFԇQwus侒J.:' mZ'E[+㚷([ex5YJa#HűÌE as!۟VLfx~2T9WtU2Jd/Q`R5{_6CJ6u'GY%M-H)NP\5_ U{^sNtW%{- hO@QFFbܯ_Xo*xlz!Igza7htvOa_x5cw~};/w^"D%r;M#H%RF%KCx#C)TmM`˚)DNV?l>fߠ BYPV:`]r!WՋK.~9QRy=鐁,D˒~drQy )'wB& VV&S/tLL*pN+#Rh%__B ^h#g]IߚtB|߽)Lbuk5/J0gMi>S+R*Rĭ4R+_{f[T j]] ͕w/J*wo>3s\:wwA,xKj|;yoK푩7FǛltQ/&;g{2P`sΟiTQΥG3;]HyKq.ZX&KN]ڪ>{zd"Lg>*K5n س[ D3GbdƦ^{%Nf*0/Lw> #qtO!}D,zUEt?f\X|Fe-[p1qbeR~^M[" H%Jbm=3Б" _P\5v1XΫqޯľ'"v^h*az|'e8 0=s3taE*G3+֏ 3>g|&2-,OXw`z 4tε]xiSSh2=}='sN?m?ApsxbwOѳ;g]Zvֽ] Ycz-JUq뼥M-^vɒVܟQ]ѺřX?}jɍ^ݸ%ϕ.et?Dj={P"OZrs}6HiPw5jm nYRu dfNj3U.nq *Ԛe-3Ww>IJh}=*nf*OCTX/h2h:w^3Ѵnt){y_IHyQ~Wb NKdo}S()cߩdJi1LXhB&B:PU?[҅BWZV!z1UUxjY,}X )-!sD1KHcqbX)J*N4S*6%<100x( 5T밐4IƉԱ$HƱ\&F!@NVsJml3U 6<ȏ&C%QH ᘅ6μV XZ%tgdXIO4>xFCau> ci@8uc&FkEJ4NWRa>ByC"M`\1L({+U*x`mY;`igoo`F*)7STʆdrPF_z}@?"\! Dq8*KA!*|(bpȻCKa0-jɰc`(,KzӡCL0fz?oB<\8PWǻN+hJ_ǽ H tÆA{ך--G^*hpH`SuѲ K4& 9& vhrJ@xS'v4 LK}9Da}Qv !k tRj&v|5ƣS@23{!nÌδOtIg/~XοD9v>8W,g%{d )9S9C&jUfZZbf2yPoְn{UƠPN¬yc0wa!L=wnvyM*it|̏3gs&m@3w_&Cǫ$ČkQw3^s˷Din!'9wֹxqtJa-cc+k͖t22[4ˀկ@ XT9p\wLC'Ws}xݺؘ@kXS' BO"ꎢ`J(Veaayƞ}pOnCuK \H> \')*+P>qKw&~xO0Zrzb܆tj¨hTvB|wT3BXӓyx^lkG$j>yRWm+~8Gje~SNO!^GU)/On}>by9吧M?SPvo2}+{iԻsדώ,pgߓKOɋw+󓲚$J2UE_n>nU1cn-PXڭz~YvCBq$Sw^]2(^} A;7(h!B|}\)G_Eh6LSIy4&pgnJҭkgbD,,N.\$6O^n*pDIUq^EGd/Abt?y.4#,,+/pOzWV6- S\'LBbK3#2&vg։@pV[{ F`U, qrWc 3:[{9i>ǬfO>w$ lc9yQ^CD$"REfoii^-=6 G,*Ҋq8vRMQw?%>^!5dCS Zn ܺZX^ن9kJ( crN2BUcMVm5Vl{޶ҷwAjcgJ={jcոa7L%.إ$)tX|=%-gA 8 <$G1K0)$ĐB1c'2Sdӄb$f1Bcά NpJNPqRIl bcX,aiLA 7޵6r#"Kvm Yi`=daxlG Cdui}U#`ƒB~U,^+P<7 ̈́% Ec"D"4ڶ|{ Xj>e`W//]*1%['\V:rO RLCّSBB"`Pz=ɤR :DL($s-jm贶Wk!rm&du>%H`?]ع߬1ò~f~/'c?>ٙ\k&~: ,+Es5׭G:yfrZbVb%^).lsb{q[c=è)lr'C]UM? ,ڵ1jg&ۦptCF0)Gv\{\S4X΋3=@|T(?Љ[ߊ1=] 6c\` MC_~UTW>▹++>6^h>J8Үס ('ƍ @o^5o/X-|~[^=7nZLck4ܤܛ=H " OA!k=$%wdej"uN>|}0:lݻ6={L(;}$Tdw\x5:[t  %'9979&~d T$&U:y!;C'<*/j(oQd:n@ I5$mMlF*O|A]:ҩ(a琑qҸ HGVsho,MyNE/JTRo`f\'pkS$JY5~rӿ썭WԀj:J4{@PKHů<~g%w3\$SO2B%LS_%˴%FK_Ce\,P@#SBY9 ]C"PN7]@ r\5e Nctj@<ʹU&gJK3 M KeRB@n˔RW6T/NQd/>i으إ27鑡TN)HOI?ޟۋTݺkN$w/[Bc#+2)ԻȻn}?&̋&sHgռ yT&MY5Wۇ؜NXLLXbfr#AkIFLgBo""]MC>Fä틮&ZwJhAZ[|pun/o=Wk9}9o 9}d q@*5WA6XtpQHW΅u˟EKڸ,qe Ng)wq,#Pg@JM,i&scZ.Zg97}l>o: ]m'@Gv@Ol֝5gj N o-E.+jd2KD IڋhZsL\w`sҺW`N:Wȵȩ lc4Zs__h"K2+(I/VqzF@z NxGSEL/rԵ &hD-P2q?FA~ByGQtlD!J)åwLBZQ+(P*$Loͩ( EGKEApJU)5wF͕@{U8~Mײi -}[L~&Mkn='׌b_ss/_}:ln-#AM%t5pg{*W< x1]G+BR"$!jb2;"i)NqKӝ@ {|1Bz$f>Qa˾;g%K3gb挺sCl/=>YOZIkAJ=Z HUZN2|&]^=*oFG$u$jȌЖ ,ߺnp)n7`x$!Ωq]af<8 | Iup=iz41B͚ s6`-Q٭[*cPA"YQ Kryju]V.EF2,)Cņ0k0⯇>HI9bz.H>c2(:ENҵ|8au|˭CvYc)!ѠKSe:D/ mDXtܤ~UdFf߆eZV3FtaV( g!]v|viq)N3Z,`>dr0&qDBwfͯ+AŃ93kIKi"N̬hozwf67Ճft̬#QE5ؓ3(Jؕ3sХOo*#țP*n*+t0 ǺN|ގa(>}V+"bZ:y >^NQ~yj꛷2^o]~ T,D\|>MV~I8ˡ7޽=%LXӻɿ537}x˖Izisk ot˅_.Q|"Wك_%|1xbn]^dӟ_z^1u_9,'!h-J\:Ґ/\E[锠w,9BOhb:mQǺpTny5"[hҥ8wH9 i:픻@l2ukOBiݺА/\E7uGFls2 Luw|(b; {# &ۥKvdvMm/FlזEEq#w/҈z/]ҰZy6p zj|ȗ8SU+`TI)#Lg9xuK-3mt&r۔>Pn7Snbww2V̴;y$Vbo\rk؈KzSj, fl9z,8Lo7ۋ,}/ZbBC<4y?D|"g Xw9hqU~sے7{|J;09Jm~A].} aBhNDe4qPg`)HaIR+C 6R,Cȝ:U~4тCMXӤ`Ƿzը`N2H)P[I墥稥i)4.c-=VS*.GRQ\BKy--ZM Byy+N(T8- -vR"Q E:vO[}j΋r- Ꭺ.Z M$yZjIMTLKxNM.DZiP`jrt~7$I*nsgBX7 o~3uww4y2Y~i<ڹ |ߌo}Ez5mKiNӴӕ:qY8GyXٙ[Q1O)]E>PshotYyVVt׌{JJ1=>7 wrw s顣U,0[㢤b,ɸQW|H曙h+꩸CTűbftB VO۰ ߫)/VCYcP:&\WJ]ps$6e{ۖC}crֱ =ۗòwi$%Z$Q)ӪiUCB.;nl"g{N@6~WFZ+{\ 'JH_h8Aƻ1ִ"Eu#)!' R\Zt, Li*e8ק"eF5IHjNHo%']Ε>Q D(xFaVU o}oKU5x*7jnNc\L)y|0wៅ7Y3:OdyJ5I*ip-X׉3^Ǽ)gGk:_ge>i'~|緿*jٝ "t)FezmD"KD2pTL)]IWeL|#Qsur+h;}}۩ bsT9JьuVBI'vłGtF,e,*?>%#`nh7'7ƚzsN ;J{qF{ V_Tts{E7no%''Mfp3{|X:LnZjR4!~&d\?Nn^(Ho/_.=ZLjəxhd[JX\kޢS5 _? 
V.ㅶsVΩ eBQKvNev"%G鍧L۩rU-HE(^9;QhVX$.vr>wsUCA9I;7CNn:ޤ"#MѵI_r޳3pǙ&uÕ_ۣxU=V+$bkR8хN^s GʺQGW!Cp7Z: Ё"B؊t4 gl۠.¹5FyQZ@u:~i${4< 6okY{so ˽5ǘkAGe'JZ TXsj y‰y9NwrĀ*uee2.2wmCYOߩJ7U*GV.Sw*mڗiö[IG 7g:7°ӳ^ hxe?Q3Q`|u~ֺ F^7lCI0*^tlRmzԝ;P3ANѦGmpd5UAY럸T-P~;wDs7.R^2K1wR 5ĥͥq)5{'ؠ}r)En\jƄ52ƥ ,2Ը>q1r)n\ʅ9-KK50KKu91BĽ,C-U~vcRݸؽk\JZ %'Kqsɴk20)I%⽽oc&g?N ZZ/O*!RsI64!k۴i*<74dxx8}Qan0j,i[1F[yO-Δ9}3qQb!eN$2?=ui~^3L_bX4_-[?ğJ3#Q^Em5ny%\FwW$28a>Nq;T~d8"@ݳC&=#=)J+rnۃZ+OQoLq)BACfK 1IO\z\ʵrmA5C\BK r3[vM1ZrI6/U2{Vѯ  *Ar(P5PO0[ZwSKAm_$zsuԮ) fCi+ OlfL͕nݷ肃"vݤ dY< {fTdpb*3Y Ah}3XA3Qiۀ-Ԟh-Lu`yW>N`=K_^Km@՘&ꐖac\Lc1|Ɓ) d iΥ)(Ƞ]g9Extu힆ɉBk8^"1UaE("\6IUG͔a<Ѡ 0 *Ag"ǬrIaS'8PB`!ӡBJs֎3M z캒l:UVR%X;7x@yI:Jc3lchI zW xnY"N &M t@b@TN2)Xp!&L34IRNI[fpogtVMY=AfiW׿ݑfx`@mAۖ*C7!Hcr!,)RsN^휧f cW9$ ƆR[d84 [Ƀ Zu |X|{-5yaoֈi0P#2-_F% 앾ǍneF4;/Io5Fw뤑#QhFdžh#6 ̧ؠhc"&U7~H\ʟn6Lzx*>ᑿ 0 m8<޽Q 0|?%~.|S~FƑr~V*30I5Gm x{5Qh58Ay$ |ߌ./? P=ڍW@}p&̃? <nx˟4lE)a?} Yۿ|=mj4...'6Vv{{~k^i3< f6'?iPz[l$/aӠ% ) [$ߊFapivfůC@EMFY&uOθ+s@ !poe6r-k-}2ol?/bا.G/i}z7۷#`GɁ7i<͈|1J>O ! TݱFj`<;QSםF67OSqпc6;[*'Ȝwf#t(i;n4 @E320hp=vFJߌ@Nx=li?m` 5fׯ:aFj3! E5[~Y󴇒)?*E;;Q~nX<0'=cUEkb5\ivMO=yzat _µtĵ4uhe):uǸ!t9CB3۷ot.1# _$!ehmO qpLj<.,KRP3)yX(`r1&<Ę%,HqP9ZIaJR^(4tXK&j(T ndW+bd1U3cd 5^=F:4&{pQj"@n1vTr&4ik1XdDT;qք}kXT")KS7yRm1t!Y)Ryt]BN"jCQ DЫ4GHJY)JH& B8"&3wHazU8f(bY:+T[#9{3[ʞw? 1]T]bٟcRi]<<1`5KHB<~? 4ixx?aU=B }0T!\ kI갺8$Q>a4*Itku_齲.њ)5ﺱu2yg9@MC7 I~M`gd$nGa-bY5>~R!ŷ- 6?k;O~̰Q̼W9Qݵ#rt L-|vx%hVsM>ǐlHR7JңgX܍^lL?7;z q\-{ja]H=.~Wm.> l/GïAPcD'u_4o>Sq3k3%ԥwu꣟#WW1wEk8 9+횠蒨5 Mq^/8ƄȒ" 'VUrJ!}yD0&g$0<'Co0^fBEC/V>ޘez~vډfCg#ДN<v H#*S"iHY$MU8BHᘚdnKy "KbG&<6M0 II #B%$H G* $# !U,t*xR$NXF%0h+\g!Nd7ԕ*RJq[1jDhw+ }RRμd{PxԚZP=8srĻ˅gͩ.8'; Wjn*ST"~t wλؐWEX{w/VdwpfJ7pr劂1Y&ᛱ4, .=4PGvMsW+V%>H$ .3V^g6 `{ۛ^,tmRtJ t^@-Z.NoĘmo_͙l=ܪZv,"&q~ (s Ҕiȉas _ɪl=1:"a _ Ɋ˅~ $z3jx6՘=S3X{@:HsգKLu;G?8k&63p"gW|oA5^ϜN+PK%Vlnn0ǥt(K)irԃa )p 6se~.z4-$lhEWa( zM|km$7/DX|{3^ @v~YMo$O2~Ŗl^6jI CvYX,Vi%򜻢pYYy@2g%WYIc2JӋΗMt:q:E] ύ-)5_EאVhT6ΡPI8ne084Ŗ;EZC Ia!BLI?82ˇƸ6q^1bgvCQZM [ןnCjۭ@buc'-?,ռGÛZ~<T?棙V׸=̞]qK#il-㍲\~w).Thɍwzw4i1!AdvL:, .r=;7%4cbtOҐj{qo2 eN@ p2C#vOΈkO LN.ҞjЪ]kæ[p&^Pj;yɕ#8-i㛫<"pt'bv[&8-:`ktWo<6yz-̴sc(: /w7~R\VxO,T3'@ٹ hԬcGkOsZߦ'W?rvB^9D0Uk%zE ]ۡvKA tRhv}LE`~-Ev!IvvKA tRhvK!{nG#[y]Td”4 l#/2 _.^R0"AǤ4*,w> vp3\ӻiۓ7_ˋ|7!<u@f>#3wg=y-p#)V#эd!%jHklu!)Kay}eۯMMdm{ڶRa\X`МVp̙/Zr-LYTB|mgE=_midY!u[=ݻדUm*)ڳu۟u/?#RbwYm=9Rtg }m2G?l'%+ސ %1-WGz.ܘuۈP>[] iZc}۶7/ ]mZFnOVvT'׽kM5wRmB. ewS-Cm0YK[KwL܋TĜ$icw:5D&mtWL {oWCP :c4n;Hŕ#[yM”wWEwRy1hnzanMU"[ym'l"s_/&7Px: _/,Jrœ}drWXͿ8z@ܟ˛\.~mr:O'-H'_PxJS`Wr4\\$GeDOj$$j/Dܸqcް6WG6 υQ[9;&F&6dw;&v#q<)Zպ? FL6GFV}Y.S轹dz\Uh:|oّeԋŝFO^+|JoJRiYrE~gҐiO*2('O ?;ǭ`o.ȡ A.y%l7 DKf kxY-A,Mfŏg.͗D0kiF](/.y<'KEH\#GT#oZiW 6&]sPJm*!|]z]D C7 m>)5j?'Qjѱa'ZhL5k ͏/o!0|7fJa_ȡB)i9NLl.Ҳ1#Te{^$"|y"jQK/J8kZX=.ڌϟ?Q"0Ƌ[UZlWӢG 5`hȃ׊u+-Efڒ!vXRɼRdy5ļYtcX֬H;7Q0ơ ϵ"PUך)}ӏMJ[1Us9&48im^K(X[7iCZ: `z x} ƽ%IB_^FԊOlp/5[8Ry=xx9fP+K1?/+mͱ2aJ 8#TWYO(s?>],Ֆ:`^0=(rſ~.\KH4È嵳]tC$XvtPׯAzy]֡\)RxTY1I$`ƭћrDfr!VBEC9u$(5Lc9 ҖXSš\_d yK{3h %fSmט qy5/bܳSY[50vx 0/tUC#tT`r2Ν0J`{i|ntZ8M8,9Ld֡ہfqRc[LjLuV؈lđѴ2/@qvy9;^"HQE!5Ǽ$'4yQVj0Z2|Pdss6W&;2VS?6^q6_adYF0$O|ah 0 ]H8 6W/c5J 9rA ,"7&^dy[Y8Yh)Ҳ6FƱ땐CCV=̦v.,~oKVyuAnAhmKߪ/?ܿ-X?? 0rcqxWUȒ}{ Ԑ d3|*ǻt+"8fWoo#2h&2Lk'gGE $ H_̹5+LlŠZV'FZhy2 {׾TgҪ $gwᣓ&+kC|5'i1HZ,UETk"vVrmz2}_gm~O_c2% E(G lY-y j0ј 飵X{8yLzҷ0XЊRAٚB5Bfhl]Pu$7D',SW"P8]~藌yfي㒮eg淛]4?߁ZÓ%ܪ!Mvrn'iUrTJ؅ eϝ8JU~Jbrfj2?=w1Go j? 
rV3j9ҒD" "&#[0@v9Ӆ[ \}vYwN>xc)[̗%vDZK}?=| :\EYҸ[.[̥iJ%ƍ\ծ,m%e6~E &ӻşY=xǜw`GC_}7j]_}{(̊'e0)&\ Nɒʦ_!h*NT6@jÏTV- ;{J(9zǞ_H(j9`_G +'(ʖy9 CDNC씵mi ia';;C+ $9xuD2Ky£v"LM >]+n)"9BEyo"P,=rɗiT:.!zU㞓і˽q?npXjmAS& oԣv|K G{WD1u[OKn_bk˯q:L_rc7|>eXhRIS'gPԚ .=HIr:%XUhcDzW(רx)!mӱEQy{w|$t\E6[{Wr嫝kݰʹ_&'+.ģoBxlD#&([/r8{*sBUe|SLux#Ʀ2o)c+j|3%R0 b`^. JxIZ~N`bTD [- a!#s"TbUk[rf2bX;)\_ >y,F5SŝOy%" '.lWW8 ](3bߎ+# -k /lW)[Hh{X)Fz% ӶA5iz}R~0lVݥfh'u[k[@+ZꚁT.Sb')g3'zNoY .$v]/3Dn.Ă_HՅx;&=TW/i%=[RKqN,Vvmmض|2$'Pp[~铳q4j4˜mZC;IMͣ9Qx4J0!)$_[=>׊BXfsipQl#D]p6rxh!=cn/6Z_Bz* 7HY&Ei$yGsdR&_]2¶hE C,j:M+~1k'~5Ѵ0FNiXf~sToo|U{\H8pX597$t xغ\/e.xJ8=^;4P["n}@(x20t>=$4&6:=P&Qy)PCt(җڌT5y|{kB5=)Yz:P'0M\e9 N'|Y{;!;3p61̮`Tj3vt"рB3~Yci^*YS7'}*ç:p8PaE2J . bBa b!Bdi `-m. "ʟp&);̏ wE҂{ot]4v\d"#f^u4vXa[_c01G"ҩA29 |v=Iڎ#G sLQ*slخ@F_:@'lEO?kш}DT1 gj戚ȉ\~p] 2}&`@f!(Zo 2`67q G3:͛=Ώni^9vJHt3߅΀i˜ ,_Z5]oϭ 19]S:HM 2f3ʙ`LS0VHsDeJ . = 3˻/$,%(raEg]>VhOf5g9~GII,ZFG(ryJldNv0JFК pO(DmG8C0 / e# + xEJZ^**<\W߇7x4.pq1H)ۆZY,۝j|gp,^d_ΒHPE,SB "6,vo`ac=L)c9H('0s2~b 2?cF/ L0c p@Mکq9Hdj`:)D%RX씎 @ #+;7)X@p*d]"Ck0`3zNI++0q^w-p$ d'΀dI+lP_2^W:H-a եi94o]3[7Hj,Gva+&Qp&%FFQhz$ԍI^yf4up֥ "ڥ@0=㫠nU^ɋʵE'U.,Gֵ%2+pV@KYr\ a"ͮ v"h}@h+KHQ ݉ aGyS>fd=zkÑ]Y`y~ND cWN~㍝L`ꊇLq4c^b !:C08#s$I\Y0z_< ITCmȺvq7v{4:FA&%q i.^ M0@9pژC/(ع8N@eA֤H2ԷarPBN8ϳJr@mM?&qq KQ]$|,4T\u ʹ8p%!|Jm^d`BXT: FN QH S"qߪk}(R"%G} `K=U` ˘JuKaXWzA &vB {qz%M}}wf޻Xv]|YįQtvs^ FukYKtvYƒ=w 6ق8HK? Od'IԎ? ΓsrmP+auw{ClIz%GiHNQKivd)^nꨝ !>2Yz f؟zYC\բ C]X;;rwvhl7`\v!|77:Ku\lX1yCBÞ Z&Y/Zǟ \,OK@9r<`B$$kDJh1)?VWg걶̾)?xɞ2Y?eo>7.<ǔBuЀ}ldJZsh׎(j[\ p h_ Z7gSln3;tZB|寊={i_\ޑ \26-m":hjz!4+X*[!XdHIrޒ" tE[b @Ce߻9_JfRk)Df[r?^"VJ! lH#J-)bYA09ꚠ빪Ym. !qSNqot܁ѩɺ7=b]91%+i֤B#m8,Ե6΂ݼ+Ť*mJ&Ha#zXNi=/h'?$2? u(͇R#7uzRPr):9оY6b^/&앲2ؤ,D$Qr:MBJњ'okd:wRP2=(.3hWRX4l;M 5+}j\l[|ȓ1ZڄbF4(]eV[dd71:g9%KJIiɡ D*jp)ְ&sbp\;}qݞ2VC `*PVh\b퐌RKQ:H>=;Id3/!1:Nʟ1:zk7LKtD,v.5CZ#yloIeMDI(W MC]lnE 3^h%{wlV~ Mn)ݵ5?}k<\pP_.}Y?=@kI,-N[ם(Љ1v^;m>ޓ7 z+:!FC4ued}~Pqv|({.$ٞS˥z ~}Gc.&{=ekU,[NJiB>7 ɑ!` h^ǒ9qꈋeW*,n`SřʘZIU mJN]n_*xo=NŲh:qe{UoVa`-9%\(`MNb쵱Kz7+IBVȋY8P zn¾hc |bZf'we@eNu:"nj< "`ԽzE{zb]t5ҙ%ZSFz yn{)E_+y5i"=YQl|9틄)\S`O}<^:)e~=4B&Es2?:xDG}y_섟gd^^͇CHۑjM?x6Cݬ_!z{,n `T!bo Ϫ,6ʬ ؘG̝z';~ך%rw:k%}ۡroSP2"WZ{0kwS)w皑pDu~aYgOДR-AD{dKL`ηrZ%k:*{7RJ' hevqZěݔ#-x &HQ-<])U$V|reHm.|=hu6#:Vth J>IIZu!;al'+n{ƫsz`<z0N\a/1r@yn8VTpgߒvJ=voK)CM;]UA \,5uJ;-j}B @^l0 iǜ-MbfW:Dk`a)adžZϖb%ELEI]ӯ89[p֯XBΌ_Yw"g/>lrԪ^x&SM&=I&ՠY5dNI+S!c`7NٍF:oE}R) >>^< TI⁓g#avlrF 0X8`>Ԡ.nn7;3R'V+OGΛ}#CUvY[CUHwmB$UYK?"+n'pp7{7eZXrƌlC|3djs]ʇUIa`&hqm/7&2'i~F c#[n`EO61/Faiq&pԒ7^?ȷ!_e#+62p˥ۇqW(0(DCTe)m~_TdBjCզb^f-Vd^?)'@F:}G=7M|fmN8JX)v~gjp3hb7?0vj=/7c"V΅!kTl*Pnhg/28td]]|ę,zi<'~vn/.\:.X; y-Nk/yim[]5vɾ,T;1EY^[C ڨ tUf^P%oO: jUG/blHrB'oQTyM={uoaQ>4 -">$Ț(i=H^͊lƅ2Q#C<# d>Ql)CU:=?t1-*rgtØ!Ξ?`6ꋀ EDzXQ&w9hŊd0/BzdɼN` &ˠiZ:^ㄕۑ"_"ovh%Llz79R6Cg|M:A6^M:vʖ#z~ZI! 
Jan 28 15:45:27 crc kubenswrapper[4811]: Trace[1530500721]: ---"Objects listed" error: 10525ms (15:45:27.739)
Jan 28 15:45:27 crc kubenswrapper[4811]: Trace[1530500721]: [10.525665246s] [10.525665246s] END
Jan 28 15:45:27 crc kubenswrapper[4811]: I0128 15:45:27.739289 4811 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Jan 28 15:45:27 crc kubenswrapper[4811]: E0128 15:45:27.743958 4811 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Jan 28 15:45:27 crc kubenswrapper[4811]: I0128 15:45:27.744218 4811 trace.go:236] Trace[1312818338]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (28-Jan-2026 15:45:16.833) (total time: 10910ms):
Jan 28 15:45:27 crc kubenswrapper[4811]: Trace[1312818338]: ---"Objects listed" error: 10910ms (15:45:27.744)
Jan 28 15:45:27 crc kubenswrapper[4811]: Trace[1312818338]: [10.910671081s] [10.910671081s] END
Jan 28 15:45:27 crc kubenswrapper[4811]: I0128 15:45:27.744238 4811 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Jan 28 15:45:27 crc kubenswrapper[4811]: I0128 15:45:27.744306 4811 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Jan 28 15:45:27 crc kubenswrapper[4811]: I0128 15:45:27.748599 4811 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146
Jan 28 15:45:27 crc kubenswrapper[4811]: I0128 15:45:27.766309 4811 csr.go:261] certificate signing request csr-zjbsd is approved, waiting to be issued
Jan 28 15:45:27 crc kubenswrapper[4811]: I0128 15:45:27.774863 4811 csr.go:257] certificate signing request csr-zjbsd is issued
Jan 28 15:45:27 crc kubenswrapper[4811]: I0128 15:45:27.896043 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 28 15:45:27 crc kubenswrapper[4811]: I0128 15:45:27.896205 4811 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 28 15:45:27 crc kubenswrapper[4811]: I0128 15:45:27.897302 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:27 crc kubenswrapper[4811]: I0128 15:45:27.897325 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:27 crc kubenswrapper[4811]: I0128 15:45:27.897333 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:27 crc kubenswrapper[4811]: I0128 15:45:27.901256 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.028384 4811 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.141812 4811 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials"
Jan 28 15:45:28 crc kubenswrapper[4811]: W0128 15:45:28.142011 4811 reflector.go:484] k8s.io/client-go/informers/factory.go:160: watch of *v1.RuntimeClass ended with: very short watch: k8s.io/client-go/informers/factory.go:160: Unexpected watch close - watch lasted less than a second and no items received
Jan 28 15:45:28 crc kubenswrapper[4811]: W0128 15:45:28.142062 4811 reflector.go:484] k8s.io/client-go/informers/factory.go:160: watch of *v1.CSIDriver ended with: very short watch: k8s.io/client-go/informers/factory.go:160: Unexpected watch close - watch lasted less than a second and no items received
Jan 28 15:45:28 crc kubenswrapper[4811]: W0128 15:45:28.142076 4811 reflector.go:484] k8s.io/client-go/informers/factory.go:160: watch of *v1.Node ended with: very short watch: k8s.io/client-go/informers/factory.go:160: Unexpected watch close - watch lasted less than a second and no items received
Jan 28 15:45:28 crc kubenswrapper[4811]: W0128 15:45:28.142078 4811 reflector.go:484] k8s.io/client-go/informers/factory.go:160: watch of *v1.Service ended with: very short watch: k8s.io/client-go/informers/factory.go:160: Unexpected watch close - watch lasted less than a second and no items received
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.254195 4811 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:44838->192.168.126.11:17697: read: connection reset by peer" start-of-body=
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.254246 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:44838->192.168.126.11:17697: read: connection reset by peer"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.274757 4811 apiserver.go:52] "Watching apiserver"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.279559 4811 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.279925 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-dns/node-resolver-jxfkb","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c"]
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.280344 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.280483 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.280584 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.280671 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 28 15:45:28 crc kubenswrapper[4811]: E0128 15:45:28.280778 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 28 15:45:28 crc kubenswrapper[4811]: E0128 15:45:28.281007 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.281068 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.281609 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-jxfkb"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.281690 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 28 15:45:28 crc kubenswrapper[4811]: E0128 15:45:28.281950 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.282831 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.282978 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.283122 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.283351 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.283764 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.283838 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.283858 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.283837 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.284180 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.284297 4811 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.284271 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.285470 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.285931 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.292472 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 02:28:51.716459689 +0000 UTC
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.298320 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.310018 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-mw4th"]
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.310334 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-mw4th"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.310679 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-fbkfh"]
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.311246 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-4vcr6"]
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.311456 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-fbkfh"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.311478 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.311916 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.312159 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.312338 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.314550 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.315267 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.315314 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.315396 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.315533 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.315625 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.315658 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.315694 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.315699 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.316731 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq"
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.326705 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.336646 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.346268 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.349606 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.349644 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.349704 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.349731 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.349751 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.349770 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.349792 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.349814 4811 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.349832 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.349855 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.349879 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.349903 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.349930 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.349957 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.349980 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350000 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350020 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350043 
4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350066 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350087 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350111 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350135 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350155 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350176 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350202 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350222 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350239 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350261 4811 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350280 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350297 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350318 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350337 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350355 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350371 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350395 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350416 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350456 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350476 4811 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350494 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350516 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350536 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350554 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350576 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350597 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350621 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350649 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350669 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 28 15:45:28 crc 
kubenswrapper[4811]: I0128 15:45:28.350687 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350710 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350729 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350751 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350991 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351014 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351033 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351050 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351068 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351086 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") 
" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351108 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351126 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351148 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351168 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351187 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351208 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351226 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351246 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351272 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351294 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod 
\"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351315 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351342 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351365 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351388 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351405 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351423 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351458 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351477 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351499 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351526 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: 
\"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351544 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351564 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351585 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351608 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351627 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351648 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351669 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351686 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351706 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351728 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351747 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351767 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351788 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351811 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351830 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351851 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351874 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351891 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351916 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351944 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351969 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351993 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352015 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352038 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352055 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352075 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352096 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352116 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352135 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352154 4811 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352176 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352193 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352215 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352236 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352257 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352278 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352299 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352321 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352340 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352363 4811 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352385 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352404 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352440 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352529 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352552 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352574 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352596 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352614 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352635 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: 
\"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352655 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352674 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352695 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352716 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352735 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352755 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352776 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352797 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352814 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352835 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod 
\"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352867 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352889 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350524 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350551 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350926 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.350963 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351124 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351163 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351188 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351181 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351233 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351105 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351343 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351364 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.353079 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.354026 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.354230 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.354400 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.354358 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351565 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351610 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351627 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351661 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351677 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351775 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351776 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351901 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351985 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352058 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352166 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352195 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352206 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352214 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352222 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352214 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352348 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352488 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352690 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352829 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352867 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352898 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.354542 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.354792 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.354954 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.354992 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.355483 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.355526 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.355818 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.355860 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.356019 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.356214 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.356230 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.356504 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.356722 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.357239 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.357271 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.357583 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.357760 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.357928 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.358013 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.358634 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.359040 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). 
InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.359039 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.359450 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.359639 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.360109 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.360254 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.360877 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.361073 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.361109 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.361256 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.361262 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.362520 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.362682 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.362858 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.362876 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.362875 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). 
InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.362904 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.363023 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.363212 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.363324 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.351383 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.363396 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.363747 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.363535 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.360837 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.363561 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.363566 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.363644 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.363799 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.363844 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.360003 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.352916 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.363954 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.363980 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.363996 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.364085 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.364239 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.364310 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.364412 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.364451 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.364556 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.364575 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.364597 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.364658 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.364664 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.364704 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.364744 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.364821 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.365379 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.365602 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: E0128 15:45:28.365759 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:45:28.865733131 +0000 UTC m=+21.620096714 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.365832 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.365860 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.365879 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.365896 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.365914 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.365933 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.365949 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.365965 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.365982 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") 
pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.365997 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366012 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366026 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366042 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366086 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366101 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366116 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366152 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366169 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366186 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366201 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366238 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366254 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366268 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366283 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366320 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366335 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366350 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366365 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366399 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: 
\"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366416 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366459 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366476 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366502 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366548 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366568 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366586 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366624 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366642 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366657 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366697 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366714 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366730 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366745 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366784 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366802 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366818 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366868 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366883 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366900 4811 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366936 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366952 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366972 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.366987 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367002 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367020 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367037 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367073 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367097 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod 
\"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367115 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367134 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367152 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367172 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367188 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367204 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367220 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367237 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367254 4811 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367274 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbq5n\" (UniqueName: \"kubernetes.io/projected/c5ca7bfb-61e6-40f3-9d7d-7995b948593b-kube-api-access-cbq5n\") pod \"node-resolver-jxfkb\" (UID: \"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\") " pod="openshift-dns/node-resolver-jxfkb" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367290 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/c5ca7bfb-61e6-40f3-9d7d-7995b948593b-hosts-file\") pod \"node-resolver-jxfkb\" (UID: \"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\") " pod="openshift-dns/node-resolver-jxfkb" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367308 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367324 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367342 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367418 4811 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367474 4811 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367486 4811 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367515 4811 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 
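Editor's note: the reconciler_common.go records above all come from one loop. Kubelet's volume reconciler diffs its desired state of the world (volumes required by pods currently assigned to the node) against its actual state (volumes still mounted from before the restart), then starts UnmountVolume for stale entries and MountVolume / VerifyControllerAttachedVolume for new ones. A compressed sketch of that diff, with deliberately simplified types (an illustration, not kubelet's code):

```go
package main

import "fmt"

// volume is a tiny stand-in for kubelet's mounted-volume records; the
// real state tracking is far richer (mounters, device paths, SELinux
// contexts, outer/inner spec names, ...).
type volume struct{ name, podUID string }

// reconcile sketches one pass of the volume reconciler: unmount what is
// mounted but no longer desired, then mount what is desired but absent.
func reconcile(desired, actual map[string]volume) {
	for key, v := range actual {
		if _, ok := desired[key]; !ok {
			fmt.Printf("operationExecutor.UnmountVolume started for volume %q pod %q\n", v.name, v.podUID)
		}
	}
	for key, v := range desired {
		if _, ok := actual[key]; !ok {
			fmt.Printf("operationExecutor.MountVolume started for volume %q pod %q\n", v.name, v.podUID)
		}
	}
}

func main() {
	// After a restart the actual state still holds the old pods' volumes,
	// while the desired state lists the pods being started (sample data
	// borrowed from the log above).
	actual := map[string]volume{
		"secret/09ae3b1a-etcd-client": {"etcd-client", "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"},
	}
	desired := map[string]volume{
		"secret/37a5e44f-metrics-tls": {"metrics-tls", "37a5e44f-9a88-4405-be8a-b645485e7312"},
	}
	reconcile(desired, actual)
}
```

That diff is why the journal shows a burst of unmounts for old pod UIDs interleaved with mounts for the network-operator, network-node-identity, and iptables-alerter pods being brought up.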
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367538 4811 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367553 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367564 4811 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367602 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367611 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367619 4811 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367629 4811 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367638 4811 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367648 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367679 4811 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367689 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367699 4811 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367707 4811 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367716 4811 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367726 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367754 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367766 4811 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367774 4811 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367784 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367793 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367802 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367811 4811 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367839 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367848 4811 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367857 4811 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367866 4811 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367876 4811 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367885 4811 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367894 4811 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367904 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367914 4811 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367924 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367934 4811 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367942 4811 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367952 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367960 4811 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367969 4811 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367977 4811 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367986 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.367995 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368063 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368074 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368083 4811 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368092 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368102 4811 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368111 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368119 4811 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368128 4811 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368137 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368146 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368155 4811 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\""
Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368165 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\""
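Editor's note: the long reconciler_common.go:293 "Volume detached" run is the bookkeeping tail of the same reconcile loop. Once an unmount completes, the volume is dropped from the actual state of the world, and the empty DevicePath \"\" is expected here: secret, configmap, projected, and empty-dir volumes have no backing block device. A toy sketch of that state-removal step (illustrative structure only, not kubelet's cache implementation):

```go
package main

import "fmt"

// actualState is a toy stand-in for kubelet's actual-state-of-the-world
// cache; markVolumeDetached mirrors what stands behind each
// reconciler_common.go:293 "Volume detached ... DevicePath" line.
type actualState struct {
	devicePaths map[string]string // unique volume name -> device path
}

func (a *actualState) markVolumeDetached(uniqueName, node string) {
	devicePath := a.devicePaths[uniqueName] // "" for secret/configmap/projected/empty-dir
	delete(a.devicePaths, uniqueName)
	fmt.Printf("Volume detached for volume %q on node %q DevicePath %q\n", uniqueName, node, devicePath)
}

func main() {
	a := &actualState{devicePaths: map[string]string{
		"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client": "",
	}}
	a.markVolumeDetached("kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client", "crc")
}
```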
for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368174 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368182 4811 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368192 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368200 4811 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368210 4811 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368219 4811 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368229 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368239 4811 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368247 4811 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368256 4811 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368264 4811 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368273 4811 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368282 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: 
\"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368290 4811 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368299 4811 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368309 4811 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368318 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368327 4811 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368337 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368345 4811 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368354 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368364 4811 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368373 4811 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368381 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368390 4811 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368398 4811 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368407 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368417 4811 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368439 4811 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.368692 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.369285 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.369630 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.369646 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.369702 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.369833 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.362575 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.369943 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.370173 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.370200 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.370309 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.362932 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.363112 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). 
InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.370575 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.370884 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.371197 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.371252 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.372259 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.372287 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.372706 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.372837 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.373028 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.373264 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.373534 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.373654 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.373867 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.374055 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.374294 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.374619 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.375580 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.375693 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.375814 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.375960 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.375998 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.375986 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: E0128 15:45:28.376050 4811 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 28 15:45:28 crc kubenswrapper[4811]: E0128 15:45:28.376118 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-28 15:45:28.876097838 +0000 UTC m=+21.630461501 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.376115 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.376220 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.376521 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.376631 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.376678 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.376748 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.377200 4811 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.377304 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.377569 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.377904 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.378155 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.378401 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.378991 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.379401 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.379874 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.379941 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.380066 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.380321 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.380557 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.381005 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":fals
e,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":
\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: E0128 15:45:28.381155 4811 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 28 15:45:28 crc kubenswrapper[4811]: E0128 15:45:28.381214 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-28 15:45:28.8811968 +0000 UTC m=+21.635560473 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.381478 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.381603 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.383146 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.383650 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.384868 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.385662 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.386859 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.387973 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.389004 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.389105 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: E0128 15:45:28.389173 4811 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 28 15:45:28 crc kubenswrapper[4811]: E0128 15:45:28.389235 4811 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 28 15:45:28 crc kubenswrapper[4811]: E0128 15:45:28.389251 4811 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 28 15:45:28 crc kubenswrapper[4811]: E0128 15:45:28.389310 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-28 15:45:28.889293983 +0000 UTC m=+21.643657656 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.390099 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.392077 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.393849 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.394886 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.394939 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: E0128 15:45:28.394981 4811 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.395096 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.395690 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: E0128 15:45:28.395726 4811 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.395734 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: E0128 15:45:28.395742 4811 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 28 15:45:28 crc kubenswrapper[4811]: E0128 15:45:28.395825 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-28 15:45:28.895805224 +0000 UTC m=+21.650168887 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.395206 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.395385 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.395396 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.395490 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.395142 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.395924 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.396616 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.396749 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.396937 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.397042 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.397740 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.398131 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.395356 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.403556 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.406044 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.410626 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.410801 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.410861 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.411204 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.411629 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.416064 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.429643 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.430004 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.436384 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.440408 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.443002 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.447723 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.449023 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.449595 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.452025 4811 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65" exitCode=255 Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.452129 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65"} Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.452264 4811 scope.go:117] "RemoveContainer" containerID="eaf28051c6dd6fb40fc70f326e3ceaf3f5ca7979c85dd4e89feca173ce76477a" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.454194 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.462682 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.469275 4811 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.469417 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: 
\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.469458 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-system-cni-dir\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.469474 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ghjz7\" (UniqueName: \"kubernetes.io/projected/41421640-f722-4edf-a0d5-b120f095246a-kube-api-access-ghjz7\") pod \"multus-additional-cni-plugins-fbkfh\" (UID: \"41421640-f722-4edf-a0d5-b120f095246a\") " pod="openshift-multus/multus-additional-cni-plugins-fbkfh" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.469493 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbq5n\" (UniqueName: \"kubernetes.io/projected/c5ca7bfb-61e6-40f3-9d7d-7995b948593b-kube-api-access-cbq5n\") pod \"node-resolver-jxfkb\" (UID: \"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\") " pod="openshift-dns/node-resolver-jxfkb" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.469507 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/c5ca7bfb-61e6-40f3-9d7d-7995b948593b-hosts-file\") pod \"node-resolver-jxfkb\" (UID: \"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\") " pod="openshift-dns/node-resolver-jxfkb" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.469522 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/dc61e84d-a003-46db-924b-7f9ba7460f13-cni-binary-copy\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.469537 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-host-var-lib-kubelet\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.469551 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d2a0331a-ea7c-4888-a47e-e73015ab42e0-mcd-auth-proxy-config\") pod \"machine-config-daemon-4vcr6\" (UID: \"d2a0331a-ea7c-4888-a47e-e73015ab42e0\") " pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.469567 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/41421640-f722-4edf-a0d5-b120f095246a-tuning-conf-dir\") pod \"multus-additional-cni-plugins-fbkfh\" (UID: \"41421640-f722-4edf-a0d5-b120f095246a\") " pod="openshift-multus/multus-additional-cni-plugins-fbkfh" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.469581 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: 
\"kubernetes.io/host-path/d2a0331a-ea7c-4888-a47e-e73015ab42e0-rootfs\") pod \"machine-config-daemon-4vcr6\" (UID: \"d2a0331a-ea7c-4888-a47e-e73015ab42e0\") " pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.469595 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/41421640-f722-4edf-a0d5-b120f095246a-os-release\") pod \"multus-additional-cni-plugins-fbkfh\" (UID: \"41421640-f722-4edf-a0d5-b120f095246a\") " pod="openshift-multus/multus-additional-cni-plugins-fbkfh" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.469610 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-host-run-k8s-cni-cncf-io\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.469623 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-host-run-netns\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.469639 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/dc61e84d-a003-46db-924b-7f9ba7460f13-multus-daemon-config\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.469653 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d2a0331a-ea7c-4888-a47e-e73015ab42e0-proxy-tls\") pod \"machine-config-daemon-4vcr6\" (UID: \"d2a0331a-ea7c-4888-a47e-e73015ab42e0\") " pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.469475 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.469666 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/41421640-f722-4edf-a0d5-b120f095246a-cni-binary-copy\") pod \"multus-additional-cni-plugins-fbkfh\" (UID: \"41421640-f722-4edf-a0d5-b120f095246a\") " pod="openshift-multus/multus-additional-cni-plugins-fbkfh" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.469714 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-etc-kubernetes\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.469714 4811 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/c5ca7bfb-61e6-40f3-9d7d-7995b948593b-hosts-file\") pod \"node-resolver-jxfkb\" (UID: \"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\") " pod="openshift-dns/node-resolver-jxfkb" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.469775 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/41421640-f722-4edf-a0d5-b120f095246a-system-cni-dir\") pod \"multus-additional-cni-plugins-fbkfh\" (UID: \"41421640-f722-4edf-a0d5-b120f095246a\") " pod="openshift-multus/multus-additional-cni-plugins-fbkfh" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.469847 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-multus-socket-dir-parent\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.469868 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-hostroot\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.469883 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-host-run-multus-certs\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.469913 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6zfs\" (UniqueName: \"kubernetes.io/projected/d2a0331a-ea7c-4888-a47e-e73015ab42e0-kube-api-access-v6zfs\") pod \"machine-config-daemon-4vcr6\" (UID: \"d2a0331a-ea7c-4888-a47e-e73015ab42e0\") " pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.469945 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.469974 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-host-var-lib-cni-multus\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.470007 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-multus-conf-dir\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.470029 
4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/41421640-f722-4edf-a0d5-b120f095246a-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-fbkfh\" (UID: \"41421640-f722-4edf-a0d5-b120f095246a\") " pod="openshift-multus/multus-additional-cni-plugins-fbkfh" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.470085 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/41421640-f722-4edf-a0d5-b120f095246a-cnibin\") pod \"multus-additional-cni-plugins-fbkfh\" (UID: \"41421640-f722-4edf-a0d5-b120f095246a\") " pod="openshift-multus/multus-additional-cni-plugins-fbkfh" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.470114 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-cnibin\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.470129 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-host-var-lib-cni-bin\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.470144 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zklcw\" (UniqueName: \"kubernetes.io/projected/dc61e84d-a003-46db-924b-7f9ba7460f13-kube-api-access-zklcw\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.470171 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-multus-cni-dir\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.470187 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-os-release\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.470479 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.470519 4811 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.470532 4811 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.470544 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.470554 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.470563 4811 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471126 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471154 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471167 4811 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471178 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471293 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471305 4811 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471314 4811 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471323 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471333 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471343 4811 reconciler_common.go:293] "Volume detached for volume 
\"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471352 4811 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471364 4811 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471375 4811 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471385 4811 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471398 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471426 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471455 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471472 4811 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471482 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471493 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471505 4811 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471515 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471527 4811 reconciler_common.go:293] "Volume detached for 
volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471538 4811 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471547 4811 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471555 4811 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471563 4811 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471572 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471581 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471589 4811 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471596 4811 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471604 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471613 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471621 4811 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471629 4811 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 
15:45:28.471637 4811 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471645 4811 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471652 4811 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471660 4811 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471669 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471678 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471689 4811 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471702 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471712 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471722 4811 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471730 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471737 4811 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471745 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471753 4811 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471760 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471767 4811 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471775 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471782 4811 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471790 4811 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471797 4811 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471805 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471813 4811 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471821 4811 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471828 4811 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471837 4811 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471845 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471854 4811 reconciler_common.go:293] "Volume 
detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471862 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471871 4811 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471879 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471886 4811 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471893 4811 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471900 4811 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471908 4811 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471915 4811 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471923 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471930 4811 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471938 4811 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471946 4811 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471954 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: 
\"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471961 4811 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471969 4811 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471977 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471985 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.471992 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.472000 4811 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.472008 4811 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.472016 4811 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.472024 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.472031 4811 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.472038 4811 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.472046 4811 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.472054 4811 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" 
(UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.472063 4811 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.472072 4811 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.472079 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.472087 4811 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.472095 4811 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.472104 4811 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.472112 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.472120 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.472129 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.472136 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.472144 4811 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.472153 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.472161 4811 reconciler_common.go:293] "Volume detached for volume 
\"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.472194 4811 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.472204 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.476275 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.482787 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.482906 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbq5n\" (UniqueName: \"kubernetes.io/projected/c5ca7bfb-61e6-40f3-9d7d-7995b948593b-kube-api-access-cbq5n\") pod \"node-resolver-jxfkb\" (UID: \"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\") " pod="openshift-dns/node-resolver-jxfkb" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.490706 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.501604 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.512578 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.524482 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.539264 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.548906 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.559099 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.570717 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573017 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-etc-kubernetes\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573072 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/41421640-f722-4edf-a0d5-b120f095246a-system-cni-dir\") pod \"multus-additional-cni-plugins-fbkfh\" (UID: \"41421640-f722-4edf-a0d5-b120f095246a\") " pod="openshift-multus/multus-additional-cni-plugins-fbkfh" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573106 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-multus-socket-dir-parent\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573128 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-hostroot\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573150 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-host-run-multus-certs\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573172 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6zfs\" (UniqueName: \"kubernetes.io/projected/d2a0331a-ea7c-4888-a47e-e73015ab42e0-kube-api-access-v6zfs\") pod \"machine-config-daemon-4vcr6\" (UID: \"d2a0331a-ea7c-4888-a47e-e73015ab42e0\") " pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573166 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-etc-kubernetes\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573193 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-host-var-lib-cni-multus\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573209 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-hostroot\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573254 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-host-run-multus-certs\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573179 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/41421640-f722-4edf-a0d5-b120f095246a-system-cni-dir\") pod \"multus-additional-cni-plugins-fbkfh\" (UID: \"41421640-f722-4edf-a0d5-b120f095246a\") " pod="openshift-multus/multus-additional-cni-plugins-fbkfh" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573263 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-host-var-lib-cni-multus\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573213 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-multus-conf-dir\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573311 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-multus-socket-dir-parent\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573314 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/41421640-f722-4edf-a0d5-b120f095246a-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-fbkfh\" (UID: \"41421640-f722-4edf-a0d5-b120f095246a\") " pod="openshift-multus/multus-additional-cni-plugins-fbkfh" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573458 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-multus-conf-dir\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " 
pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573477 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zklcw\" (UniqueName: \"kubernetes.io/projected/dc61e84d-a003-46db-924b-7f9ba7460f13-kube-api-access-zklcw\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573509 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/41421640-f722-4edf-a0d5-b120f095246a-cnibin\") pod \"multus-additional-cni-plugins-fbkfh\" (UID: \"41421640-f722-4edf-a0d5-b120f095246a\") " pod="openshift-multus/multus-additional-cni-plugins-fbkfh" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573542 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-cnibin\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573562 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-host-var-lib-cni-bin\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573585 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-multus-cni-dir\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573604 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-os-release\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573624 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-system-cni-dir\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573639 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-host-var-lib-cni-bin\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573644 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ghjz7\" (UniqueName: \"kubernetes.io/projected/41421640-f722-4edf-a0d5-b120f095246a-kube-api-access-ghjz7\") pod \"multus-additional-cni-plugins-fbkfh\" (UID: \"41421640-f722-4edf-a0d5-b120f095246a\") " pod="openshift-multus/multus-additional-cni-plugins-fbkfh" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573666 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" 
(UniqueName: \"kubernetes.io/host-path/41421640-f722-4edf-a0d5-b120f095246a-cnibin\") pod \"multus-additional-cni-plugins-fbkfh\" (UID: \"41421640-f722-4edf-a0d5-b120f095246a\") " pod="openshift-multus/multus-additional-cni-plugins-fbkfh" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573670 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/dc61e84d-a003-46db-924b-7f9ba7460f13-cni-binary-copy\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573691 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-host-var-lib-kubelet\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573701 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-cnibin\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573711 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d2a0331a-ea7c-4888-a47e-e73015ab42e0-mcd-auth-proxy-config\") pod \"machine-config-daemon-4vcr6\" (UID: \"d2a0331a-ea7c-4888-a47e-e73015ab42e0\") " pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573731 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-system-cni-dir\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573733 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/41421640-f722-4edf-a0d5-b120f095246a-tuning-conf-dir\") pod \"multus-additional-cni-plugins-fbkfh\" (UID: \"41421640-f722-4edf-a0d5-b120f095246a\") " pod="openshift-multus/multus-additional-cni-plugins-fbkfh" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573757 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/d2a0331a-ea7c-4888-a47e-e73015ab42e0-rootfs\") pod \"machine-config-daemon-4vcr6\" (UID: \"d2a0331a-ea7c-4888-a47e-e73015ab42e0\") " pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573818 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/41421640-f722-4edf-a0d5-b120f095246a-os-release\") pod \"multus-additional-cni-plugins-fbkfh\" (UID: \"41421640-f722-4edf-a0d5-b120f095246a\") " pod="openshift-multus/multus-additional-cni-plugins-fbkfh" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573844 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: 
\"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-host-run-k8s-cni-cncf-io\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573863 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-host-run-netns\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573883 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-multus-cni-dir\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573884 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/dc61e84d-a003-46db-924b-7f9ba7460f13-multus-daemon-config\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573916 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d2a0331a-ea7c-4888-a47e-e73015ab42e0-proxy-tls\") pod \"machine-config-daemon-4vcr6\" (UID: \"d2a0331a-ea7c-4888-a47e-e73015ab42e0\") " pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573920 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/41421640-f722-4edf-a0d5-b120f095246a-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-fbkfh\" (UID: \"41421640-f722-4edf-a0d5-b120f095246a\") " pod="openshift-multus/multus-additional-cni-plugins-fbkfh" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.573933 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/41421640-f722-4edf-a0d5-b120f095246a-cni-binary-copy\") pod \"multus-additional-cni-plugins-fbkfh\" (UID: \"41421640-f722-4edf-a0d5-b120f095246a\") " pod="openshift-multus/multus-additional-cni-plugins-fbkfh" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.574516 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/41421640-f722-4edf-a0d5-b120f095246a-cni-binary-copy\") pod \"multus-additional-cni-plugins-fbkfh\" (UID: \"41421640-f722-4edf-a0d5-b120f095246a\") " pod="openshift-multus/multus-additional-cni-plugins-fbkfh" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.574623 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/dc61e84d-a003-46db-924b-7f9ba7460f13-multus-daemon-config\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.574647 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-os-release\") pod \"multus-mw4th\" 
(UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.574675 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/d2a0331a-ea7c-4888-a47e-e73015ab42e0-rootfs\") pod \"machine-config-daemon-4vcr6\" (UID: \"d2a0331a-ea7c-4888-a47e-e73015ab42e0\") " pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.574728 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/41421640-f722-4edf-a0d5-b120f095246a-tuning-conf-dir\") pod \"multus-additional-cni-plugins-fbkfh\" (UID: \"41421640-f722-4edf-a0d5-b120f095246a\") " pod="openshift-multus/multus-additional-cni-plugins-fbkfh" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.574736 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/41421640-f722-4edf-a0d5-b120f095246a-os-release\") pod \"multus-additional-cni-plugins-fbkfh\" (UID: \"41421640-f722-4edf-a0d5-b120f095246a\") " pod="openshift-multus/multus-additional-cni-plugins-fbkfh" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.574770 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-host-var-lib-kubelet\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.574773 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-host-run-k8s-cni-cncf-io\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.574797 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/dc61e84d-a003-46db-924b-7f9ba7460f13-host-run-netns\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.575288 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d2a0331a-ea7c-4888-a47e-e73015ab42e0-mcd-auth-proxy-config\") pod \"machine-config-daemon-4vcr6\" (UID: \"d2a0331a-ea7c-4888-a47e-e73015ab42e0\") " pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.575376 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/dc61e84d-a003-46db-924b-7f9ba7460f13-cni-binary-copy\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.577181 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d2a0331a-ea7c-4888-a47e-e73015ab42e0-proxy-tls\") pod \"machine-config-daemon-4vcr6\" (UID: \"d2a0331a-ea7c-4888-a47e-e73015ab42e0\") " pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" Jan 28 15:45:28 
crc kubenswrapper[4811]: I0128 15:45:28.579812 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.586287 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.588314 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6zfs\" (UniqueName: \"kubernetes.io/projected/d2a0331a-ea7c-4888-a47e-e73015ab42e0-kube-api-access-v6zfs\") pod \"machine-config-daemon-4vcr6\" (UID: \"d2a0331a-ea7c-4888-a47e-e73015ab42e0\") " pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.589275 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ghjz7\" (UniqueName: \"kubernetes.io/projected/41421640-f722-4edf-a0d5-b120f095246a-kube-api-access-ghjz7\") pod \"multus-additional-cni-plugins-fbkfh\" (UID: \"41421640-f722-4edf-a0d5-b120f095246a\") " pod="openshift-multus/multus-additional-cni-plugins-fbkfh" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.591808 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-zklcw\" (UniqueName: \"kubernetes.io/projected/dc61e84d-a003-46db-924b-7f9ba7460f13-kube-api-access-zklcw\") pod \"multus-mw4th\" (UID: \"dc61e84d-a003-46db-924b-7f9ba7460f13\") " pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.595151 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.599264 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.599310 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.609017 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.609084 4811 scope.go:117] "RemoveContainer" containerID="9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.609138 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 28 15:45:28 crc kubenswrapper[4811]: E0128 15:45:28.609255 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Jan 28 15:45:28 crc kubenswrapper[4811]: W0128 15:45:28.611422 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-e091ae39bb381913eac16b5559a8968dabc66d4f9cbe76436ab60fdf845404cc WatchSource:0}: Error finding container e091ae39bb381913eac16b5559a8968dabc66d4f9cbe76436ab60fdf845404cc: Status 404 returned error can't find the container with id e091ae39bb381913eac16b5559a8968dabc66d4f9cbe76436ab60fdf845404cc Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.617703 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.617932 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.627803 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-jxfkb" Jan 28 15:45:28 crc kubenswrapper[4811]: W0128 15:45:28.633618 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-99123cfc8f88a3aa6db192403b6d77a2ee26320a0fe2a8190ca5eff88c698e79 WatchSource:0}: Error finding container 99123cfc8f88a3aa6db192403b6d77a2ee26320a0fe2a8190ca5eff88c698e79: Status 404 returned error can't find the container with id 99123cfc8f88a3aa6db192403b6d77a2ee26320a0fe2a8190ca5eff88c698e79 Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.637015 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-mw4th" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.637163 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.644543 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.652587 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.681975 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.714781 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-dkzf6"] Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.715504 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.715855 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\
"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.721339 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.721353 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.721554 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.721571 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.721692 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.721844 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.744733 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.745744 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.775982 4811 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-01-28 15:40:27 +0000 UTC, rotation deadline is 2026-12-22 00:58:01.012995446 +0000 UTC Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.776041 4811 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7857h12m32.23695735s for next certificate rotation Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.777743 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.789688 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.799123 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.811050 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.820965 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.831901 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.846989 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.857764 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.873517 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.875910 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.876006 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-run-systemd\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.876027 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-node-log\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.876041 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d3371905-8bb6-433c-b905-43d69e7b382a-ovnkube-script-lib\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.876060 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d3371905-8bb6-433c-b905-43d69e7b382a-env-overrides\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: E0128 15:45:28.876089 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:45:29.876062736 +0000 UTC m=+22.630426319 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.876124 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d3371905-8bb6-433c-b905-43d69e7b382a-ovn-node-metrics-cert\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.876187 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.876227 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-systemd-units\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.876244 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-run-openvswitch\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.876267 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-run-ovn-kubernetes\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.876287 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.876305 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-kubelet\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.876320 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" 
(UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-run-ovn\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.876336 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-cni-bin\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.876363 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j4mzj\" (UniqueName: \"kubernetes.io/projected/d3371905-8bb6-433c-b905-43d69e7b382a-kube-api-access-j4mzj\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.876378 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-log-socket\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.876406 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-slash\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: E0128 15:45:28.876415 4811 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.876537 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-var-lib-openvswitch\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: E0128 15:45:28.876580 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-28 15:45:29.87656912 +0000 UTC m=+22.630932783 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.876604 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-etc-openvswitch\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.876631 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-cni-netd\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.876657 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-run-netns\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.876672 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d3371905-8bb6-433c-b905-43d69e7b382a-ovnkube-config\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.884277 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.894099 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.907663 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://eaf28051c6dd6fb40fc70f326e3ceaf3f5ca7979c85dd4e89feca173ce76477a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"message\\\":\\\"W0128 15:45:11.342750 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0128 15:45:11.343609 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769615111 cert, and key in /tmp/serving-cert-2493861668/serving-signer.crt, /tmp/serving-cert-2493861668/serving-signer.key\\\\nI0128 15:45:11.825541 1 observer_polling.go:159] Starting file observer\\\\nW0128 15:45:11.829193 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0128 15:45:11.829311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0128 15:45:11.832273 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2493861668/tls.crt::/tmp/serving-cert-2493861668/tls.key\\\\\\\"\\\\nF0128 15:45:11.961001 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for 
client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.928038 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b
243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.940230 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.951572 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.977881 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-run-systemd\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.977918 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-node-log\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.977937 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d3371905-8bb6-433c-b905-43d69e7b382a-env-overrides\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.977956 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d3371905-8bb6-433c-b905-43d69e7b382a-ovn-node-metrics-cert\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.977973 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d3371905-8bb6-433c-b905-43d69e7b382a-ovnkube-script-lib\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.978028 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.978051 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: 
\"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.978068 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-systemd-units\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.978085 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-run-systemd\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.978140 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-run-openvswitch\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.978085 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-node-log\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.978106 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-run-openvswitch\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.978179 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-run-ovn-kubernetes\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.978204 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-kubelet\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.978220 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-run-ovn\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.978233 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-cni-bin\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc 
kubenswrapper[4811]: I0128 15:45:28.978250 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:45:28 crc kubenswrapper[4811]: E0128 15:45:28.978264 4811 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 28 15:45:28 crc kubenswrapper[4811]: E0128 15:45:28.978288 4811 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 28 15:45:28 crc kubenswrapper[4811]: E0128 15:45:28.978301 4811 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.978382 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-systemd-units\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.978421 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-run-ovn-kubernetes\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.978468 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-kubelet\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: E0128 15:45:28.978479 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-28 15:45:29.978458552 +0000 UTC m=+22.732822225 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.978522 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.978499 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-run-ovn\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.978268 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j4mzj\" (UniqueName: \"kubernetes.io/projected/d3371905-8bb6-433c-b905-43d69e7b382a-kube-api-access-j4mzj\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.978572 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-cni-bin\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.978584 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-slash\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.978610 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-log-socket\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: E0128 15:45:28.978634 4811 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.978647 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:45:28 crc kubenswrapper[4811]: E0128 15:45:28.978671 4811 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-28 15:45:29.978659888 +0000 UTC m=+22.733023471 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.978689 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-etc-openvswitch\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: E0128 15:45:28.978728 4811 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 28 15:45:28 crc kubenswrapper[4811]: E0128 15:45:28.978748 4811 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 28 15:45:28 crc kubenswrapper[4811]: E0128 15:45:28.978758 4811 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.978763 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-cni-netd\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.978785 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-etc-openvswitch\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: E0128 15:45:28.978787 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-28 15:45:29.978778121 +0000 UTC m=+22.733141794 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.978733 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-cni-netd\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.978814 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-var-lib-openvswitch\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.978823 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-slash\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.978832 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-run-netns\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.978848 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d3371905-8bb6-433c-b905-43d69e7b382a-ovnkube-config\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.978859 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-log-socket\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.979040 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-run-netns\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.979500 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-var-lib-openvswitch\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.980029 4811 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d3371905-8bb6-433c-b905-43d69e7b382a-ovnkube-config\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.980101 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d3371905-8bb6-433c-b905-43d69e7b382a-env-overrides\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.981132 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d3371905-8bb6-433c-b905-43d69e7b382a-ovnkube-script-lib\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.984036 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: 
connect: connection refused" Jan 28 15:45:28 crc kubenswrapper[4811]: I0128 15:45:28.985734 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d3371905-8bb6-433c-b905-43d69e7b382a-ovn-node-metrics-cert\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.014904 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j4mzj\" (UniqueName: \"kubernetes.io/projected/d3371905-8bb6-433c-b905-43d69e7b382a-kube-api-access-j4mzj\") pod \"ovnkube-node-dkzf6\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.053583 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:29 crc kubenswrapper[4811]: W0128 15:45:29.065365 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd3371905_8bb6_433c_b905_43d69e7b382a.slice/crio-26b4e82fe90c5d4c9d3413006a47ff41874d0e2d6bf1c08ea4204ff1ee39de15 WatchSource:0}: Error finding container 26b4e82fe90c5d4c9d3413006a47ff41874d0e2d6bf1c08ea4204ff1ee39de15: Status 404 returned error can't find the container with id 26b4e82fe90c5d4c9d3413006a47ff41874d0e2d6bf1c08ea4204ff1ee39de15 Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.293629 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 06:38:52.865964462 +0000 UTC Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.456244 4811 generic.go:334] "Generic (PLEG): container finished" podID="41421640-f722-4edf-a0d5-b120f095246a" containerID="247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d" exitCode=0 Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.456331 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" event={"ID":"41421640-f722-4edf-a0d5-b120f095246a","Type":"ContainerDied","Data":"247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d"} Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.456400 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" event={"ID":"41421640-f722-4edf-a0d5-b120f095246a","Type":"ContainerStarted","Data":"8012b4445bf0c3a1665d6db38aaf68616526322360ab88ffdab57c6658a6edb8"} Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.461796 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.464808 4811 scope.go:117] "RemoveContainer" containerID="9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65" Jan 28 15:45:29 crc kubenswrapper[4811]: E0128 15:45:29.464975 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" 
podUID="f4b27818a5e8e43d0dc095d08835c792" Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.466311 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54"} Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.466343 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7"} Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.466355 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"544326008e315ece3de289fba53b40a88f9d49546e4113f0a01265f818007fd1"} Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.467093 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"99123cfc8f88a3aa6db192403b6d77a2ee26320a0fe2a8190ca5eff88c698e79"} Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.468109 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-mw4th" event={"ID":"dc61e84d-a003-46db-924b-7f9ba7460f13","Type":"ContainerStarted","Data":"45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a"} Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.468141 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-mw4th" event={"ID":"dc61e84d-a003-46db-924b-7f9ba7460f13","Type":"ContainerStarted","Data":"e9826bfcaf29687b34eabd9f667e904c1f6ca5cec8ff75e1e38d785a341111b6"} Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.470827 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723"} Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.470877 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"e091ae39bb381913eac16b5559a8968dabc66d4f9cbe76436ab60fdf845404cc"} Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.471558 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.472382 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-jxfkb" event={"ID":"c5ca7bfb-61e6-40f3-9d7d-7995b948593b","Type":"ContainerStarted","Data":"8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79"} Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.472408 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-jxfkb" event={"ID":"c5ca7bfb-61e6-40f3-9d7d-7995b948593b","Type":"ContainerStarted","Data":"267bcd73ae9f36f42ce1928e72d969ad2a5a08a9cf9e2f0808d7a92ffb5a59a3"} Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.474145 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" 
event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1"} Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.474192 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825"} Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.474205 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"0e9da8291366eca81d03fb9ed906a1f69d901462365eba99dd4582e96cb65668"} Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.475845 4811 generic.go:334] "Generic (PLEG): container finished" podID="d3371905-8bb6-433c-b905-43d69e7b382a" containerID="b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c" exitCode=0 Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.475954 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" event={"ID":"d3371905-8bb6-433c-b905-43d69e7b382a","Type":"ContainerDied","Data":"b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c"} Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.476012 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" event={"ID":"d3371905-8bb6-433c-b905-43d69e7b382a","Type":"ContainerStarted","Data":"26b4e82fe90c5d4c9d3413006a47ff41874d0e2d6bf1c08ea4204ff1ee39de15"} Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.484269 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:29 crc kubenswrapper[4811]: E0128 15:45:29.485229 4811 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"etcd-crc\" already exists" pod="openshift-etcd/etcd-crc" Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.495250 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: 
connection refused" Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.505925 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaf28051c6dd6fb40fc70f326e3ceaf3f5ca7979c85dd4e89feca173ce76477a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"message\\\":\\\"W0128 15:45:11.342750 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0128 15:45:11.343609 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769615111 cert, and key in /tmp/serving-cert-2493861668/serving-signer.crt, /tmp/serving-cert-2493861668/serving-signer.key\\\\nI0128 15:45:11.825541 1 observer_polling.go:159] Starting file observer\\\\nW0128 15:45:11.829193 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0128 15:45:11.829311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0128 15:45:11.832273 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2493861668/tls.crt::/tmp/serving-cert-2493861668/tls.key\\\\\\\"\\\\nF0128 15:45:11.961001 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.522154 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"sta
rted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\"
:{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.534218 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:29Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.547464 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:29Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.562079 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:29Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:29 crc 
kubenswrapper[4811]: I0128 15:45:29.573525 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:29Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.596424 4811 status_manager.go:875] "Failed 
to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"sta
te\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\
"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:29Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.612747 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:29Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.628917 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:29Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.646379 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:29Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.665210 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b
243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:29Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.679182 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:29Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.692001 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:29Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.702156 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-28T15:45:29Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.746694 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":tr
ue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:29Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.785698 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:29Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.824173 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:29Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.865938 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:29Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.889019 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.889115 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:45:29 crc kubenswrapper[4811]: E0128 15:45:29.889205 4811 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 28 15:45:29 crc kubenswrapper[4811]: E0128 15:45:29.889247 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:45:31.88921983 +0000 UTC m=+24.643583413 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:45:29 crc kubenswrapper[4811]: E0128 15:45:29.889275 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-28 15:45:31.889267871 +0000 UTC m=+24.643631454 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.907691 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:29Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:29 crc 
kubenswrapper[4811]: I0128 15:45:29.945206 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-28T15:45:29Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.990678 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.990754 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.990780 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:45:29 crc kubenswrapper[4811]: E0128 15:45:29.990887 4811 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 28 15:45:29 crc kubenswrapper[4811]: E0128 15:45:29.990903 4811 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 28 15:45:29 crc kubenswrapper[4811]: E0128 15:45:29.990913 4811 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 28 15:45:29 crc kubenswrapper[4811]: E0128 15:45:29.990930 4811 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 28 15:45:29 crc kubenswrapper[4811]: E0128 15:45:29.990963 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-28 15:45:31.990949637 +0000 UTC m=+24.745313220 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 28 15:45:29 crc kubenswrapper[4811]: E0128 15:45:29.990969 4811 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 28 15:45:29 crc kubenswrapper[4811]: E0128 15:45:29.990972 4811 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 28 15:45:29 crc kubenswrapper[4811]: E0128 15:45:29.991072 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-28 15:45:31.99104886 +0000 UTC m=+24.745412473 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 28 15:45:29 crc kubenswrapper[4811]: E0128 15:45:29.990984 4811 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 28 15:45:29 crc kubenswrapper[4811]: E0128 15:45:29.991143 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-28 15:45:31.991134042 +0000 UTC m=+24.745497725 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 28 15:45:29 crc kubenswrapper[4811]: I0128 15:45:29.995694 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:29Z 
is after 2025-08-24T17:21:41Z" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.024371 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:30Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.069611 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:30Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.294610 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 04:09:01.575174042 +0000 UTC Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.339277 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.339315 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.339277 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:45:30 crc kubenswrapper[4811]: E0128 15:45:30.339404 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:45:30 crc kubenswrapper[4811]: E0128 15:45:30.339515 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:45:30 crc kubenswrapper[4811]: E0128 15:45:30.339628 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.344121 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.344940 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.346312 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.347137 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.348348 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.348998 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.349723 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 
15:45:30.350908 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.351729 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.352745 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.353367 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.355020 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.355945 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.356593 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.357793 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.358465 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.359740 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.360191 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.361580 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.362875 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.363462 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.364582 4811 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.365584 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.367998 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.369701 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.370412 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.372766 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.373313 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.374115 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.375494 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.376091 4811 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.376218 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.379166 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.379819 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.380417 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.382967 4811 kubelet_volumes.go:163] "Cleaned up 
orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.384270 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.384964 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.386401 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.387732 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.388357 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.389539 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.390338 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.391786 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.392417 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.393897 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.394912 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.396972 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.397623 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.398389 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.399014 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.399679 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.400302 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.400930 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.481842 4811 generic.go:334] "Generic (PLEG): container finished" podID="41421640-f722-4edf-a0d5-b120f095246a" containerID="779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd" exitCode=0 Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.481900 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" event={"ID":"41421640-f722-4edf-a0d5-b120f095246a","Type":"ContainerDied","Data":"779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd"} Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.485759 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" event={"ID":"d3371905-8bb6-433c-b905-43d69e7b382a","Type":"ContainerStarted","Data":"70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265"} Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.485782 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" event={"ID":"d3371905-8bb6-433c-b905-43d69e7b382a","Type":"ContainerStarted","Data":"3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6"} Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.485792 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" event={"ID":"d3371905-8bb6-433c-b905-43d69e7b382a","Type":"ContainerStarted","Data":"360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77"} Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.485801 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" event={"ID":"d3371905-8bb6-433c-b905-43d69e7b382a","Type":"ContainerStarted","Data":"13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c"} Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.485811 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" event={"ID":"d3371905-8bb6-433c-b905-43d69e7b382a","Type":"ContainerStarted","Data":"575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698"} Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.485820 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" 
event={"ID":"d3371905-8bb6-433c-b905-43d69e7b382a","Type":"ContainerStarted","Data":"b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf"} Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.504338 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:30Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.516691 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:30Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.533015 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all 
endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:30Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.552752 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b
243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:30Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.565163 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:30Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.578173 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:30Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.595628 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-
28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:30Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.607086 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\"
:\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:30Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.626964 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:30Z 
is after 2025-08-24T17:21:41Z" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.641951 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:30Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.653163 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:30Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.668037 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:30Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:30 crc kubenswrapper[4811]: I0128 15:45:30.680365 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kub
ernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:30Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.295309 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 18:31:33.874943808 +0000 UTC Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.492118 4811 generic.go:334] "Generic (PLEG): container finished" podID="41421640-f722-4edf-a0d5-b120f095246a" containerID="f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928" exitCode=0 Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.492200 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" event={"ID":"41421640-f722-4edf-a0d5-b120f095246a","Type":"ContainerDied","Data":"f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928"} Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.493719 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52"} Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.505274 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:31Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.526139 4811 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:31Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.539230 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:31Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.576147 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:31Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.591869 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:31Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.615326 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:31Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.633895 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:31Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.647736 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:31Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.663367 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:31Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.685561 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b
243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:31Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.692255 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-fwdqb"] Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.692659 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-fwdqb" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.695005 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.695051 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.695220 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.695374 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.698797 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/sec
rets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:31Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.711351 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:31Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.725952 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-28T15:45:31Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.739419 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":tr
ue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:31Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.758818 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b
243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:31Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.771485 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:31Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.786624 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:31Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.799215 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-28T15:45:31Z is after 2025-08-24T17:21:41Z"
Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.806756 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/9c138c07-9108-4af5-b2e0-623109bbdf9c-serviceca\") pod \"node-ca-fwdqb\" (UID: \"9c138c07-9108-4af5-b2e0-623109bbdf9c\") " pod="openshift-image-registry/node-ca-fwdqb"
Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.806799 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s24s2\" (UniqueName: \"kubernetes.io/projected/9c138c07-9108-4af5-b2e0-623109bbdf9c-kube-api-access-s24s2\") pod \"node-ca-fwdqb\" (UID: \"9c138c07-9108-4af5-b2e0-623109bbdf9c\") " pod="openshift-image-registry/node-ca-fwdqb"
Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.806818 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9c138c07-9108-4af5-b2e0-623109bbdf9c-host\") pod \"node-ca-fwdqb\" (UID: \"9c138c07-9108-4af5-b2e0-623109bbdf9c\") " pod="openshift-image-registry/node-ca-fwdqb"
Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.816293 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:31Z 
is after 2025-08-24T17:21:41Z" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.828640 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:31Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.840329 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:31Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.853887 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:31Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.868937 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:31Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.879447 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"202
6-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:31Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.890186 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-28T15:45:31Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.907023 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:31Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.907121 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.907242 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:45:31 crc kubenswrapper[4811]: E0128 15:45:31.907307 4811 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 28 15:45:31 crc kubenswrapper[4811]: E0128 15:45:31.907306 
4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:45:35.907279137 +0000 UTC m=+28.661642720 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.907419 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/9c138c07-9108-4af5-b2e0-623109bbdf9c-serviceca\") pod \"node-ca-fwdqb\" (UID: \"9c138c07-9108-4af5-b2e0-623109bbdf9c\") " pod="openshift-image-registry/node-ca-fwdqb" Jan 28 15:45:31 crc kubenswrapper[4811]: E0128 15:45:31.907454 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-28 15:45:35.907418031 +0000 UTC m=+28.661781654 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.907489 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s24s2\" (UniqueName: \"kubernetes.io/projected/9c138c07-9108-4af5-b2e0-623109bbdf9c-kube-api-access-s24s2\") pod \"node-ca-fwdqb\" (UID: \"9c138c07-9108-4af5-b2e0-623109bbdf9c\") " pod="openshift-image-registry/node-ca-fwdqb" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.907518 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9c138c07-9108-4af5-b2e0-623109bbdf9c-host\") pod \"node-ca-fwdqb\" (UID: \"9c138c07-9108-4af5-b2e0-623109bbdf9c\") " pod="openshift-image-registry/node-ca-fwdqb" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.907599 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9c138c07-9108-4af5-b2e0-623109bbdf9c-host\") pod \"node-ca-fwdqb\" (UID: \"9c138c07-9108-4af5-b2e0-623109bbdf9c\") " pod="openshift-image-registry/node-ca-fwdqb" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.908378 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/9c138c07-9108-4af5-b2e0-623109bbdf9c-serviceca\") pod \"node-ca-fwdqb\" (UID: \"9c138c07-9108-4af5-b2e0-623109bbdf9c\") " pod="openshift-image-registry/node-ca-fwdqb" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.925245 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:31Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:31 crc kubenswrapper[4811]: I0128 15:45:31.927293 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s24s2\" (UniqueName: \"kubernetes.io/projected/9c138c07-9108-4af5-b2e0-623109bbdf9c-kube-api-access-s24s2\") pod \"node-ca-fwdqb\" (UID: \"9c138c07-9108-4af5-b2e0-623109bbdf9c\") " pod="openshift-image-registry/node-ca-fwdqb" Jan 28 15:45:32 crc kubenswrapper[4811]: I0128 15:45:32.006467 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-fwdqb" Jan 28 15:45:32 crc kubenswrapper[4811]: I0128 15:45:32.007877 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:45:32 crc kubenswrapper[4811]: I0128 15:45:32.007923 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:45:32 crc kubenswrapper[4811]: I0128 15:45:32.007956 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:45:32 crc kubenswrapper[4811]: E0128 15:45:32.008054 4811 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 28 15:45:32 crc kubenswrapper[4811]: E0128 15:45:32.008056 4811 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 28 15:45:32 crc kubenswrapper[4811]: E0128 15:45:32.008097 4811 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 28 15:45:32 crc kubenswrapper[4811]: E0128 15:45:32.008110 4811 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 28 15:45:32 crc kubenswrapper[4811]: E0128 15:45:32.008126 4811 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 28 15:45:32 crc kubenswrapper[4811]: E0128 15:45:32.008067 
4811 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 28 15:45:32 crc kubenswrapper[4811]: E0128 15:45:32.008181 4811 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 28 15:45:32 crc kubenswrapper[4811]: E0128 15:45:32.008166 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-28 15:45:36.008150571 +0000 UTC m=+28.762514154 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 28 15:45:32 crc kubenswrapper[4811]: E0128 15:45:32.008236 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-28 15:45:36.008219823 +0000 UTC m=+28.762583406 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 28 15:45:32 crc kubenswrapper[4811]: E0128 15:45:32.008257 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-28 15:45:36.008250873 +0000 UTC m=+28.762614456 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 28 15:45:32 crc kubenswrapper[4811]: W0128 15:45:32.021688 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9c138c07_9108_4af5_b2e0_623109bbdf9c.slice/crio-932c6dae6394629c20fb406507f8e10126ab9ee12f897231aaec996e652731c7 WatchSource:0}: Error finding container 932c6dae6394629c20fb406507f8e10126ab9ee12f897231aaec996e652731c7: Status 404 returned error can't find the container with id 932c6dae6394629c20fb406507f8e10126ab9ee12f897231aaec996e652731c7 Jan 28 15:45:32 crc kubenswrapper[4811]: I0128 15:45:32.296413 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 23:19:10.913104375 +0000 UTC Jan 28 15:45:32 crc kubenswrapper[4811]: I0128 15:45:32.338396 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:45:32 crc kubenswrapper[4811]: I0128 15:45:32.338502 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:45:32 crc kubenswrapper[4811]: I0128 15:45:32.338554 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:45:32 crc kubenswrapper[4811]: E0128 15:45:32.338581 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:45:32 crc kubenswrapper[4811]: E0128 15:45:32.338686 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:45:32 crc kubenswrapper[4811]: E0128 15:45:32.338840 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:45:32 crc kubenswrapper[4811]: I0128 15:45:32.401212 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 28 15:45:32 crc kubenswrapper[4811]: I0128 15:45:32.402047 4811 scope.go:117] "RemoveContainer" containerID="9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65" Jan 28 15:45:32 crc kubenswrapper[4811]: E0128 15:45:32.402303 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Jan 28 15:45:32 crc kubenswrapper[4811]: I0128 15:45:32.500017 4811 generic.go:334] "Generic (PLEG): container finished" podID="41421640-f722-4edf-a0d5-b120f095246a" containerID="6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35" exitCode=0 Jan 28 15:45:32 crc kubenswrapper[4811]: I0128 15:45:32.500111 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" event={"ID":"41421640-f722-4edf-a0d5-b120f095246a","Type":"ContainerDied","Data":"6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35"} Jan 28 15:45:32 crc kubenswrapper[4811]: I0128 15:45:32.501421 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-fwdqb" event={"ID":"9c138c07-9108-4af5-b2e0-623109bbdf9c","Type":"ContainerStarted","Data":"932c6dae6394629c20fb406507f8e10126ab9ee12f897231aaec996e652731c7"} Jan 28 15:45:32 crc kubenswrapper[4811]: I0128 15:45:32.515007 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:32Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:32 crc kubenswrapper[4811]: I0128 15:45:32.527007 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:32Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:32 crc kubenswrapper[4811]: I0128 15:45:32.539979 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:32Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:32 crc kubenswrapper[4811]: I0128 15:45:32.555825 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all 
endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:32Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:32 crc kubenswrapper[4811]: I0128 15:45:32.577255 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b
243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:32Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:32 crc kubenswrapper[4811]: I0128 15:45:32.596285 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:32Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:32 crc kubenswrapper[4811]: I0128 15:45:32.611971 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:32Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:32 crc kubenswrapper[4811]: I0128 15:45:32.631096 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/
ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\"
:\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:32Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:32 crc kubenswrapper[4811]: I0128 15:45:32.645251 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:32Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:32 crc kubenswrapper[4811]: I0128 15:45:32.665730 4811 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:32Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:32 crc kubenswrapper[4811]: I0128 15:45:32.677549 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:32Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:32 crc kubenswrapper[4811]: I0128 15:45:32.987711 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired 
or is not yet valid: current time 2026-01-28T15:45:32Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.002231 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:32Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.015812 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:33Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.297615 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 06:20:24.305525946 +0000 UTC Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.511409 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" event={"ID":"41421640-f722-4edf-a0d5-b120f095246a","Type":"ContainerStarted","Data":"24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd"} Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.516687 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" event={"ID":"d3371905-8bb6-433c-b905-43d69e7b382a","Type":"ContainerStarted","Data":"fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025"} Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.518587 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-fwdqb" event={"ID":"9c138c07-9108-4af5-b2e0-623109bbdf9c","Type":"ContainerStarted","Data":"4a6786216986b5b79f685fddb0711a6c7a735ff6b163a9516399cde7f85a8f94"} Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.529288 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all 
endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:33Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.555691 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b
243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:33Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.572362 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:33Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.587622 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:33Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.598957 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-28T15:45:33Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.624581 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\
"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\"
,\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331
b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:33Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.636086 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:33Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.649682 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:33Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.665672 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:33Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.681620 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45
:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:33Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.696667 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:33Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.711378 4811 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:33Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.727361 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:33Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.746874 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:33Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.762151 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:33Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.778608 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:33Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.798943 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:33Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.816795 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api
-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:33Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.839241 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:33Z 
is after 2025-08-24T17:21:41Z" Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.855636 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:33Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.867229 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a6786216986b5b79f685fddb0711a6c7a735ff6b163a9516399cde7f85a8f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:33Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.885211 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:33Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.900129 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:33Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.911531 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:33Z is after 
2025-08-24T17:21:41Z" Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.922528 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:33Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.931617 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:33Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.945006 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all 
endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:33Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:33 crc kubenswrapper[4811]: I0128 15:45:33.967296 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b
243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:33Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.144059 4811 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.146392 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.146450 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.146462 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.146572 4811 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.152651 4811 kubelet_node_status.go:115] "Node was previously registered" node="crc" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.152967 4811 kubelet_node_status.go:79] "Successfully registered node" node="crc" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.154114 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.154150 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.154159 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.154173 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.154184 4811 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:34Z","lastTransitionTime":"2026-01-28T15:45:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:34 crc kubenswrapper[4811]: E0128 15:45:34.168123 4811 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0d7027f1-1ac9-4166-9238-d8a50d9d7547\\\",\\\"systemUUID\\\":\\\"abb845de-029b-47c8-9624-111770212716\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z"
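All of the failed status patches above share one root cause: the kubelet's POSTs to the network-node-identity webhook at https://127.0.0.1:9743 fail TLS verification because the webhook's serving certificate expired on 2025-08-24T17:21:41Z while the node clock reads 2026-01-28T15:45:3xZ. The kubelet then retried the identical node-status patch several more times within the same second, hitting the same error each time. Below is a minimal Go sketch (not taken from kubelet; the endpoint is the one named in the log) that fetches the presented leaf certificate, skipping chain verification only so the handshake completes, and applies the same validity-window comparison that produces "x509: certificate has expired or is not yet valid":

    // certcheck.go: inspect a TLS endpoint's leaf certificate validity window.
    package main

    import (
    	"crypto/tls"
    	"fmt"
    	"log"
    	"time"
    )

    func main() {
    	addr := "127.0.0.1:9743" // webhook endpoint from the log; adjust as needed

    	// Skip chain verification so the cert can be read even though it is expired.
    	conn, err := tls.Dial("tcp", addr, &tls.Config{InsecureSkipVerify: true})
    	if err != nil {
    		log.Fatalf("dial %s: %v", addr, err)
    	}
    	defer conn.Close()

    	leaf := conn.ConnectionState().PeerCertificates[0]
    	now := time.Now()
    	fmt.Printf("subject:   %s\n", leaf.Subject)
    	fmt.Printf("notBefore: %s\n", leaf.NotBefore.Format(time.RFC3339))
    	fmt.Printf("notAfter:  %s\n", leaf.NotAfter.Format(time.RFC3339))

    	switch {
    	case now.Before(leaf.NotBefore):
    		fmt.Println("certificate is not yet valid")
    	case now.After(leaf.NotAfter):
    		fmt.Println("certificate has expired") // the branch this log keeps hitting
    	default:
    		fmt.Println("certificate is within its validity window")
    	}
    }

Run against the endpoint above, the sketch would be expected to print the same notAfter the kubelet reports, which points at certificate rotation (or a badly skewed node clock) rather than at anything in the patch payloads themselves.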
event="NodeHasNoDiskPressure" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.171674 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.171703 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.171725 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:34Z","lastTransitionTime":"2026-01-28T15:45:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:34 crc kubenswrapper[4811]: E0128 15:45:34.191819 4811 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0d7027f1-1ac9-4166-9238-d8a50d9d7547\\\",\\\"systemUUID\\\":\\\"abb845de-029b-47c8-9624-111770212716\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.195985 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.196029 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.196043 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.196060 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.196072 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:34Z","lastTransitionTime":"2026-01-28T15:45:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:34 crc kubenswrapper[4811]: E0128 15:45:34.212073 4811 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0d7027f1-1ac9-4166-9238-d8a50d9d7547\\\",\\\"systemUUID\\\":\\\"abb845de-029b-47c8-9624-111770212716\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.215520 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.215558 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.215572 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.215589 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.215601 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:34Z","lastTransitionTime":"2026-01-28T15:45:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:34 crc kubenswrapper[4811]: E0128 15:45:34.232217 4811 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0d7027f1-1ac9-4166-9238-d8a50d9d7547\\\",\\\"systemUUID\\\":\\\"abb845de-029b-47c8-9624-111770212716\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.236204 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.236239 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
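
The body the kubelet is trying to send above is a strategic merge patch against .status: the "$setElementOrder/conditions" directive fixes the ordering of the merged conditions list (keyed by "type"), while the "conditions" array carries only the entries being updated. A minimal sketch of constructing such a payload, using the field names visible in the payload above; this is illustrative only, not the kubelet's implementation:

// node_status_patch.go — minimal sketch of a strategic-merge-patch body
// for node conditions; field names taken from the payload in the log.
package main

import (
	"encoding/json"
	"fmt"
	"time"
)

type condition struct {
	Type               string `json:"type"`
	Status             string `json:"status,omitempty"`
	Reason             string `json:"reason,omitempty"`
	Message            string `json:"message,omitempty"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime,omitempty"`
	LastTransitionTime string `json:"lastTransitionTime,omitempty"`
}

func main() {
	now := time.Now().UTC().Format(time.RFC3339)
	patch := map[string]any{
		"status": map[string]any{
			// Orders the merged list; entries are matched by their "type" key.
			"$setElementOrder/conditions": []condition{
				{Type: "MemoryPressure"}, {Type: "DiskPressure"},
				{Type: "PIDPressure"}, {Type: "Ready"},
			},
			"conditions": []condition{{
				Type:               "Ready",
				Status:             "False",
				Reason:             "KubeletNotReady",
				Message:            "container runtime network not ready",
				LastHeartbeatTime:  now,
				LastTransitionTime: now,
			}},
		},
	}
	body, _ := json.Marshal(patch)
	// Sent to the API server as application/strategic-merge-patch+json.
	fmt.Println(string(body))
}
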
event="NodeHasNoDiskPressure" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.236249 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.236263 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.236272 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:34Z","lastTransitionTime":"2026-01-28T15:45:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:34 crc kubenswrapper[4811]: E0128 15:45:34.248410 4811 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0d7027f1-1ac9-4166-9238-d8a50d9d7547\\\",\\\"systemUUID\\\":\\\"abb845de-029b-47c8-9624-111770212716\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: E0128 15:45:34.248612 4811 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.250107 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
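
The "Unable to update node status ... exceeds retry count" entry above is the kubelet giving up after a fixed number of consecutive patch attempts within a single sync; upstream uses a small constant for this (5 in the kubelet source). A minimal sketch of that retry pattern, with the constant's value stated as an assumption:

// Minimal sketch of the bounded retry behind "exceeds retry count".
package main

import (
	"errors"
	"fmt"
)

const nodeStatusUpdateRetry = 5 // assumption: mirrors the upstream constant

func tryUpdateNodeStatus(attempt int) error {
	// Stand-in for the PATCH that the expired webhook cert rejects above.
	return errors.New("failed calling webhook: certificate has expired")
}

func updateNodeStatus() error {
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		if err := tryUpdateNodeStatus(i); err != nil {
			fmt.Printf("Error updating node status, will retry: %v\n", err)
			continue
		}
		return nil
	}
	return errors.New("update node status exceeds retry count")
}

func main() {
	if err := updateNodeStatus(); err != nil {
		// Matches the log: the node stays un-patched until the next sync period.
		fmt.Println("Unable to update node status:", err)
	}
}
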
event="NodeHasSufficientMemory" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.250149 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.250160 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.250176 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.250188 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:34Z","lastTransitionTime":"2026-01-28T15:45:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.298027 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 12:55:42.713677744 +0000 UTC Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.338618 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.338687 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:45:34 crc kubenswrapper[4811]: E0128 15:45:34.338776 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.338839 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:45:34 crc kubenswrapper[4811]: E0128 15:45:34.338990 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:45:34 crc kubenswrapper[4811]: E0128 15:45:34.339165 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.358835 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.358866 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.358876 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.358890 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.358902 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:34Z","lastTransitionTime":"2026-01-28T15:45:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.461693 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.461760 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.461784 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.461809 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.461828 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:34Z","lastTransitionTime":"2026-01-28T15:45:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.476298 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.483366 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.485716 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.494534 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\
\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting 
controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.519505 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b
243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.524481 4811 generic.go:334] "Generic (PLEG): container finished" podID="41421640-f722-4edf-a0d5-b120f095246a" containerID="24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd" exitCode=0 Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.524536 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" event={"ID":"41421640-f722-4edf-a0d5-b120f095246a","Type":"ContainerDied","Data":"24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd"} Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.542272 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.560877 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.565645 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.565716 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.565741 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.565771 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.565794 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:34Z","lastTransitionTime":"2026-01-28T15:45:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.579022 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.598480 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.623187 4811 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.640382 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.655862 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.668210 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.668288 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.668313 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.668343 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.668365 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:34Z","lastTransitionTime":"2026-01-28T15:45:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.676953 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.693684 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.706247 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a6786216986b5b79f685fddb0711a6c7a735ff6b163a9516399cde7f85a8f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursi
veReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.717388 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.732639 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.747105 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.760220 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.770570 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.770594 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.770601 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.770614 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.770623 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:34Z","lastTransitionTime":"2026-01-28T15:45:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.774724 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all 
endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.795297 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b
243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.806571 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.818391 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.827693 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.840315 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.855907 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.867982 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c725da74-d9b4-4f16-9a21-f9aae2fd05f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63a7b85207f94aa6d9180a5e207421fe63993dbedbac93b11a5b7ad1b23bdd78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e528919fa4f8c2b8108615f2dd2d79d790aea2e94bb7b2de7bb1cd188fd47ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ede5aafa1ce2ebfbaa3570ec43084884dc57d2d15dc5d2751c3b5a9072d8dad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c990287111409ab3f1f5210adb2b4da06714d33e695ac76e2f83f40073dfca14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.872869 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.872914 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.872931 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.872951 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.872966 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:34Z","lastTransitionTime":"2026-01-28T15:45:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.880424 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.893172 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.907710 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.923108 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.932694 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a6786216986b5b79f685fddb0711a6c7a735ff6b163a9516399cde7f85a8f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:34Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.975330 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.975736 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.975829 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.975926 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:34 crc kubenswrapper[4811]: I0128 15:45:34.976028 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:34Z","lastTransitionTime":"2026-01-28T15:45:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.079027 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.079075 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.079086 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.079104 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.079116 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:35Z","lastTransitionTime":"2026-01-28T15:45:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.181683 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.182341 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.182394 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.182428 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.182486 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:35Z","lastTransitionTime":"2026-01-28T15:45:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.284293 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.284320 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.284329 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.284343 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.284352 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:35Z","lastTransitionTime":"2026-01-28T15:45:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.298118 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 07:33:57.367137578 +0000 UTC
Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.387768 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.387800 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.387808 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.387821 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.387831 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:35Z","lastTransitionTime":"2026-01-28T15:45:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.489870 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.490221 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.490232 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.490248 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.490259 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:35Z","lastTransitionTime":"2026-01-28T15:45:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.531504 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" event={"ID":"d3371905-8bb6-433c-b905-43d69e7b382a","Type":"ContainerStarted","Data":"9e283193c379bbcec5df1867cef357b6fd10e62ab707b2d7003cc7277193eb7e"}
Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.531700 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6"
Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.531746 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6"
Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.544016 4811 generic.go:334] "Generic (PLEG): container finished" podID="41421640-f722-4edf-a0d5-b120f095246a" containerID="e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f" exitCode=0
Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.544131 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" event={"ID":"41421640-f722-4edf-a0d5-b120f095246a","Type":"ContainerDied","Data":"e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f"}
Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.552367 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:35Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.575835 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.576124 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:35Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.576697 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.589858 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:35Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.593314 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.593342 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.593350 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.593364 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.593373 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:35Z","lastTransitionTime":"2026-01-28T15:45:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.610458 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-
apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:35Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.648512 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b
243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:35Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.671210 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:35Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.690659 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:35Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.695532 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.695575 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.695588 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.695606 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.695619 4811 
setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:35Z","lastTransitionTime":"2026-01-28T15:45:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.707773 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:35Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.721571 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:35Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.738867 4811 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265\\\",\\\"image\\\":\\\"quay.io/openshift-
release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\
\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e283193c379bbcec5df1867cef357b6fd10e62ab707b2d7003cc7277193eb7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuberne
tes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:35Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.752820 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c725da74-d9b4-4f16-9a21-f9aae2fd05f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63a7b85207f94aa6d9180a5e207421fe63993dbedbac93b11a5b7ad1b23bdd78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e528919fa4f8c2b8108615f2dd2d79d790aea2e94bb7b2de7bb1cd188fd47ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ede5aafa1ce2ebfbaa3570ec43084884dc57d2d15dc5d2751c3b5a9072d8dad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c990287111409ab3f1f5210adb2b4da06714d33e695ac76e2f83f40073dfca14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:35Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.768371 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:35Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.781786 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a6786216986b5b79f685fddb0711a6c7a735ff6b163a9516399cde7f85a8f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:35Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.797283 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:35Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.798451 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.798492 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.798503 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.798520 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.798531 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:35Z","lastTransitionTime":"2026-01-28T15:45:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no 
CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.812351 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"p
hase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:35Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.826351 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\
\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:35Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.847032 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b
243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:35Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.861670 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:35Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.876675 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:35Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.889209 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-28T15:45:35Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.901885 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.901935 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.901946 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.901969 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.901983 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:35Z","lastTransitionTime":"2026-01-28T15:45:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.904726 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:35Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.924588 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e283193c379bbcec5df1867cef357b6fd10e62a
b707b2d7003cc7277193eb7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:35Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.940600 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c725da74-d9b4-4f16-9a21-f9aae2fd05f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63a7b85207f94aa6d9180a5e207421fe63993dbedbac93b11a5b7ad1b23bdd78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e528919fa4f8c2b8108615f2dd2d79d790aea2e94bb7b2de7bb1cd188fd47ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ede5aafa1ce2ebfbaa3570ec43084884dc57d2d15dc5d2751c3b5a9072d8dad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c990287111409ab3f1f5210adb2b4da06714d33e695ac76e2f83f40073dfca14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:35Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.955567 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:35Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.970015 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:35Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.983646 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:35Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:35 crc kubenswrapper[4811]: I0128 15:45:35.998826 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f
23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:35Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.004573 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.004620 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.004637 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.004654 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.004666 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:36Z","lastTransitionTime":"2026-01-28T15:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.004703 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.004861 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:45:36 crc kubenswrapper[4811]: E0128 15:45:36.004905 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:45:44.004879175 +0000 UTC m=+36.759242778 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:45:36 crc kubenswrapper[4811]: E0128 15:45:36.004999 4811 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 28 15:45:36 crc kubenswrapper[4811]: E0128 15:45:36.005068 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-28 15:45:44.00505161 +0000 UTC m=+36.759415203 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.011737 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a6786216986b5b79f685fddb0711a6c7a735ff6b163a9516399cde7f85a8f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:36Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.025463 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:36Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.044088 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:36Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.105517 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:45:36 crc kubenswrapper[4811]: E0128 15:45:36.105617 4811 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 28 15:45:36 crc kubenswrapper[4811]: E0128 15:45:36.105716 4811 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 28 15:45:36 crc kubenswrapper[4811]: E0128 15:45:36.105732 4811 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 28 15:45:36 crc kubenswrapper[4811]: E0128 15:45:36.105835 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-28 15:45:44.105816601 +0000 UTC m=+36.860180184 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 28 15:45:36 crc kubenswrapper[4811]: E0128 15:45:36.105922 4811 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.105690 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.105995 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:45:36 crc kubenswrapper[4811]: E0128 15:45:36.106022 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-28 15:45:44.105993076 +0000 UTC m=+36.860356699 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 28 15:45:36 crc kubenswrapper[4811]: E0128 15:45:36.106084 4811 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 28 15:45:36 crc kubenswrapper[4811]: E0128 15:45:36.106099 4811 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 28 15:45:36 crc kubenswrapper[4811]: E0128 15:45:36.106109 4811 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 28 15:45:36 crc kubenswrapper[4811]: E0128 15:45:36.106149 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-28 15:45:44.10613906 +0000 UTC m=+36.860502643 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.107271 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.107369 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.107493 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.107581 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.107597 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:36Z","lastTransitionTime":"2026-01-28T15:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.210160 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.210203 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.210213 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.210229 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.210240 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:36Z","lastTransitionTime":"2026-01-28T15:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.298750 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-25 14:33:42.889853539 +0000 UTC Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.312323 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.312371 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.312382 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.312401 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.312414 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:36Z","lastTransitionTime":"2026-01-28T15:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.338564 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.338655 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:45:36 crc kubenswrapper[4811]: E0128 15:45:36.338727 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:45:36 crc kubenswrapper[4811]: E0128 15:45:36.338913 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.339090 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:45:36 crc kubenswrapper[4811]: E0128 15:45:36.339227 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.415453 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.415510 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.415525 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.415543 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.415557 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:36Z","lastTransitionTime":"2026-01-28T15:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.517913 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.518008 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.518019 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.518033 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.518043 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:36Z","lastTransitionTime":"2026-01-28T15:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.552084 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" event={"ID":"41421640-f722-4edf-a0d5-b120f095246a","Type":"ContainerStarted","Data":"32b6be9a85b332950bbbde5670c0ed3366ce07d2df9b7a68815b6582e8cf874f"} Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.552743 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.565330 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:36Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.576522 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:36Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.591665 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b6be9a85b332950bbbde5670c0ed3366ce07d2df9b7a68815b6582e8cf874f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:36Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.605748 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:36Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.620398 4811 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.620465 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.620478 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.620495 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.620506 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:36Z","lastTransitionTime":"2026-01-28T15:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.625581 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e283193c379bbcec5df1867cef357b6fd10e62a
b707b2d7003cc7277193eb7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:36Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.638879 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c725da74-d9b4-4f16-9a21-f9aae2fd05f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63a7b85207f94aa6d9180a5e207421fe63993dbedbac93b11a5b7ad1b23bdd78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e528919fa4f8c2b8108615f2dd2d79d790aea2e94bb7b2de7bb1cd188fd47ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ede5aafa1ce2ebfbaa3570ec43084884dc57d2d15dc5d2751c3b5a9072d8dad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c990287111409ab3f1f5210adb2b4da06714d33e695ac76e2f83f40073dfca14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:36Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.653192 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:36Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.662862 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a6786216986b5b79f685fddb0711a6c7a735ff6b163a9516399cde7f85a8f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:36Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.674811 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:36Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.686569 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:36Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.697227 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:36Z is after 
2025-08-24T17:21:41Z" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.707624 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:36Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.716411 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:36Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.724202 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.724235 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.724245 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.724259 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.724268 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:36Z","lastTransitionTime":"2026-01-28T15:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.728674 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-
apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:36Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.746528 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b
243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:36Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.826708 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.826953 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.827026 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.827102 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.827159 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:36Z","lastTransitionTime":"2026-01-28T15:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.930322 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.930382 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.930467 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.930497 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:36 crc kubenswrapper[4811]: I0128 15:45:36.930518 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:36Z","lastTransitionTime":"2026-01-28T15:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.035794 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.036228 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.036248 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.036275 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.036300 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:37Z","lastTransitionTime":"2026-01-28T15:45:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.139826 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.139898 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.139913 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.139931 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.139943 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:37Z","lastTransitionTime":"2026-01-28T15:45:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.242197 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.242227 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.242236 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.242248 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.242260 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:37Z","lastTransitionTime":"2026-01-28T15:45:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.289413 4811 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.299833 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 03:35:52.23987021 +0000 UTC Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.344492 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.344766 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.344888 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.344969 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.345054 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:37Z","lastTransitionTime":"2026-01-28T15:45:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.447496 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.447559 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.447579 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.447606 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.447624 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:37Z","lastTransitionTime":"2026-01-28T15:45:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.550314 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.550411 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.550469 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.550498 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.550562 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:37Z","lastTransitionTime":"2026-01-28T15:45:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.652501 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.652544 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.652557 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.652582 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.652596 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:37Z","lastTransitionTime":"2026-01-28T15:45:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.692246 4811 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.754923 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.754954 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.754969 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.754997 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.755007 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:37Z","lastTransitionTime":"2026-01-28T15:45:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.857962 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.858012 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.858026 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.858044 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.858072 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:37Z","lastTransitionTime":"2026-01-28T15:45:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.960274 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.960314 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.960325 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.960340 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:37 crc kubenswrapper[4811]: I0128 15:45:37.960350 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:37Z","lastTransitionTime":"2026-01-28T15:45:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.062352 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.062385 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.062394 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.062406 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.062415 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:38Z","lastTransitionTime":"2026-01-28T15:45:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.164386 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.164443 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.164453 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.164469 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.164483 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:38Z","lastTransitionTime":"2026-01-28T15:45:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.266765 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.266797 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.266806 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.266818 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.266833 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:38Z","lastTransitionTime":"2026-01-28T15:45:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.300526 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 13:23:37.459271584 +0000 UTC Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.338654 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.338779 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:45:38 crc kubenswrapper[4811]: E0128 15:45:38.338846 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:45:38 crc kubenswrapper[4811]: E0128 15:45:38.339007 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.339425 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:45:38 crc kubenswrapper[4811]: E0128 15:45:38.339503 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.352581 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:38Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.367627 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:38Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.369127 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.369187 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.369201 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.369219 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.369236 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:38Z","lastTransitionTime":"2026-01-28T15:45:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.384165 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b6be9a85b332950bbbde5670c0ed3366ce07d2df9b7a68815b6582e8cf874f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:38Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.393652 4811 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:38Z is after 2025-08-24T17:21:41Z" Jan 28 
15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.414001 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c3fda762fe236d14fa4dc0eb6c33
fbf1c958d805f3f4fc470e0f822ecf1265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha
256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e283193c379bbcec5df1867cef357b6fd10e62ab707b2d7003cc7277193eb7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\
\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:38Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.426018 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c725da74-d9b4-4f16-9a21-f9aae2fd05f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63a7b85207f94aa6d9180a5e207421fe63993dbedbac93b11a5b7ad1b23bdd78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e528919fa4f8c2b8108615f2dd2d79d790aea2e94bb7b2de7bb1cd188fd47ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ede5aafa1ce2ebfbaa3570ec43084884dc57d2d15dc5d2751c3b5a9072d8dad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c990287111409ab3f1f5210adb2b4da06714d33e695ac76e2f83f40073dfca14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:38Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.438843 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:38Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.447669 4811 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.451471 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a6786216986b5b79f685fddb0711a6c7a735ff6b163a9516399cde7f85a8f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:38Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.468685 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:38Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.471707 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.471756 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.471790 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.471811 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.471822 4811 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:38Z","lastTransitionTime":"2026-01-28T15:45:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.488825 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube
rnetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:38Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.505131 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\
\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:38Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.517949 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:38Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.529017 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-28T15:45:38Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.539604 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":tr
ue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:38Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.556052 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
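The "back-off 10s restarting failed container ... CrashLoopBackOff" state recorded in the entry above comes from the kubelet's restart backoff: the delay starts at 10s and doubles on each crash until it hits a cap (5m is the commonly documented cap, and the counter resets after a sufficiently long stable run). A minimal sketch of that schedule, assuming the standard doubling policy; this is illustrative and not kubelet source:

```go
// backoff.go - sketch of the doubling restart delay behind the
// "back-off 10s restarting failed container ... CrashLoopBackOff" entries.
// The 10s initial delay and 5m cap match kubelet's documented behavior;
// everything else here is illustrative.
package main

import (
	"fmt"
	"time"
)

func main() {
	const (
		initial = 10 * time.Second
		max     = 5 * time.Minute
	)
	delay := initial
	for restart := 1; restart <= 7; restart++ {
		fmt.Printf("restart %d: back-off %s\n", restart, delay)
		delay *= 2
		if delay > max {
			delay = max // kubelet caps the delay; a stable run resets it
		}
	}
}
```

Running this prints 10s, 20s, 40s, 1m20s, 2m40s and then pins at 5m0s, which is why a container that keeps crashing settles into a five-minute restart rhythm.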
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b
243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:38Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.574756 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.574815 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.574827 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.574842 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.574852 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:38Z","lastTransitionTime":"2026-01-28T15:45:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.677544 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.677592 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.677604 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.677620 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.677635 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:38Z","lastTransitionTime":"2026-01-28T15:45:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.780243 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.780313 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.780336 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.780365 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.780387 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:38Z","lastTransitionTime":"2026-01-28T15:45:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.882880 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.882946 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.882962 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.882984 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.883003 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:38Z","lastTransitionTime":"2026-01-28T15:45:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.985626 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.985695 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.985713 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.985737 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:38 crc kubenswrapper[4811]: I0128 15:45:38.985757 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:38Z","lastTransitionTime":"2026-01-28T15:45:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.089229 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.089278 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.089294 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.089317 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.089334 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:39Z","lastTransitionTime":"2026-01-28T15:45:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.192540 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.192586 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.192596 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.192612 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.192626 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:39Z","lastTransitionTime":"2026-01-28T15:45:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.295231 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.295270 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.295280 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.295296 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.295309 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:39Z","lastTransitionTime":"2026-01-28T15:45:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.301486 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 12:34:51.178839621 +0000 UTC
Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.397921 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.397958 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.397966 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.397981 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.397990 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:39Z","lastTransitionTime":"2026-01-28T15:45:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.500512 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.500548 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.500557 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.500572 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.500581 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:39Z","lastTransitionTime":"2026-01-28T15:45:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.562306 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dkzf6_d3371905-8bb6-433c-b905-43d69e7b382a/ovnkube-controller/0.log"
Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.564885 4811 generic.go:334] "Generic (PLEG): container finished" podID="d3371905-8bb6-433c-b905-43d69e7b382a" containerID="9e283193c379bbcec5df1867cef357b6fd10e62ab707b2d7003cc7277193eb7e" exitCode=1
Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.564918 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" event={"ID":"d3371905-8bb6-433c-b905-43d69e7b382a","Type":"ContainerDied","Data":"9e283193c379bbcec5df1867cef357b6fd10e62ab707b2d7003cc7277193eb7e"}
Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.565485 4811 scope.go:117] "RemoveContainer" containerID="9e283193c379bbcec5df1867cef357b6fd10e62ab707b2d7003cc7277193eb7e"
Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.582633 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all 
endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:39Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.602886 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.602938 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.602953 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.602978 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.602989 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:39Z","lastTransitionTime":"2026-01-28T15:45:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.607824 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:39Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.625746 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:39Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.643736 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:39Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.654809 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:39Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.667011 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c725da74-d9b4-4f16-9a21-f9aae2fd05f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63a7b85207f94aa6d9180a5e207421fe63993dbedbac93b11a5b7ad1b23bdd78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e528919fa4f8c2b8108615f2dd2d79d790aea2e94bb7b2de7bb1cd188fd47ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ede5aafa1ce2ebfbaa3570ec43084884dc57d2d15dc5d2751c3b5a9072d8dad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c990287111409ab3f1f5210adb2b4da06714d33e695ac76e2f83f40073dfca14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:39Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.679558 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:39Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.691895 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:39Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.704647 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.704685 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.704696 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.704710 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.704720 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:39Z","lastTransitionTime":"2026-01-28T15:45:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.709979 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:39Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.733206 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b6be9a85b332950bbbde5670c0ed3366ce07d2df9b7a68815b6582e8cf874f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:39Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.742951 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:39Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.760712 4811 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.762253 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e283193c379bbcec5df1867cef357b6fd10e62a
b707b2d7003cc7277193eb7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e283193c379bbcec5df1867cef357b6fd10e62ab707b2d7003cc7277193eb7e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:45:38Z\\\",\\\"message\\\":\\\"r 8 for removal\\\\nI0128 15:45:38.513405 6072 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0128 15:45:38.513094 6072 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:38.513458 6072 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0128 15:45:38.513462 6072 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0128 15:45:38.513477 6072 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0128 15:45:38.513479 6072 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0128 15:45:38.513486 6072 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0128 15:45:38.513500 6072 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0128 15:45:38.513510 6072 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0128 15:45:38.513561 6072 factory.go:656] Stopping watch factory\\\\nI0128 15:45:38.513574 6072 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0128 15:45:38.513579 6072 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0128 15:45:38.513584 6072 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0128 15:45:38.513590 6072 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0128 15:45:38.513593 6072 handler.go:208] Removed *v1.Namespace 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:39Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.771211 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a6786216986b5b79f685fddb0711a6c7a735ff6b163a9516399cde7f85a8f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:39Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.782769 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:39Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.792997 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:39Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.807219 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.807258 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.807266 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.807279 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.807290 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:39Z","lastTransitionTime":"2026-01-28T15:45:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.910733 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.910777 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.910785 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.910800 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:39 crc kubenswrapper[4811]: I0128 15:45:39.910811 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:39Z","lastTransitionTime":"2026-01-28T15:45:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.013813 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.013860 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.013879 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.013901 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.013915 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:40Z","lastTransitionTime":"2026-01-28T15:45:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.115683 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.115730 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.115749 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.115765 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.115775 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:40Z","lastTransitionTime":"2026-01-28T15:45:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.217637 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.217671 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.217679 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.217692 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.217701 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:40Z","lastTransitionTime":"2026-01-28T15:45:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.302372 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 22:49:41.54159201 +0000 UTC Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.320015 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.320056 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.320076 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.320095 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.320107 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:40Z","lastTransitionTime":"2026-01-28T15:45:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.338603 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.338654 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.338602 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:45:40 crc kubenswrapper[4811]: E0128 15:45:40.338731 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:45:40 crc kubenswrapper[4811]: E0128 15:45:40.338789 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:45:40 crc kubenswrapper[4811]: E0128 15:45:40.338863 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.422017 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.422093 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.422123 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.422153 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.422173 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:40Z","lastTransitionTime":"2026-01-28T15:45:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.524332 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.524381 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.524396 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.524415 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.524428 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:40Z","lastTransitionTime":"2026-01-28T15:45:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.570830 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dkzf6_d3371905-8bb6-433c-b905-43d69e7b382a/ovnkube-controller/0.log" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.574123 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" event={"ID":"d3371905-8bb6-433c-b905-43d69e7b382a","Type":"ContainerStarted","Data":"3cbc7be5530d165c2eceeefaa705334b7f8dfb0115bb21ac90ab2697c71ba3d0"} Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.574474 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.586712 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:40Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.596092 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-28T15:45:40Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.613629 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":tr
ue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:40Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.626619 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.626668 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.626685 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.626706 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.626721 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:40Z","lastTransitionTime":"2026-01-28T15:45:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.635661 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:40Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.648851 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:40Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.666548 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:40Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.680734 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b6be9a85b332950bbbde5670c0ed3366ce07d2df9b7a68815b6582e8cf874f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:40Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.693200 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:40Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.710868 4811 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cbc7be5530d165c2eceeefaa705334b7f8dfb0115bb21ac90ab2697c71ba3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e283193c379bbcec5df1867cef357b6fd10e62ab707b2d7003cc7277193eb7e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:45:38Z\\\",\\\"message\\\":\\\"r 8 for removal\\\\nI0128 15:45:38.513405 6072 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0128 15:45:38.513094 6072 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:38.513458 6072 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0128 15:45:38.513462 6072 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0128 15:45:38.513477 6072 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0128 15:45:38.513479 6072 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0128 15:45:38.513486 6072 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0128 15:45:38.513500 6072 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0128 15:45:38.513510 6072 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0128 15:45:38.513561 6072 factory.go:656] Stopping watch factory\\\\nI0128 15:45:38.513574 6072 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0128 15:45:38.513579 6072 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0128 15:45:38.513584 6072 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0128 15:45:38.513590 6072 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0128 15:45:38.513593 6072 handler.go:208] Removed *v1.Namespace 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:40Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.722884 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c725da74-d9b4-4f16-9a21-f9aae2fd05f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63a7b85207f94aa6d9180a5e207421fe63993dbedbac93b11a5b7ad1b23bdd78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e528919fa4f8c2b8108615f2dd2d79d790aea2e94bb7b2de7bb1cd188fd47ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ede5aafa1ce2ebfbaa3570ec43084884dc57d2d15dc5d2751c3b5a9072d8dad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c990287111409ab3f1f5210adb2b4da06714d33e695ac76e2f83f40073dfca14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:40Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.728889 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.728930 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.728938 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.728952 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.728963 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:40Z","lastTransitionTime":"2026-01-28T15:45:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.735337 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:40Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.746368 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:40Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.754981 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a6786216986b5b79f685fddb0711a6c7a735ff6b163a9516399cde7f85a8f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:40Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.766190 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:40Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.777174 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:40Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.830879 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.830921 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.830933 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.830947 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.830958 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:40Z","lastTransitionTime":"2026-01-28T15:45:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.885096 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl"] Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.885731 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.888154 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.888772 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.903351 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:40Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.914294 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:40Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.926262 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:40Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.932351 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.932386 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.932396 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.932414 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.932425 4811 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:40Z","lastTransitionTime":"2026-01-28T15:45:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.943213 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b6be9a85b332950bbbde5670c0ed3366ce07d2df9b7a68815b6582e8cf874f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"co
ntainerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:40Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.952229 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3ac9a279-ca90-41f7-bf8d-d51717483598-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-dj8kl\" (UID: \"3ac9a279-ca90-41f7-bf8d-d51717483598\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.952260 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3ac9a279-ca90-41f7-bf8d-d51717483598-env-overrides\") pod \"ovnkube-control-plane-749d76644c-dj8kl\" (UID: \"3ac9a279-ca90-41f7-bf8d-d51717483598\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.952281 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3ac9a279-ca90-41f7-bf8d-d51717483598-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-dj8kl\" (UID: \"3ac9a279-ca90-41f7-bf8d-d51717483598\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.952386 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5j4mb\" (UniqueName: \"kubernetes.io/projected/3ac9a279-ca90-41f7-bf8d-d51717483598-kube-api-access-5j4mb\") pod \"ovnkube-control-plane-749d76644c-dj8kl\" (UID: \"3ac9a279-ca90-41f7-bf8d-d51717483598\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.955850 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:40Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.981498 4811 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cbc7be5530d165c2eceeefaa705334b7f8dfb0115bb21ac90ab2697c71ba3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e283193c379bbcec5df1867cef357b6fd10e62ab707b2d7003cc7277193eb7e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:45:38Z\\\",\\\"message\\\":\\\"r 8 for removal\\\\nI0128 15:45:38.513405 6072 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0128 15:45:38.513094 6072 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:38.513458 6072 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0128 15:45:38.513462 6072 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0128 15:45:38.513477 6072 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0128 15:45:38.513479 6072 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0128 15:45:38.513486 6072 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0128 15:45:38.513500 6072 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0128 15:45:38.513510 6072 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0128 15:45:38.513561 6072 factory.go:656] Stopping watch factory\\\\nI0128 15:45:38.513574 6072 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0128 15:45:38.513579 6072 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0128 15:45:38.513584 6072 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0128 15:45:38.513590 6072 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0128 15:45:38.513593 6072 handler.go:208] Removed *v1.Namespace 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:40Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:40 crc kubenswrapper[4811]: I0128 15:45:40.998815 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c725da74-d9b4-4f16-9a21-f9aae2fd05f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63a7b85207f94aa6d9180a5e207421fe63993dbedbac93b11a5b7ad1b23bdd78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e528919fa4f8c2b8108615f2dd2d79d790aea2e94bb7b2de7bb1cd188fd47ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ede5aafa1ce2ebfbaa3570ec43084884dc57d2d15dc5d2751c3b5a9072d8dad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c990287111409ab3f1f5210adb2b4da06714d33e695ac76e2f83f40073dfca14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:40Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.014392 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a6786216986b5b79f685fddb0711a6c7a735ff6b163a9516399cde7f85a8f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.027280 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac9a279-ca90-41f7-bf8d-d51717483598\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dj8kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.034621 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.034684 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.034706 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.034731 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.034748 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:41Z","lastTransitionTime":"2026-01-28T15:45:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.043015 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.053312 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3ac9a279-ca90-41f7-bf8d-d51717483598-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-dj8kl\" (UID: \"3ac9a279-ca90-41f7-bf8d-d51717483598\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.053371 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3ac9a279-ca90-41f7-bf8d-d51717483598-env-overrides\") pod \"ovnkube-control-plane-749d76644c-dj8kl\" (UID: \"3ac9a279-ca90-41f7-bf8d-d51717483598\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.053398 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3ac9a279-ca90-41f7-bf8d-d51717483598-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-dj8kl\" (UID: 
\"3ac9a279-ca90-41f7-bf8d-d51717483598\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.053471 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5j4mb\" (UniqueName: \"kubernetes.io/projected/3ac9a279-ca90-41f7-bf8d-d51717483598-kube-api-access-5j4mb\") pod \"ovnkube-control-plane-749d76644c-dj8kl\" (UID: \"3ac9a279-ca90-41f7-bf8d-d51717483598\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.054176 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3ac9a279-ca90-41f7-bf8d-d51717483598-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-dj8kl\" (UID: \"3ac9a279-ca90-41f7-bf8d-d51717483598\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.054559 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3ac9a279-ca90-41f7-bf8d-d51717483598-env-overrides\") pod \"ovnkube-control-plane-749d76644c-dj8kl\" (UID: \"3ac9a279-ca90-41f7-bf8d-d51717483598\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.057029 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"m
ountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.058373 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3ac9a279-ca90-41f7-bf8d-d51717483598-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-dj8kl\" (UID: \"3ac9a279-ca90-41f7-bf8d-d51717483598\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.075218 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5j4mb\" (UniqueName: \"kubernetes.io/projected/3ac9a279-ca90-41f7-bf8d-d51717483598-kube-api-access-5j4mb\") pod \"ovnkube-control-plane-749d76644c-dj8kl\" (UID: \"3ac9a279-ca90-41f7-bf8d-d51717483598\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.075919 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b
243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.087346 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.100352 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.110455 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.122185 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.137846 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.137884 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.137894 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.137910 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.137920 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:41Z","lastTransitionTime":"2026-01-28T15:45:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.202503 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl"
Jan 28 15:45:41 crc kubenswrapper[4811]: W0128 15:45:41.220157 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3ac9a279_ca90_41f7_bf8d_d51717483598.slice/crio-69302858b2de17b2423cf47ac0c467728687f3fcb9d36974441ece55b0edf408 WatchSource:0}: Error finding container 69302858b2de17b2423cf47ac0c467728687f3fcb9d36974441ece55b0edf408: Status 404 returned error can't find the container with id 69302858b2de17b2423cf47ac0c467728687f3fcb9d36974441ece55b0edf408
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.239968 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.240029 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.240056 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.240086 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.240105 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:41Z","lastTransitionTime":"2026-01-28T15:45:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.302724 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 03:29:44.278165236 +0000 UTC
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.344696 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.344739 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.344760 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.344778 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.344793 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:41Z","lastTransitionTime":"2026-01-28T15:45:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.447032 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.447068 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.447078 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.447092 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.447101 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:41Z","lastTransitionTime":"2026-01-28T15:45:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.549663 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.549693 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.549700 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.549712 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.549720 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:41Z","lastTransitionTime":"2026-01-28T15:45:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.581060 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dkzf6_d3371905-8bb6-433c-b905-43d69e7b382a/ovnkube-controller/1.log"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.582245 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dkzf6_d3371905-8bb6-433c-b905-43d69e7b382a/ovnkube-controller/0.log"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.585248 4811 generic.go:334] "Generic (PLEG): container finished" podID="d3371905-8bb6-433c-b905-43d69e7b382a" containerID="3cbc7be5530d165c2eceeefaa705334b7f8dfb0115bb21ac90ab2697c71ba3d0" exitCode=1
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.585283 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" event={"ID":"d3371905-8bb6-433c-b905-43d69e7b382a","Type":"ContainerDied","Data":"3cbc7be5530d165c2eceeefaa705334b7f8dfb0115bb21ac90ab2697c71ba3d0"}
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.585348 4811 scope.go:117] "RemoveContainer" containerID="9e283193c379bbcec5df1867cef357b6fd10e62ab707b2d7003cc7277193eb7e"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.586597 4811 scope.go:117] "RemoveContainer" containerID="3cbc7be5530d165c2eceeefaa705334b7f8dfb0115bb21ac90ab2697c71ba3d0"
Jan 28 15:45:41 crc kubenswrapper[4811]: E0128 15:45:41.587005 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-dkzf6_openshift-ovn-kubernetes(d3371905-8bb6-433c-b905-43d69e7b382a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" podUID="d3371905-8bb6-433c-b905-43d69e7b382a"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.589970 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" event={"ID":"3ac9a279-ca90-41f7-bf8d-d51717483598","Type":"ContainerStarted","Data":"2803e1abf925bb53b578077c2bd42241a09b9c0bd5181c2fab6f0e09fda4ee18"}
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.590061 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" event={"ID":"3ac9a279-ca90-41f7-bf8d-d51717483598","Type":"ContainerStarted","Data":"00a90ec64eeca27c3b374baa74b14b2380008b4863ad6d3618d55f1dce62ea37"}
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.590092 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" event={"ID":"3ac9a279-ca90-41f7-bf8d-d51717483598","Type":"ContainerStarted","Data":"69302858b2de17b2423cf47ac0c467728687f3fcb9d36974441ece55b0edf408"}
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.601740 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.618236 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.634970 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac9a279-ca90-41f7-bf8d-d51717483598\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dj8kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.652323 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.652371 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.652382 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.652398 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.652409 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:41Z","lastTransitionTime":"2026-01-28T15:45:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.667193 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.686725 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.695814 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.707468 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.724122 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.734634 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.745371 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.755149 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.755182 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.755191 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.755203 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.755212 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:41Z","lastTransitionTime":"2026-01-28T15:45:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.758030 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b6be9a85b332950bbbde5670c0ed3366ce07d2df9b7a68815b6582e8cf874f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\
\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.771485 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.787833 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cbc7be5530d165c2eceeefaa705334b7f8dfb0115bb21ac90ab2697c71ba3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e283193c379bbcec5df1867cef357b6fd10e62ab707b2d7003cc7277193eb7e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:45:38Z\\\",\\\"message\\\":\\\"r 8 for removal\\\\nI0128 15:45:38.513405 6072 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0128 15:45:38.513094 6072 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:38.513458 6072 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0128 15:45:38.513462 6072 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0128 15:45:38.513477 6072 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0128 15:45:38.513479 6072 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0128 15:45:38.513486 6072 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0128 15:45:38.513500 6072 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0128 15:45:38.513510 6072 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0128 15:45:38.513561 6072 factory.go:656] Stopping watch factory\\\\nI0128 15:45:38.513574 6072 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0128 15:45:38.513579 6072 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0128 15:45:38.513584 6072 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0128 15:45:38.513590 6072 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0128 15:45:38.513593 6072 handler.go:208] Removed *v1.Namespace 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cbc7be5530d165c2eceeefaa705334b7f8dfb0115bb21ac90ab2697c71ba3d0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"message\\\":\\\":40.553981 6230 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554051 6230 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554284 6230 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554593 6230 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554672 6230 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554763 6230 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.555220 6230 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0128 15:45:40.555286 6230 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0128 15:45:40.555298 6230 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0128 15:45:40.555337 6230 factory.go:656] Stopping watch factory\\\\nI0128 15:45:40.555374 6230 handler.go:208] Removed *v1.Node event handler 7\\\\nI0128 15:45:40.555391 6230 handler.go:208] Removed *v1.Node event handler 2\\\\nI0128 15:45:40.555391 6230 handler.go:208] Removed *v1.NetworkPolicy 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.799685 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c725da74-d9b4-4f16-9a21-f9aae2fd05f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63a7b85207f94aa6d9180a5e207421fe63993dbedbac93b11a5b7ad1b23bdd78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e528919fa4f8c2b8108615f2dd2d79d790aea2e94bb7b2de7bb1cd188fd47ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4b
a8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ede5aafa1ce2ebfbaa3570ec43084884dc57d2d15dc5d2751c3b5a9072d8dad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c990287111409ab3f1f5210adb2b4da06714d33e695ac76e2f83f40073dfca14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.811122 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.820567 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a6786216986b5b79f685fddb0711a6c7a735ff6b163a9516399cde7f85a8f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.833400 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.843807 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.855012 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\
\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.857503 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.857541 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.857550 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.857564 
4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.857573 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:41Z","lastTransitionTime":"2026-01-28T15:45:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.872861 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e32419
5b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\
\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.884712 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.897365 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.910460 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b6be9a85b332950bbbde5670c0ed3366ce07d2df9b7a68815b6582e8cf874f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.925143 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.945028 4811 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cbc7be5530d165c2eceeefaa705334b7f8dfb0115bb21ac90ab2697c71ba3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e283193c379bbcec5df1867cef357b6fd10e62ab707b2d7003cc7277193eb7e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:45:38Z\\\",\\\"message\\\":\\\"r 8 for removal\\\\nI0128 15:45:38.513405 6072 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0128 15:45:38.513094 6072 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:38.513458 6072 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0128 15:45:38.513462 6072 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0128 15:45:38.513477 6072 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0128 15:45:38.513479 6072 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0128 15:45:38.513486 6072 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0128 15:45:38.513500 6072 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0128 15:45:38.513510 6072 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0128 15:45:38.513561 6072 factory.go:656] Stopping watch factory\\\\nI0128 15:45:38.513574 6072 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0128 15:45:38.513579 6072 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0128 15:45:38.513584 6072 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0128 15:45:38.513590 6072 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0128 15:45:38.513593 6072 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cbc7be5530d165c2eceeefaa705334b7f8dfb0115bb21ac90ab2697c71ba3d0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"message\\\":\\\":40.553981 6230 reflector.go:311] Stopping reflector *v1.Pod (0s) from 
k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554051 6230 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554284 6230 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554593 6230 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554672 6230 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554763 6230 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.555220 6230 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0128 15:45:40.555286 6230 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0128 15:45:40.555298 6230 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0128 15:45:40.555337 6230 factory.go:656] Stopping watch factory\\\\nI0128 15:45:40.555374 6230 handler.go:208] Removed *v1.Node event handler 7\\\\nI0128 15:45:40.555391 6230 handler.go:208] Removed *v1.Node event handler 2\\\\nI0128 15:45:40.555391 6230 handler.go:208] Removed *v1.NetworkPolicy ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.959745 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c725da74-d9b4-4f16-9a21-f9aae2fd05f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63a7b85207f94aa6d9180a5e207421fe63993dbedbac93b11a5b7ad1b23bdd78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e528919fa4f8c2b8108615f2dd2d79d790aea2e94bb7b2de7bb1cd188fd47ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ede5aafa1ce2ebfbaa3570ec43084884dc57d2d15dc5d2751c3b5a9072d8dad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c990287111409ab3f1f5210adb2b4da06714d33e695ac76e2f83f40073dfca14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.960757 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.960806 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.960818 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.960863 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.960876 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:41Z","lastTransitionTime":"2026-01-28T15:45:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.981085 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:41 crc kubenswrapper[4811]: I0128 15:45:41.995763 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:41Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.008546 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a6786216986b5b79f685fddb0711a6c7a735ff6b163a9516399cde7f85a8f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.021233 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.021982 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-t5mlc"] Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.022522 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:45:42 crc kubenswrapper[4811]: E0128 15:45:42.022600 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.038690 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}
],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.050287 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac9a279-ca90-41f7-bf8d-d51717483598\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a90ec64eeca27c3b374baa74b14b2380008b4863ad6d3618d55f1dce62ea37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2803e1abf925bb53b578077c2bd42241a09b9c0bd5181c2fab6f0e09fda4ee18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-
api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dj8kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.060234 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bb3aac9c-e96e-4a5e-beb5-aeff56394467-metrics-certs\") pod \"network-metrics-daemon-t5mlc\" (UID: \"bb3aac9c-e96e-4a5e-beb5-aeff56394467\") " pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.060296 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8llm\" (UniqueName: \"kubernetes.io/projected/bb3aac9c-e96e-4a5e-beb5-aeff56394467-kube-api-access-x8llm\") pod \"network-metrics-daemon-t5mlc\" (UID: \"bb3aac9c-e96e-4a5e-beb5-aeff56394467\") " pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.062738 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.062809 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.062827 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.062852 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.062870 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:42Z","lastTransitionTime":"2026-01-28T15:45:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.063952 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c725da74-d9b4-4f16-9a21-f9aae2fd05f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63a7b85207f94aa6d9180a5e207421fe63993dbedbac93b11a5b7ad1b23bdd78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e528919fa4f8c2b8108615f2dd2d79d790aea2e94bb7b2de7bb1cd188fd47ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ede5aafa1ce2ebfbaa3570ec43084884dc57d2d15dc5d2751c3b5a9072d8dad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c990287111409ab3f1f5210adb2b4da06714d33e695ac76e2f83f40073dfca14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.075169 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.084930 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.097681 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.114642 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b6be9a85b332950bbbde5670c0ed3366ce07d2df9b7a68815b6582e8cf874f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.124745 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.140171 4811 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cbc7be5530d165c2eceeefaa705334b7f8dfb0115bb21ac90ab2697c71ba3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e283193c379bbcec5df1867cef357b6fd10e62ab707b2d7003cc7277193eb7e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:45:38Z\\\",\\\"message\\\":\\\"r 8 for removal\\\\nI0128 15:45:38.513405 6072 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0128 15:45:38.513094 6072 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:38.513458 6072 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0128 15:45:38.513462 6072 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0128 15:45:38.513477 6072 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0128 15:45:38.513479 6072 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0128 15:45:38.513486 6072 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0128 15:45:38.513500 6072 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0128 15:45:38.513510 6072 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0128 15:45:38.513561 6072 factory.go:656] Stopping watch factory\\\\nI0128 15:45:38.513574 6072 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0128 15:45:38.513579 6072 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0128 15:45:38.513584 6072 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0128 15:45:38.513590 6072 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0128 15:45:38.513593 6072 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cbc7be5530d165c2eceeefaa705334b7f8dfb0115bb21ac90ab2697c71ba3d0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"message\\\":\\\":40.553981 6230 reflector.go:311] Stopping reflector *v1.Pod (0s) from 
k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554051 6230 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554284 6230 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554593 6230 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554672 6230 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554763 6230 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.555220 6230 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0128 15:45:40.555286 6230 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0128 15:45:40.555298 6230 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0128 15:45:40.555337 6230 factory.go:656] Stopping watch factory\\\\nI0128 15:45:40.555374 6230 handler.go:208] Removed *v1.Node event handler 7\\\\nI0128 15:45:40.555391 6230 handler.go:208] Removed *v1.Node event handler 2\\\\nI0128 15:45:40.555391 6230 handler.go:208] Removed *v1.NetworkPolicy ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.148743 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a6786216986b5b79f685fddb0711a6c7a735ff6b163a9516399cde7f85a8f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.158987 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.161053 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bb3aac9c-e96e-4a5e-beb5-aeff56394467-metrics-certs\") pod \"network-metrics-daemon-t5mlc\" (UID: \"bb3aac9c-e96e-4a5e-beb5-aeff56394467\") " pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.161117 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8llm\" (UniqueName: \"kubernetes.io/projected/bb3aac9c-e96e-4a5e-beb5-aeff56394467-kube-api-access-x8llm\") pod \"network-metrics-daemon-t5mlc\" (UID: \"bb3aac9c-e96e-4a5e-beb5-aeff56394467\") " pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:45:42 crc kubenswrapper[4811]: E0128 15:45:42.161370 4811 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 28 15:45:42 crc kubenswrapper[4811]: E0128 15:45:42.161467 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bb3aac9c-e96e-4a5e-beb5-aeff56394467-metrics-certs podName:bb3aac9c-e96e-4a5e-beb5-aeff56394467 nodeName:}" failed. No retries permitted until 2026-01-28 15:45:42.661447475 +0000 UTC m=+35.415811058 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bb3aac9c-e96e-4a5e-beb5-aeff56394467-metrics-certs") pod "network-metrics-daemon-t5mlc" (UID: "bb3aac9c-e96e-4a5e-beb5-aeff56394467") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.165567 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.165605 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.165620 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.165637 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.165648 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:42Z","lastTransitionTime":"2026-01-28T15:45:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.172465 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"mul
tus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.179640 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8llm\" (UniqueName: \"kubernetes.io/projected/bb3aac9c-e96e-4a5e-beb5-aeff56394467-kube-api-access-x8llm\") pod \"network-metrics-daemon-t5mlc\" (UID: \"bb3aac9c-e96e-4a5e-beb5-aeff56394467\") " pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.182314 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac9a279-ca90-41f7-bf8d-d51717483598\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a90ec64eeca27c3b374baa74b14b2380008b4863ad6d3618d55f1dce62ea37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2803e1abf925bb53b578077c2bd42241a09b9c0bd5181c2fab6f0e09fda4ee18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dj8kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 
15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.192010 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5mlc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb3aac9c-e96e-4a5e-beb5-aeff56394467\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5mlc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.206497 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.222667 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b
243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.237015 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.248770 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.259804 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.267784 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.267820 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.267832 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.267852 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.267864 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:42Z","lastTransitionTime":"2026-01-28T15:45:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.303259 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 14:32:33.901128992 +0000 UTC Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.338830 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:45:42 crc kubenswrapper[4811]: E0128 15:45:42.339014 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.338845 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.338845 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:45:42 crc kubenswrapper[4811]: E0128 15:45:42.339105 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:45:42 crc kubenswrapper[4811]: E0128 15:45:42.339191 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.369660 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.369714 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.369725 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.369744 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.369782 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:42Z","lastTransitionTime":"2026-01-28T15:45:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.473082 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.473192 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.473211 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.473236 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.473256 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:42Z","lastTransitionTime":"2026-01-28T15:45:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.576123 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.576176 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.576198 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.576216 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.576228 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:42Z","lastTransitionTime":"2026-01-28T15:45:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.596466 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dkzf6_d3371905-8bb6-433c-b905-43d69e7b382a/ovnkube-controller/1.log" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.604033 4811 scope.go:117] "RemoveContainer" containerID="3cbc7be5530d165c2eceeefaa705334b7f8dfb0115bb21ac90ab2697c71ba3d0" Jan 28 15:45:42 crc kubenswrapper[4811]: E0128 15:45:42.604392 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-dkzf6_openshift-ovn-kubernetes(d3371905-8bb6-433c-b905-43d69e7b382a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.621259 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5mlc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb3aac9c-e96e-4a5e-beb5-aeff56394467\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5mlc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.636606 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.653376 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.666269 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bb3aac9c-e96e-4a5e-beb5-aeff56394467-metrics-certs\") pod \"network-metrics-daemon-t5mlc\" (UID: \"bb3aac9c-e96e-4a5e-beb5-aeff56394467\") " pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:45:42 crc kubenswrapper[4811]: E0128 15:45:42.666663 4811 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 28 15:45:42 crc kubenswrapper[4811]: E0128 15:45:42.666803 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bb3aac9c-e96e-4a5e-beb5-aeff56394467-metrics-certs podName:bb3aac9c-e96e-4a5e-beb5-aeff56394467 nodeName:}" failed. No retries permitted until 2026-01-28 15:45:43.666774063 +0000 UTC m=+36.421137646 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bb3aac9c-e96e-4a5e-beb5-aeff56394467-metrics-certs") pod "network-metrics-daemon-t5mlc" (UID: "bb3aac9c-e96e-4a5e-beb5-aeff56394467") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.668378 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac9a279-ca90-41f7-bf8d-d51717483598\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a90ec64eeca27c3b374baa74b14b2380008b4863ad6d3618d55f1dce62ea37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2803e1abf925bb53b578077c2bd42241a09b9c0bd5181c2fab6f0e09fda4ee18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dj8kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.679034 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.679077 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.679092 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.679109 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.679122 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:42Z","lastTransitionTime":"2026-01-28T15:45:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.685934 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.697830 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.707378 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.722489 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all 
endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.743664 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b
243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.754190 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.764735 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.777086 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b6be9a85b332950bbbde5670c0ed3366ce07d2df9b7a68815b6582e8cf874f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.781185 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.781222 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:42 crc 
kubenswrapper[4811]: I0128 15:45:42.781233 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.781249 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.781275 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:42Z","lastTransitionTime":"2026-01-28T15:45:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.787391 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\
\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.804905 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cbc7be5530d165c2eceeefaa705334b7f8dfb01
15bb21ac90ab2697c71ba3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cbc7be5530d165c2eceeefaa705334b7f8dfb0115bb21ac90ab2697c71ba3d0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"message\\\":\\\":40.553981 6230 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554051 6230 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554284 6230 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554593 6230 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554672 6230 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554763 6230 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.555220 6230 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0128 15:45:40.555286 6230 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0128 15:45:40.555298 6230 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0128 15:45:40.555337 6230 factory.go:656] Stopping watch factory\\\\nI0128 15:45:40.555374 6230 handler.go:208] Removed *v1.Node event handler 7\\\\nI0128 15:45:40.555391 6230 handler.go:208] Removed *v1.Node event handler 2\\\\nI0128 15:45:40.555391 6230 handler.go:208] Removed *v1.NetworkPolicy ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-dkzf6_openshift-ovn-kubernetes(d3371905-8bb6-433c-b905-43d69e7b382a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.816977 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c725da74-d9b4-4f16-9a21-f9aae2fd05f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63a7b85207f94aa6d9180a5e207421fe63993dbedbac93b11a5b7ad1b23bdd78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e528919fa4f8c2b8108615f2dd2d79d790aea2e94bb7b2de7bb1cd188fd47ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ede5aafa1ce2ebfbaa3570ec43084884dc57d2d15dc5d2751c3b5a9072d8dad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c990287111409ab3f1f5210adb2b4da06714d33e695ac76e2f83f40073dfca14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.827762 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.836084 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a6786216986b5b79f685fddb0711a6c7a735ff6b163a9516399cde7f85a8f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:42Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.883211 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.883250 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.883264 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.883282 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.883295 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:42Z","lastTransitionTime":"2026-01-28T15:45:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.985353 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.985420 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.985451 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.985471 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:42 crc kubenswrapper[4811]: I0128 15:45:42.985483 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:42Z","lastTransitionTime":"2026-01-28T15:45:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.088517 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.088584 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.088621 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.088656 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.088683 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:43Z","lastTransitionTime":"2026-01-28T15:45:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.190660 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.190690 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.190699 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.190711 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.190720 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:43Z","lastTransitionTime":"2026-01-28T15:45:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.293751 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.293810 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.293831 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.293855 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.293871 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:43Z","lastTransitionTime":"2026-01-28T15:45:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.303972 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 22:24:43.771469163 +0000 UTC
Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.339795 4811 scope.go:117] "RemoveContainer" containerID="9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65"
Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.397188 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.397254 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.397268 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.397289 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.397302 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:43Z","lastTransitionTime":"2026-01-28T15:45:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.499576 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.499617 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.499629 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.499654 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.499666 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:43Z","lastTransitionTime":"2026-01-28T15:45:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.601839 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.601868 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.601877 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.601892 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.601904 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:43Z","lastTransitionTime":"2026-01-28T15:45:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.605187 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.606838 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"03015015ac6736269b093c13f4df7abdad3a954c3336cc0b2de2e44cc5ae8338"} Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.607173 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.624885 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03015015ac6736269b093c13f4df7abdad3a954c3336cc0b2de2e44cc5ae8338\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all 
endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:43Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.644940 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b
243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:43Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.655072 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:43Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.665493 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:43Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.674205 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-28T15:45:43Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.676082 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bb3aac9c-e96e-4a5e-beb5-aeff56394467-metrics-certs\") pod \"network-metrics-daemon-t5mlc\" (UID: \"bb3aac9c-e96e-4a5e-beb5-aeff56394467\") " pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:45:43 crc kubenswrapper[4811]: E0128 15:45:43.676196 4811 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 28 15:45:43 crc kubenswrapper[4811]: E0128 15:45:43.676267 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bb3aac9c-e96e-4a5e-beb5-aeff56394467-metrics-certs podName:bb3aac9c-e96e-4a5e-beb5-aeff56394467 nodeName:}" failed. No retries permitted until 2026-01-28 15:45:45.676254383 +0000 UTC m=+38.430617966 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bb3aac9c-e96e-4a5e-beb5-aeff56394467-metrics-certs") pod "network-metrics-daemon-t5mlc" (UID: "bb3aac9c-e96e-4a5e-beb5-aeff56394467") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.684150 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f49
28d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:43Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.703176 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cbc7be5530d165c2eceeefaa705334b7f8dfb01
15bb21ac90ab2697c71ba3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cbc7be5530d165c2eceeefaa705334b7f8dfb0115bb21ac90ab2697c71ba3d0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"message\\\":\\\":40.553981 6230 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554051 6230 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554284 6230 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554593 6230 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554672 6230 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554763 6230 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.555220 6230 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0128 15:45:40.555286 6230 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0128 15:45:40.555298 6230 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0128 15:45:40.555337 6230 factory.go:656] Stopping watch factory\\\\nI0128 15:45:40.555374 6230 handler.go:208] Removed *v1.Node event handler 7\\\\nI0128 15:45:40.555391 6230 handler.go:208] Removed *v1.Node event handler 2\\\\nI0128 15:45:40.555391 6230 handler.go:208] Removed *v1.NetworkPolicy ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-dkzf6_openshift-ovn-kubernetes(d3371905-8bb6-433c-b905-43d69e7b382a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:43Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.703896 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.703926 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.703938 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.703954 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.703965 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:43Z","lastTransitionTime":"2026-01-28T15:45:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.714197 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c725da74-d9b4-4f16-9a21-f9aae2fd05f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63a7b85207f94aa6d9180a5e207421fe63993dbedbac93b11a5b7ad1b23bdd78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e528919fa4f8c2b8108615f2dd2d79d790aea2e94bb7b2de7bb1cd188fd47ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ede5aafa1ce2ebfbaa3570ec43084884dc57d2d15dc5d2751c3b5a9072d8dad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c990287111409ab3f1f5210adb2b4da06714d33e695ac76e2f83f40073dfca14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:43Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.723694 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:43Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.733996 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:43Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.746808 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:43Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.764339 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b6be9a85b332950bbbde5670c0ed3366ce07d2df9b7a68815b6582e8cf874f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:43Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.779367 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a6786216986b5b79f685fddb0711a6c7a735ff6b163a9516399cde7f85a8f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:43Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.798306 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:43Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.806670 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.806715 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.806727 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.806745 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.806758 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:43Z","lastTransitionTime":"2026-01-28T15:45:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.813843 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:43Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.827687 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac9a279-ca90-41f7-bf8d-d51717483598\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a90ec64eeca27c3b374baa74b14b2380008b4863ad6d3618d55f1dce62ea37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2803e1abf925bb53b578077c2bd42241a09b9c0bd5181c2fab6f0e09fda4ee18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadO
nly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dj8kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:43Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.838621 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5mlc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb3aac9c-e96e-4a5e-beb5-aeff56394467\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5mlc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:43Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.909371 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.909410 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.909439 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.909454 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:43 crc kubenswrapper[4811]: I0128 15:45:43.909465 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:43Z","lastTransitionTime":"2026-01-28T15:45:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.011846 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.011880 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.011889 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.011905 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.011916 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:44Z","lastTransitionTime":"2026-01-28T15:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.079594 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:45:44 crc kubenswrapper[4811]: E0128 15:45:44.079835 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:46:00.079801491 +0000 UTC m=+52.834165094 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.079989 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:45:44 crc kubenswrapper[4811]: E0128 15:45:44.080090 4811 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 28 15:45:44 crc kubenswrapper[4811]: E0128 15:45:44.080146 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-28 15:46:00.0801362 +0000 UTC m=+52.834499783 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.113838 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.113885 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.113899 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.113914 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.113928 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:44Z","lastTransitionTime":"2026-01-28T15:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.181233 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.181299 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.181332 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:45:44 crc kubenswrapper[4811]: E0128 15:45:44.181450 4811 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 28 15:45:44 crc kubenswrapper[4811]: E0128 15:45:44.181482 4811 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 28 15:45:44 crc kubenswrapper[4811]: E0128 15:45:44.181493 4811 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 28 15:45:44 crc kubenswrapper[4811]: E0128 15:45:44.181494 4811 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 28 15:45:44 crc kubenswrapper[4811]: E0128 15:45:44.181515 4811 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 28 15:45:44 crc kubenswrapper[4811]: E0128 15:45:44.181527 4811 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 28 15:45:44 crc kubenswrapper[4811]: E0128 15:45:44.181542 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-28 15:46:00.181527628 +0000 UTC m=+52.935891211 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 28 15:45:44 crc kubenswrapper[4811]: E0128 15:45:44.181574 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-28 15:46:00.181558689 +0000 UTC m=+52.935922342 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 28 15:45:44 crc kubenswrapper[4811]: E0128 15:45:44.181593 4811 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 28 15:45:44 crc kubenswrapper[4811]: E0128 15:45:44.181715 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-28 15:46:00.181678392 +0000 UTC m=+52.936042015 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.216154 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.216198 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.216207 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.216222 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.216233 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:44Z","lastTransitionTime":"2026-01-28T15:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.304883 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 02:22:58.074532422 +0000 UTC Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.319249 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.319288 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.319299 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.319313 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.319323 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:44Z","lastTransitionTime":"2026-01-28T15:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.338820 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.338858 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.338913 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.338918 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:45:44 crc kubenswrapper[4811]: E0128 15:45:44.338995 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:45:44 crc kubenswrapper[4811]: E0128 15:45:44.339088 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:45:44 crc kubenswrapper[4811]: E0128 15:45:44.339174 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:45:44 crc kubenswrapper[4811]: E0128 15:45:44.339293 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.421172 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.421209 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.421217 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.421233 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.421242 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:44Z","lastTransitionTime":"2026-01-28T15:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.524206 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.524250 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.524264 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.524281 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.524292 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:44Z","lastTransitionTime":"2026-01-28T15:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.548817 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.548858 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.548868 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.548884 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.548894 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:44Z","lastTransitionTime":"2026-01-28T15:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:44 crc kubenswrapper[4811]: E0128 15:45:44.562034 4811 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0d7027f1-1ac9-4166-9238-d8a50d9d7547\\\",\\\"systemUUID\\\":\\\"abb845de-029b-47c8-9624-111770212716\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:44Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.566081 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.566126 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.566142 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.566158 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.566169 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:44Z","lastTransitionTime":"2026-01-28T15:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:44 crc kubenswrapper[4811]: E0128 15:45:44.579973 4811 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0d7027f1-1ac9-4166-9238-d8a50d9d7547\\\",\\\"systemUUID\\\":\\\"abb845de-029b-47c8-9624-111770212716\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:44Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.584725 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.584787 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.584809 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.584836 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.584858 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:44Z","lastTransitionTime":"2026-01-28T15:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:44 crc kubenswrapper[4811]: E0128 15:45:44.600944 4811 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0d7027f1-1ac9-4166-9238-d8a50d9d7547\\\",\\\"systemUUID\\\":\\\"abb845de-029b-47c8-9624-111770212716\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:44Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.605646 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.605708 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.605721 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.605736 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.605747 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:44Z","lastTransitionTime":"2026-01-28T15:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:44 crc kubenswrapper[4811]: E0128 15:45:44.619712 4811 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0d7027f1-1ac9-4166-9238-d8a50d9d7547\\\",\\\"systemUUID\\\":\\\"abb845de-029b-47c8-9624-111770212716\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:44Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.623318 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.623377 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.623395 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.623424 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.623503 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:44Z","lastTransitionTime":"2026-01-28T15:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:44 crc kubenswrapper[4811]: E0128 15:45:44.636754 4811 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0d7027f1-1ac9-4166-9238-d8a50d9d7547\\\",\\\"systemUUID\\\":\\\"abb845de-029b-47c8-9624-111770212716\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:44Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:44 crc kubenswrapper[4811]: E0128 15:45:44.636921 4811 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.638825 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.638899 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.638927 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.638952 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.638966 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:44Z","lastTransitionTime":"2026-01-28T15:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.741385 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.741423 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.741461 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.741479 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.741491 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:44Z","lastTransitionTime":"2026-01-28T15:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.844145 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.844203 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.844219 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.844242 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.844253 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:44Z","lastTransitionTime":"2026-01-28T15:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.947069 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.947148 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.947198 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.947219 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:44 crc kubenswrapper[4811]: I0128 15:45:44.947231 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:44Z","lastTransitionTime":"2026-01-28T15:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.050240 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.050309 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.050318 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.050333 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.050342 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:45Z","lastTransitionTime":"2026-01-28T15:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.154090 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.154167 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.154187 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.154212 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.154231 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:45Z","lastTransitionTime":"2026-01-28T15:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.256938 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.256971 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.256979 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.256991 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.257000 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:45Z","lastTransitionTime":"2026-01-28T15:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.305050 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 09:10:02.477063765 +0000 UTC Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.360299 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.360348 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.360362 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.360378 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.360389 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:45Z","lastTransitionTime":"2026-01-28T15:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.462804 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.462869 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.462901 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.462941 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.462964 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:45Z","lastTransitionTime":"2026-01-28T15:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.566065 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.566131 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.566153 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.566184 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.566205 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:45Z","lastTransitionTime":"2026-01-28T15:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.668987 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.669335 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.669605 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.669715 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.669751 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:45Z","lastTransitionTime":"2026-01-28T15:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.696116 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bb3aac9c-e96e-4a5e-beb5-aeff56394467-metrics-certs\") pod \"network-metrics-daemon-t5mlc\" (UID: \"bb3aac9c-e96e-4a5e-beb5-aeff56394467\") " pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:45:45 crc kubenswrapper[4811]: E0128 15:45:45.696285 4811 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 28 15:45:45 crc kubenswrapper[4811]: E0128 15:45:45.696390 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bb3aac9c-e96e-4a5e-beb5-aeff56394467-metrics-certs podName:bb3aac9c-e96e-4a5e-beb5-aeff56394467 nodeName:}" failed. No retries permitted until 2026-01-28 15:45:49.696366788 +0000 UTC m=+42.450730391 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bb3aac9c-e96e-4a5e-beb5-aeff56394467-metrics-certs") pod "network-metrics-daemon-t5mlc" (UID: "bb3aac9c-e96e-4a5e-beb5-aeff56394467") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.771858 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.771913 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.771925 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.771940 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.771951 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:45Z","lastTransitionTime":"2026-01-28T15:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.874486 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.874539 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.874549 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.874564 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.874579 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:45Z","lastTransitionTime":"2026-01-28T15:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.977462 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.977512 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.977522 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.977541 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:45 crc kubenswrapper[4811]: I0128 15:45:45.977554 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:45Z","lastTransitionTime":"2026-01-28T15:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.080545 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.080587 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.080598 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.080612 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.080621 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:46Z","lastTransitionTime":"2026-01-28T15:45:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.182632 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.182676 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.182685 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.182699 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.182710 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:46Z","lastTransitionTime":"2026-01-28T15:45:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.285056 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.285117 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.285133 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.285151 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.285162 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:46Z","lastTransitionTime":"2026-01-28T15:45:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.306015 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 22:28:21.434720279 +0000 UTC Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.338771 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.338813 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.338873 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:45:46 crc kubenswrapper[4811]: E0128 15:45:46.338973 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.339023 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:45:46 crc kubenswrapper[4811]: E0128 15:45:46.339114 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:45:46 crc kubenswrapper[4811]: E0128 15:45:46.339069 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:45:46 crc kubenswrapper[4811]: E0128 15:45:46.339370 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.388127 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.388176 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.388189 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.388207 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.388221 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:46Z","lastTransitionTime":"2026-01-28T15:45:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.493319 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.493364 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.493377 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.494410 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.495044 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:46Z","lastTransitionTime":"2026-01-28T15:45:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.597975 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.598021 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.598033 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.598067 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.598077 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:46Z","lastTransitionTime":"2026-01-28T15:45:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.699635 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.699686 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.699703 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.699728 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.699745 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:46Z","lastTransitionTime":"2026-01-28T15:45:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.803514 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.803566 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.803578 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.803596 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.803609 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:46Z","lastTransitionTime":"2026-01-28T15:45:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.906568 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.906652 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.906671 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.906695 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:46 crc kubenswrapper[4811]: I0128 15:45:46.906715 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:46Z","lastTransitionTime":"2026-01-28T15:45:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:47 crc kubenswrapper[4811]: I0128 15:45:47.009218 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:47 crc kubenswrapper[4811]: I0128 15:45:47.009268 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:47 crc kubenswrapper[4811]: I0128 15:45:47.009279 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:47 crc kubenswrapper[4811]: I0128 15:45:47.009295 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:47 crc kubenswrapper[4811]: I0128 15:45:47.009308 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:47Z","lastTransitionTime":"2026-01-28T15:45:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 28 15:45:47 crc kubenswrapper[4811]: I0128 15:45:47.214300 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:47 crc kubenswrapper[4811]: I0128 15:45:47.214624 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:47 crc kubenswrapper[4811]: I0128 15:45:47.214715 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:47 crc kubenswrapper[4811]: I0128 15:45:47.214791 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:47 crc kubenswrapper[4811]: I0128 15:45:47.214859 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:47Z","lastTransitionTime":"2026-01-28T15:45:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
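The message repeated in every one of these heartbeats points at a single cause: the container runtime (CRI-O on this node) reports NetworkReady=false because /etc/kubernetes/cni/net.d/ contains no CNI network configuration yet. A rough stand-in for that check, standard library only; real runtimes go through libcni's config loader, and the accepted file extensions here are an assumption borrowed from common CNI loaders, not a confirmed CRI-O detail:

// Sketch: approximate the readiness test behind "no CNI configuration
// file in /etc/kubernetes/cni/net.d/".
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// hasCNIConfig reports whether the directory holds any plausible CNI
// network config file (.conf/.conflist/.json is an assumed convention).
func hasCNIConfig(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
	if err != nil || !ok {
		fmt.Println("NetworkReady=false: no CNI configuration file found", err)
		return
	}
	fmt.Println("NetworkReady=true")
}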
Jan 28 15:45:47 crc kubenswrapper[4811]: I0128 15:45:47.306685 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 03:41:13.590080541 +0000 UTC
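certificate_manager.go:356 logs the serving certificate's expiry together with a jittered rotation deadline. Both deadlines logged in this window (2025-12-10 here, 2026-01-04 in the entry a second later below) are already in the past relative to the node clock, so the manager keeps recomputing the deadline with fresh jitter and retrying rotation. A sketch of how such a deadline can be derived from the validity window; the 70-90% window is an approximation of client-go's jitter and the one-year lifetime is assumed, not taken from the log:

// Sketch: derive a jittered rotation deadline from a certificate's
// validity window, in the spirit of client-go's certificate manager.
package main

import (
	"fmt"
	"math/rand"
	"time"
)

func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	// Random fraction in [0.7, 0.9) of the validity period (assumed window).
	jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	notAfter := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC) // expiry from the log
	notBefore := notAfter.Add(-365 * 24 * time.Hour)          // assumed 1y lifetime
	fmt.Println("rotation deadline:", rotationDeadline(notBefore, notAfter))
}

Because the deadline is re-drawn on every pass, two consecutive log lines can show different deadlines for the same certificate, which is exactly what this excerpt does.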
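The util.go and pod_workers.go entries just below show the other side of the same gate: pods that need a new sandbox are skipped with "Error syncing pod" while the runtime network is not ready, whereas host-network pods (etcd, kube-apiserver, and the other static pods already running on this node) are exempt. A sketch of that gate under hypothetical names (RuntimeStatus, networkReadyGate); the real check lives in the kubelet's runtime state tracking:

// Sketch: the gate behind "Error syncing pod, skipping ... network is
// not ready". Type and function names are hypothetical.
package main

import "fmt"

// RuntimeStatus mirrors the fields the kubelet reads from the CRI runtime.
type RuntimeStatus struct {
	NetworkReady bool
	Reason       string
	Message      string
}

// networkReadyGate refuses to sync a pod that needs a new sandbox while
// the runtime network is down; host-network pods are allowed through.
func networkReadyGate(st RuntimeStatus, hostNetwork bool) error {
	if st.NetworkReady || hostNetwork {
		return nil
	}
	return fmt.Errorf("network is not ready: container runtime network not ready: NetworkReady=false reason:%s message:%s", st.Reason, st.Message)
}

func main() {
	st := RuntimeStatus{
		NetworkReady: false,
		Reason:       "NetworkPluginNotReady",
		Message:      "Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?",
	}
	if err := networkReadyGate(st, false); err != nil {
		fmt.Println(`"Error syncing pod, skipping" err=` + err.Error())
	}
}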
Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.307724 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 10:06:38.047117616 +0000 UTC
Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.338889 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc"
Jan 28 15:45:48 crc kubenswrapper[4811]: E0128 15:45:48.339046 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467"
Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.339116 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
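Every status_manager.go:875 failure that follows shares one root cause: the network-node-identity webhook on 127.0.0.1:9743 serves a certificate that expired on 2025-08-24, while the node clock reads 2026-01-28, so Go's TLS stack rejects the handshake before any status patch can land. A sketch of the crypto/x509 validity comparison that produces exactly the logged error string; the NotBefore value is an assumed issue time, the other values come from the log:

// Sketch: the validity check inside Go's crypto/x509 that yields
// "certificate has expired or is not yet valid". No chain is verified here.
package main

import (
	"crypto/x509"
	"fmt"
	"time"
)

func main() {
	notAfter := time.Date(2025, 8, 24, 17, 21, 41, 0, time.UTC) // from the log
	cert := &x509.Certificate{
		NotBefore: notAfter.Add(-365 * 24 * time.Hour), // assumed issue time
		NotAfter:  notAfter,
	}
	now := time.Date(2026, 1, 28, 15, 45, 48, 0, time.UTC) // node clock from the log
	if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
		err := x509.CertificateInvalidError{
			Cert:   cert,
			Reason: x509.Expired,
			Detail: fmt.Sprintf("current time %s is after %s",
				now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339)),
		}
		fmt.Println(err) // x509: certificate has expired or is not yet valid: ...
	}
}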
Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.339147 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 28 15:45:48 crc kubenswrapper[4811]: E0128 15:45:48.339271 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 28 15:45:48 crc kubenswrapper[4811]: E0128 15:45:48.339410 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.339544 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 28 15:45:48 crc kubenswrapper[4811]: E0128 15:45:48.339705 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.346549 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.346646 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.346674 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.346711 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.346746 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:48Z","lastTransitionTime":"2026-01-28T15:45:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.359711 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:48Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.374576 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:48Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.390044 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac9a279-ca90-41f7-bf8d-d51717483598\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a90ec64eeca27c3b374baa74b14b2380008b4863ad6d3618d55f1dce62ea37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2803e1abf925bb53b578077c2bd42241a09b9c0bd5181c2fab6f0e09fda4ee18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dj8kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:48Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.403556 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5mlc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb3aac9c-e96e-4a5e-beb5-aeff56394467\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5mlc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:48Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.426798 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:48Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.437085 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:48Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.449734 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.449772 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.449783 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.449798 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.449810 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:48Z","lastTransitionTime":"2026-01-28T15:45:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.450888 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-
apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03015015ac6736269b093c13f4df7abdad3a954c3336cc0b2de2e44cc5ae8338\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:48Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.482636 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b
243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:48Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.505400 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:48Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.518888 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:48Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.533836 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b6be9a85b332950bbbde5670c0ed3366ce07d2df9b7a68815b6582e8cf874f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:48Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.544252 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:48Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.551730 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.551764 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.551774 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.551789 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.551798 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:48Z","lastTransitionTime":"2026-01-28T15:45:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.563487 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cbc7be5530d165c2eceeefaa705334b7f8dfb0115bb21ac90ab2697c71ba3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cbc7be5530d165c2eceeefaa705334b7f8dfb0115bb21ac90ab2697c71ba3d0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"message\\\":\\\":40.553981 6230 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554051 6230 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554284 6230 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554593 6230 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554672 6230 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554763 6230 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.555220 6230 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0128 15:45:40.555286 6230 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0128 15:45:40.555298 6230 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0128 15:45:40.555337 6230 factory.go:656] Stopping watch factory\\\\nI0128 15:45:40.555374 6230 handler.go:208] Removed *v1.Node event handler 7\\\\nI0128 15:45:40.555391 6230 handler.go:208] Removed *v1.Node event handler 2\\\\nI0128 15:45:40.555391 6230 handler.go:208] Removed *v1.NetworkPolicy ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-dkzf6_openshift-ovn-kubernetes(d3371905-8bb6-433c-b905-43d69e7b382a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:48Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.575620 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c725da74-d9b4-4f16-9a21-f9aae2fd05f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63a7b85207f94aa6d9180a5e207421fe63993dbedbac93b11a5b7ad1b23bdd78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e528919fa4f8c2b8108615f2dd2d79d790aea2e94bb7b2de7bb1cd188fd47ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ede5aafa1ce2ebfbaa3570ec43084884dc57d2d15dc5d2751c3b5a9072d8dad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c990287111409ab3f1f5210adb2b4da06714d33e695ac76e2f83f40073dfca14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:48Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.588546 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:48Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.601651 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:48Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.611685 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a6786216986b5b79f685fddb0711a6c7a735ff6b163a9516399cde7f85a8f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:48Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.653664 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.653700 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.653709 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.653722 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.653732 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:48Z","lastTransitionTime":"2026-01-28T15:45:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.755979 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.756072 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.756125 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.756150 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.756198 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:48Z","lastTransitionTime":"2026-01-28T15:45:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.864037 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.864091 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.864103 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.864122 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.864136 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:48Z","lastTransitionTime":"2026-01-28T15:45:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.966651 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.966699 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.966711 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.966727 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:48 crc kubenswrapper[4811]: I0128 15:45:48.966739 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:48Z","lastTransitionTime":"2026-01-28T15:45:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.070373 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.070473 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.070490 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.070510 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.070523 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:49Z","lastTransitionTime":"2026-01-28T15:45:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.173540 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.173652 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.173673 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.173701 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.173723 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:49Z","lastTransitionTime":"2026-01-28T15:45:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.276616 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.276727 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.276751 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.276787 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.276811 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:49Z","lastTransitionTime":"2026-01-28T15:45:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.308945 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 00:53:25.876795418 +0000 UTC Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.379319 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.379358 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.379369 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.379383 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.379394 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:49Z","lastTransitionTime":"2026-01-28T15:45:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.481676 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.481927 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.482080 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.482216 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.482340 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:49Z","lastTransitionTime":"2026-01-28T15:45:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.585088 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.585127 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.585138 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.585158 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.585171 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:49Z","lastTransitionTime":"2026-01-28T15:45:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.688004 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.688048 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.688061 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.688078 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.688091 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:49Z","lastTransitionTime":"2026-01-28T15:45:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.739028 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bb3aac9c-e96e-4a5e-beb5-aeff56394467-metrics-certs\") pod \"network-metrics-daemon-t5mlc\" (UID: \"bb3aac9c-e96e-4a5e-beb5-aeff56394467\") " pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:45:49 crc kubenswrapper[4811]: E0128 15:45:49.739197 4811 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 28 15:45:49 crc kubenswrapper[4811]: E0128 15:45:49.739301 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bb3aac9c-e96e-4a5e-beb5-aeff56394467-metrics-certs podName:bb3aac9c-e96e-4a5e-beb5-aeff56394467 nodeName:}" failed. No retries permitted until 2026-01-28 15:45:57.739278752 +0000 UTC m=+50.493642405 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bb3aac9c-e96e-4a5e-beb5-aeff56394467-metrics-certs") pod "network-metrics-daemon-t5mlc" (UID: "bb3aac9c-e96e-4a5e-beb5-aeff56394467") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.791054 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.791111 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.791122 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.791139 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.791150 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:49Z","lastTransitionTime":"2026-01-28T15:45:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.893717 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.893759 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.893769 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.893785 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.893797 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:49Z","lastTransitionTime":"2026-01-28T15:45:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.996608 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.996898 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.997005 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.997144 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:49 crc kubenswrapper[4811]: I0128 15:45:49.997223 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:49Z","lastTransitionTime":"2026-01-28T15:45:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.100592 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.100631 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.100647 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.100667 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.100681 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:50Z","lastTransitionTime":"2026-01-28T15:45:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.202909 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.203207 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.203309 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.203405 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.203534 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:50Z","lastTransitionTime":"2026-01-28T15:45:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.305513 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.305795 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.305876 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.305983 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.306043 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:50Z","lastTransitionTime":"2026-01-28T15:45:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.309820 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 23:24:37.15540204 +0000 UTC Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.339120 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.339192 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.339158 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:45:50 crc kubenswrapper[4811]: E0128 15:45:50.339342 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.339511 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:45:50 crc kubenswrapper[4811]: E0128 15:45:50.339517 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:45:50 crc kubenswrapper[4811]: E0128 15:45:50.339641 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:45:50 crc kubenswrapper[4811]: E0128 15:45:50.339756 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.409572 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.409636 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.409655 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.409690 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.409709 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:50Z","lastTransitionTime":"2026-01-28T15:45:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.512698 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.513025 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.513115 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.513196 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.513274 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:50Z","lastTransitionTime":"2026-01-28T15:45:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.616215 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.616263 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.616275 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.616293 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.616305 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:50Z","lastTransitionTime":"2026-01-28T15:45:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.719313 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.719386 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.719405 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.719468 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.719496 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:50Z","lastTransitionTime":"2026-01-28T15:45:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.822900 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.822940 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.822948 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.822964 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.822976 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:50Z","lastTransitionTime":"2026-01-28T15:45:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.925213 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.925263 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.925278 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.925295 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:50 crc kubenswrapper[4811]: I0128 15:45:50.925308 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:50Z","lastTransitionTime":"2026-01-28T15:45:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.027364 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.027402 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.027413 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.027448 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.027461 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:51Z","lastTransitionTime":"2026-01-28T15:45:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.129394 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.129467 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.129484 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.129502 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.129514 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:51Z","lastTransitionTime":"2026-01-28T15:45:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.232278 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.232324 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.232335 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.232351 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.232363 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:51Z","lastTransitionTime":"2026-01-28T15:45:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.310378 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-25 02:26:18.687319536 +0000 UTC Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.335359 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.335414 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.335422 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.335465 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.335477 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:51Z","lastTransitionTime":"2026-01-28T15:45:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.437339 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.437376 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.437387 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.437401 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.437414 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:51Z","lastTransitionTime":"2026-01-28T15:45:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.541463 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.541509 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.541521 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.541546 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.541557 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:51Z","lastTransitionTime":"2026-01-28T15:45:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.644421 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.644460 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.644468 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.644482 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.644490 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:51Z","lastTransitionTime":"2026-01-28T15:45:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.747561 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.747613 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.747624 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.747638 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.747648 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:51Z","lastTransitionTime":"2026-01-28T15:45:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.850682 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.850719 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.850727 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.850741 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.850752 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:51Z","lastTransitionTime":"2026-01-28T15:45:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.953247 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.953287 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.953297 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.953310 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:51 crc kubenswrapper[4811]: I0128 15:45:51.953320 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:51Z","lastTransitionTime":"2026-01-28T15:45:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.056163 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.056208 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.056219 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.056234 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.056245 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:52Z","lastTransitionTime":"2026-01-28T15:45:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.159543 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.159607 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.159627 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.159657 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.159680 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:52Z","lastTransitionTime":"2026-01-28T15:45:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.262803 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.262850 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.262858 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.262873 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.262883 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:52Z","lastTransitionTime":"2026-01-28T15:45:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.311524 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 04:30:43.119303317 +0000 UTC Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.339172 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.339240 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.339400 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:45:52 crc kubenswrapper[4811]: E0128 15:45:52.339324 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.339257 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:45:52 crc kubenswrapper[4811]: E0128 15:45:52.339630 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:45:52 crc kubenswrapper[4811]: E0128 15:45:52.339759 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:45:52 crc kubenswrapper[4811]: E0128 15:45:52.339918 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.365223 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.365279 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.365291 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.365308 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.365321 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:52Z","lastTransitionTime":"2026-01-28T15:45:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.468137 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.468176 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.468186 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.468199 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.468208 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:52Z","lastTransitionTime":"2026-01-28T15:45:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.570039 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.570083 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.570091 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.570105 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.570114 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:52Z","lastTransitionTime":"2026-01-28T15:45:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.672903 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.672969 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.672988 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.673016 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.673036 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:52Z","lastTransitionTime":"2026-01-28T15:45:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.775991 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.776086 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.776113 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.776137 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.776155 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:52Z","lastTransitionTime":"2026-01-28T15:45:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.883328 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.883398 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.883415 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.883465 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.883483 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:52Z","lastTransitionTime":"2026-01-28T15:45:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.986007 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.986055 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.986065 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.986080 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:52 crc kubenswrapper[4811]: I0128 15:45:52.986092 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:52Z","lastTransitionTime":"2026-01-28T15:45:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.089074 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.089125 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.089134 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.089151 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.089162 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:53Z","lastTransitionTime":"2026-01-28T15:45:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.192057 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.192132 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.192155 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.192187 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.192210 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:53Z","lastTransitionTime":"2026-01-28T15:45:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.295718 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.295767 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.295780 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.295800 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.295814 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:53Z","lastTransitionTime":"2026-01-28T15:45:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.312020 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-03 17:45:48.948917129 +0000 UTC Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.397839 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.397885 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.397895 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.397915 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.397928 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:53Z","lastTransitionTime":"2026-01-28T15:45:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.500854 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.500918 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.500935 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.500958 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.500974 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:53Z","lastTransitionTime":"2026-01-28T15:45:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.604138 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.604180 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.604190 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.604207 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.604220 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:53Z","lastTransitionTime":"2026-01-28T15:45:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.707307 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.707373 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.707391 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.707414 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.707464 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:53Z","lastTransitionTime":"2026-01-28T15:45:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.810207 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.810273 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.810285 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.810303 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.810312 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:53Z","lastTransitionTime":"2026-01-28T15:45:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.913391 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.913458 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.913468 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.913487 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:53 crc kubenswrapper[4811]: I0128 15:45:53.913497 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:53Z","lastTransitionTime":"2026-01-28T15:45:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.016682 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.016732 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.016748 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.016765 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.016776 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:54Z","lastTransitionTime":"2026-01-28T15:45:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.119299 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.119348 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.119362 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.119380 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.119395 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:54Z","lastTransitionTime":"2026-01-28T15:45:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.221620 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.221711 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.221737 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.221768 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.221793 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:54Z","lastTransitionTime":"2026-01-28T15:45:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.313023 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 10:06:24.287592112 +0000 UTC Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.324774 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.324828 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.324845 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.324867 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.324884 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:54Z","lastTransitionTime":"2026-01-28T15:45:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.338397 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.338512 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.338625 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:45:54 crc kubenswrapper[4811]: E0128 15:45:54.338633 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.338741 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:45:54 crc kubenswrapper[4811]: E0128 15:45:54.338930 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:45:54 crc kubenswrapper[4811]: E0128 15:45:54.339048 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:45:54 crc kubenswrapper[4811]: E0128 15:45:54.339191 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.427514 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.427634 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.427658 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.427683 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.427698 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:54Z","lastTransitionTime":"2026-01-28T15:45:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.530490 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.530560 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.530574 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.530600 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.530618 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:54Z","lastTransitionTime":"2026-01-28T15:45:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.633737 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.633817 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.633829 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.633849 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.633863 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:54Z","lastTransitionTime":"2026-01-28T15:45:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.740359 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.740489 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.740503 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.740528 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.740542 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:54Z","lastTransitionTime":"2026-01-28T15:45:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.843745 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.843794 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.843806 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.843824 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.843837 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:54Z","lastTransitionTime":"2026-01-28T15:45:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.946227 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.946305 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.946323 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.946354 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:54 crc kubenswrapper[4811]: I0128 15:45:54.946393 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:54Z","lastTransitionTime":"2026-01-28T15:45:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.002897 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.002985 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.003015 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.003044 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.003065 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:55Z","lastTransitionTime":"2026-01-28T15:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:55 crc kubenswrapper[4811]: E0128 15:45:55.027423 4811 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0d7027f1-1ac9-4166-9238-d8a50d9d7547\\\",\\\"systemUUID\\\":\\\"abb845de-029b-47c8-9624-111770212716\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.032729 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.032823 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.032842 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.032866 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.032884 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:55Z","lastTransitionTime":"2026-01-28T15:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:55 crc kubenswrapper[4811]: E0128 15:45:55.047611 4811 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0d7027f1-1ac9-4166-9238-d8a50d9d7547\\\",\\\"systemUUID\\\":\\\"abb845de-029b-47c8-9624-111770212716\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.053138 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.053160 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.053171 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.053186 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.053198 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:55Z","lastTransitionTime":"2026-01-28T15:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:55 crc kubenswrapper[4811]: E0128 15:45:55.071227 4811 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0d7027f1-1ac9-4166-9238-d8a50d9d7547\\\",\\\"systemUUID\\\":\\\"abb845de-029b-47c8-9624-111770212716\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.075491 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.075522 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.075533 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.075549 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.075563 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:55Z","lastTransitionTime":"2026-01-28T15:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:55 crc kubenswrapper[4811]: E0128 15:45:55.088924 4811 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0d7027f1-1ac9-4166-9238-d8a50d9d7547\\\",\\\"systemUUID\\\":\\\"abb845de-029b-47c8-9624-111770212716\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.093411 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.093464 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.093473 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.093487 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.093497 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:55Z","lastTransitionTime":"2026-01-28T15:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.097282 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 28 15:45:55 crc kubenswrapper[4811]: E0128 15:45:55.106755 4811 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0d7027f1-1ac9-4166-9238-d8a50d9d7547\\\",\\\"systemUUID\\\":\\\"abb845de-029b-47c8-9624-111770212716\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: E0128 15:45:55.106927 4811 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.108580 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.108630 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.108641 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.108660 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.108677 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:55Z","lastTransitionTime":"2026-01-28T15:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.115307 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03015015ac6736269b093c13f4df7abdad3a954c3336cc0b2de2e44cc5ae8338\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 
15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.137280 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b
243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.153648 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.167841 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.179254 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.192730 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c725da74-d9b4-4f16-9a21-f9aae2fd05f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63a7b85207f94aa6d9180a5e207421fe63993dbedbac93b11a5b7ad1b23bdd78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e528919fa4f8c2b8108615f2dd2d79d790aea2e94bb7b2de7bb1cd188fd47ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ede5aafa1ce2ebfbaa3570ec43084884dc57d2d15dc5d2751c3b5a9072d8dad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}
,{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c990287111409ab3f1f5210adb2b4da06714d33e695ac76e2f83f40073dfca14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.204104 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.211243 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.211277 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.211302 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.211318 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.211328 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:55Z","lastTransitionTime":"2026-01-28T15:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.215772 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.228739 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.248664 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b6be9a85b332950bbbde5670c0ed3366ce07d2df9b7a68815b6582e8cf874f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.261939 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.283263 4811 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cbc7be5530d165c2eceeefaa705334b7f8dfb0115bb21ac90ab2697c71ba3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cbc7be5530d165c2eceeefaa705334b7f8dfb0115bb21ac90ab2697c71ba3d0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"message\\\":\\\":40.553981 6230 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554051 6230 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554284 6230 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554593 6230 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554672 6230 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554763 6230 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.555220 6230 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0128 15:45:40.555286 6230 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0128 15:45:40.555298 6230 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0128 15:45:40.555337 6230 factory.go:656] Stopping watch factory\\\\nI0128 15:45:40.555374 6230 handler.go:208] Removed *v1.Node event handler 7\\\\nI0128 15:45:40.555391 6230 handler.go:208] Removed *v1.Node event handler 2\\\\nI0128 15:45:40.555391 6230 handler.go:208] Removed *v1.NetworkPolicy ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-dkzf6_openshift-ovn-kubernetes(d3371905-8bb6-433c-b905-43d69e7b382a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.293182 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a6786216986b5b79f685fddb0711a6c7a735ff6b163a9516399cde7f85a8f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.306087 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.313291 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 21:33:22.152594396 +0000 UTC Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.313786 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.313815 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.313823 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.313836 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.313861 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:55Z","lastTransitionTime":"2026-01-28T15:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.317731 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubel
et\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.328195 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac9a279-ca90-41f7-bf8d-d51717483598\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a90ec64eeca27c3b374baa74b14b2380008b4863ad6d3618d55f1dce62ea37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2803e1abf925bb53b578077c2bd42241a09b9c0bd5181c2fab6f0e09fda4ee1
8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dj8kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.339458 4811 scope.go:117] "RemoveContainer" containerID="3cbc7be5530d165c2eceeefaa705334b7f8dfb0115bb21ac90ab2697c71ba3d0" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.341134 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5mlc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb3aac9c-e96e-4a5e-beb5-aeff56394467\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5mlc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.416879 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.416936 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.416948 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.416964 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.416973 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:55Z","lastTransitionTime":"2026-01-28T15:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.519610 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.519647 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.519657 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.519672 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.519683 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:55Z","lastTransitionTime":"2026-01-28T15:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.540107 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.549375 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.554580 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03015015ac6736269b093c13f4df7abdad3a954c3336cc0b2de2e44cc5ae8338\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.578048 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b
243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.591613 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.604772 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.614605 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.622167 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.622202 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.622214 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.622234 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.622247 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:55Z","lastTransitionTime":"2026-01-28T15:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.630547 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c725da74-d9b4-4f16-9a21-f9aae2fd05f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63a7b85207f94aa6d9180a5e207421fe63993dbedbac93b11a5b7ad1b23bdd78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e528919fa4f8c2b8108615f2dd2d79d790aea2e94bb7b2de7bb1cd188fd47ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\
\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ede5aafa1ce2ebfbaa3570ec43084884dc57d2d15dc5d2751c3b5a9072d8dad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c990287111409ab3f1f5210adb2b4da06714d33e695ac76e2f83f40073dfca14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.643123 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready 
status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.649313 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dkzf6_d3371905-8bb6-433c-b905-43d69e7b382a/ovnkube-controller/1.log" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.652216 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" event={"ID":"d3371905-8bb6-433c-b905-43d69e7b382a","Type":"ContainerStarted","Data":"87d2f6f9e01f65f6e0d5755580858274ad796afdce547b78989251f29be3e913"} Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.653001 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.660204 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.672550 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.685043 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b6be9a85b332950bbbde5670c0ed3366ce07d2df9b7a68815b6582e8cf874f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.695802 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.716353 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cbc7be5530d165c2eceeefaa705334b7f8dfb01
15bb21ac90ab2697c71ba3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cbc7be5530d165c2eceeefaa705334b7f8dfb0115bb21ac90ab2697c71ba3d0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"message\\\":\\\":40.553981 6230 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554051 6230 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554284 6230 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554593 6230 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554672 6230 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554763 6230 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.555220 6230 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0128 15:45:40.555286 6230 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0128 15:45:40.555298 6230 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0128 15:45:40.555337 6230 factory.go:656] Stopping watch factory\\\\nI0128 15:45:40.555374 6230 handler.go:208] Removed *v1.Node event handler 7\\\\nI0128 15:45:40.555391 6230 handler.go:208] Removed *v1.Node event handler 2\\\\nI0128 15:45:40.555391 6230 handler.go:208] Removed *v1.NetworkPolicy ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-dkzf6_openshift-ovn-kubernetes(d3371905-8bb6-433c-b905-43d69e7b382a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.724487 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.724537 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.724547 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.724562 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.724574 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:55Z","lastTransitionTime":"2026-01-28T15:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.729131 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a6786216986b5b79f685fddb0711a6c7a735ff6b163a9516399cde7f85a8f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.745379 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.763013 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.775615 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac9a279-ca90-41f7-bf8d-d51717483598\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a90ec64eeca27c3b374baa74b14b2380008b4863ad6d3618d55f1dce62ea37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2803e1abf925bb53b578077c2bd42241a09b9c0bd5181c2fab6f0e09fda4ee18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dj8kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.790043 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5mlc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb3aac9c-e96e-4a5e-beb5-aeff56394467\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5mlc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.802759 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.820548 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.828344 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.828400 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.828414 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.828455 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.828472 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:55Z","lastTransitionTime":"2026-01-28T15:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.844031 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b6be9a85b332950bbbde5670c0ed3366ce07d2df9b7a68815b6582e8cf874f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.862652 4811 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 
15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.890000 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c3fda762fe236d14fa4dc0eb6c33
fbf1c958d805f3f4fc470e0f822ecf1265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha
256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87d2f6f9e01f65f6e0d5755580858274ad796afdce547b78989251f29be3e913\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cbc7be5530d165c2eceeefaa705334b7f8dfb0115bb21ac90ab2697c71ba3d0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"message\\\":\\\":40.553981 6230 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554051 6230 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554284 6230 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554593 6230 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554672 6230 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554763 6230 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.555220 6230 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0128 15:45:40.555286 6230 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0128 15:45:40.555298 6230 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0128 15:45:40.555337 6230 factory.go:656] Stopping watch factory\\\\nI0128 15:45:40.555374 6230 handler.go:208] Removed *v1.Node event handler 7\\\\nI0128 15:45:40.555391 6230 handler.go:208] Removed *v1.Node event handler 2\\\\nI0128 15:45:40.555391 6230 handler.go:208] Removed *v1.NetworkPolicy 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.912400 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c725da74-d9b4-4f16-9a21-f9aae2fd05f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63a7b85207f94aa6d9180a5e207421fe63993dbedbac93b11a5b7ad1b23bdd78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e528919fa4f8c2b8108615f2dd2d79d790aea2e94bb7b2de7bb1cd188fd47ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ede5aafa1ce2ebfbaa3570ec43084884dc57d2d15dc5d2751c3b5a9072d8dad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c990287111409ab3f1f5210adb2b4da06714d33e695ac76e2f83f40073dfca14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z"
Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.928371 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45f0fa8e-5670-4ba9-bc3b-868ceb764c20\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6aeff9c9b52faf757610027512ade23bf2c2b89068e97507c51b88d9f3a76c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://566c2ee1b03593202e07f16e8b1a976ed27ca7036a10b96f7eebce30147baf3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59cd87b55159f29922118c39ffc75f0b23d700438e9dd590f82a14c1880ed86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://296eeb684557d95622f81bb202a9d96da8036a904b97539ea34b546cfd37f274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296eeb684557d95622f81bb202a9d96da8036a904b97539ea34b546cfd37f274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z"
Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.931215 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.931266 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.931279 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.931300 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.931313 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:55Z","lastTransitionTime":"2026-01-28T15:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.949504 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z"
Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.961560 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a6786216986b5b79f685fddb0711a6c7a735ff6b163a9516399cde7f85a8f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z"
Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.978550 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5mlc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb3aac9c-e96e-4a5e-beb5-aeff56394467\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5mlc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z"
Jan 28 15:45:55 crc kubenswrapper[4811]: I0128 15:45:55.998205 4811 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:55Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.011134 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:56Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.026075 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac9a279-ca90-41f7-bf8d-d51717483598\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a90ec64eeca27c3b374baa74b14b2380008b4863ad6d3618d55f1dce62ea37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2803e1abf925bb53b578077c2bd42241a09b9c0bd5181c2fab6f0e09fda4ee18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dj8kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:56Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.033615 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.033668 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.033678 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.033699 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.033713 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:56Z","lastTransitionTime":"2026-01-28T15:45:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.039560 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:56Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.053556 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:56Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.069154 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:56Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.082539 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03015015ac6736269b093c13f4df7abdad3a954c3336cc0b2de2e44cc5ae8338\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:56Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.111257 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b
243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:56Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.135856 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.135891 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.135905 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.135922 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.135934 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:56Z","lastTransitionTime":"2026-01-28T15:45:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.237965 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.238206 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.238299 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.238395 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.238523 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:56Z","lastTransitionTime":"2026-01-28T15:45:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.313964 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 06:56:18.57913513 +0000 UTC Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.338891 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.339031 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:45:56 crc kubenswrapper[4811]: E0128 15:45:56.339072 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:45:56 crc kubenswrapper[4811]: E0128 15:45:56.339233 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.339321 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:45:56 crc kubenswrapper[4811]: E0128 15:45:56.339411 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.339466 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:45:56 crc kubenswrapper[4811]: E0128 15:45:56.339539 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.341316 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.341352 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.341363 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.341382 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.341398 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:56Z","lastTransitionTime":"2026-01-28T15:45:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.444265 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.444323 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.444336 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.444354 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.444367 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:56Z","lastTransitionTime":"2026-01-28T15:45:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.547656 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.547718 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.547735 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.547758 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.547772 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:56Z","lastTransitionTime":"2026-01-28T15:45:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.651042 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.651112 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.651124 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.651145 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.651158 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:56Z","lastTransitionTime":"2026-01-28T15:45:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.658245 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dkzf6_d3371905-8bb6-433c-b905-43d69e7b382a/ovnkube-controller/2.log" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.659041 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dkzf6_d3371905-8bb6-433c-b905-43d69e7b382a/ovnkube-controller/1.log" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.662455 4811 generic.go:334] "Generic (PLEG): container finished" podID="d3371905-8bb6-433c-b905-43d69e7b382a" containerID="87d2f6f9e01f65f6e0d5755580858274ad796afdce547b78989251f29be3e913" exitCode=1 Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.662523 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" event={"ID":"d3371905-8bb6-433c-b905-43d69e7b382a","Type":"ContainerDied","Data":"87d2f6f9e01f65f6e0d5755580858274ad796afdce547b78989251f29be3e913"} Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.662598 4811 scope.go:117] "RemoveContainer" containerID="3cbc7be5530d165c2eceeefaa705334b7f8dfb0115bb21ac90ab2697c71ba3d0" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.663415 4811 scope.go:117] "RemoveContainer" containerID="87d2f6f9e01f65f6e0d5755580858274ad796afdce547b78989251f29be3e913" Jan 28 15:45:56 crc kubenswrapper[4811]: E0128 15:45:56.663670 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-dkzf6_openshift-ovn-kubernetes(d3371905-8bb6-433c-b905-43d69e7b382a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.680507 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03015015ac6736269b093c13f4df7abdad3a954c3336cc0b2de2e44cc5ae8338\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:56Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.705177 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b
243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:56Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.720467 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:56Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.734041 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:56Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.745044 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-28T15:45:56Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.753617 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.753658 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.753675 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.753692 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.753706 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:56Z","lastTransitionTime":"2026-01-28T15:45:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.765078 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87d2f6f9e01f65f6e0d5755580858274ad796afd
ce547b78989251f29be3e913\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cbc7be5530d165c2eceeefaa705334b7f8dfb0115bb21ac90ab2697c71ba3d0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"message\\\":\\\":40.553981 6230 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554051 6230 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554284 6230 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554593 6230 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554672 6230 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.554763 6230 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0128 15:45:40.555220 6230 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0128 15:45:40.555286 6230 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0128 15:45:40.555298 6230 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0128 15:45:40.555337 6230 factory.go:656] Stopping watch factory\\\\nI0128 15:45:40.555374 6230 handler.go:208] Removed *v1.Node event handler 7\\\\nI0128 15:45:40.555391 6230 handler.go:208] Removed *v1.Node event handler 2\\\\nI0128 15:45:40.555391 6230 handler.go:208] Removed *v1.NetworkPolicy ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87d2f6f9e01f65f6e0d5755580858274ad796afdce547b78989251f29be3e913\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:45:56Z\\\",\\\"message\\\":\\\":true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.93\\\\\\\", Port:5000, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0128 15:45:56.487623 6457 services_controller.go:452] Built service openshift-image-registry/image-registry per-node LB for network=default: []services.LB{}\\\\nI0128 15:45:56.487628 6457 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-4vcr6 in node crc\\\\nI0128 15:45:56.487635 6457 services_controller.go:453] Built service openshift-image-registry/image-registry template LB for network=default: []services.LB{}\\\\nI0128 15:45:56.487640 6457 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-4vcr6 after 0 failed attempt(s)\\\\nF0128 15:45:56.487640 6457 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network 
policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shar\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"contai
nerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:56Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.777467 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c725da74-d9b4-4f16-9a21-f9aae2fd05f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63a7b85207f94aa6d9180a5e207421fe63993dbedbac93b11a5b7ad1b23bdd78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e528919fa4f8c2b8108615f2dd2d79d790aea2e94bb7b2de7bb1cd188fd47ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ede5aafa1ce2ebfbaa3570ec43084884dc57d2d15dc5d2751c3b5a9072d8dad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c990287111409ab3f1f5210adb2b4da06714d33e695ac76e2f83f40073dfca14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:56Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.789044 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45f0fa8e-5670-4ba9-bc3b-868ceb764c20\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6aeff9c9b52faf757610027512ade23bf2c2b89068e97507c51b88d9f3a76c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://566c2ee1b03593202e07f16e8b1a976ed27ca7036a10b96f7eebce30147baf3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59cd87b55159f29922118c39ffc75f0b23d700438e9dd590f82a14c1880ed86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://296eeb684557d95622f81bb202a9d96da8036a904b97539ea34b546cfd37f274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296eeb684557d95622f81bb202a9d96da8036a904b97539ea34b546cfd37f274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:56Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.800703 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready 
status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:56Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.810317 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:56Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.821729 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:56Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.834285 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b6be9a85b332950bbbde5670c0ed3366ce07d2df9b7a68815b6582e8cf874f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:56Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.844095 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:56Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.852417 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a6786216986b5b79f685fddb0711a6c7a735ff6b163a9516399cde7f85a8f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:56Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.855290 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.855319 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.855330 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.855346 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.855358 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:56Z","lastTransitionTime":"2026-01-28T15:45:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.863418 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:56Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.876461 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:56Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.886129 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac9a279-ca90-41f7-bf8d-d51717483598\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a90ec64eeca27c3b374baa74b14b2380008b4863ad6d3618d55f1dce62ea37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2803e1abf925bb53b578077c2bd42241a09b9c0bd5181c2fab6f0e09fda4ee18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"
},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dj8kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:56Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.895080 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5mlc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb3aac9c-e96e-4a5e-beb5-aeff56394467\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5mlc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:56Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.958211 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.958281 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.958303 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.958332 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:56 crc kubenswrapper[4811]: I0128 15:45:56.958348 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:56Z","lastTransitionTime":"2026-01-28T15:45:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.061687 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.061750 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.061764 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.061784 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.061798 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:57Z","lastTransitionTime":"2026-01-28T15:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.164749 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.164812 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.165077 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.165110 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.165134 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:57Z","lastTransitionTime":"2026-01-28T15:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.267823 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.267875 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.267888 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.267904 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.267913 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:57Z","lastTransitionTime":"2026-01-28T15:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.315332 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 02:31:23.808312661 +0000 UTC Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.370131 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.370184 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.370201 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.370224 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.370241 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:57Z","lastTransitionTime":"2026-01-28T15:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.473490 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.473530 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.473540 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.473553 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.473562 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:57Z","lastTransitionTime":"2026-01-28T15:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.576184 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.576248 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.576267 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.576290 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.576307 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:57Z","lastTransitionTime":"2026-01-28T15:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.668859 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dkzf6_d3371905-8bb6-433c-b905-43d69e7b382a/ovnkube-controller/2.log" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.673449 4811 scope.go:117] "RemoveContainer" containerID="87d2f6f9e01f65f6e0d5755580858274ad796afdce547b78989251f29be3e913" Jan 28 15:45:57 crc kubenswrapper[4811]: E0128 15:45:57.673677 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-dkzf6_openshift-ovn-kubernetes(d3371905-8bb6-433c-b905-43d69e7b382a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.678492 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.678527 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.678537 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.678554 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.678566 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:57Z","lastTransitionTime":"2026-01-28T15:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.698735 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:57Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.716365 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:57Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.734777 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:57Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.748318 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:57Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.764371 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03015015ac6736269b093c13f4df7abdad3a954c3336cc0b2de2e44cc5ae8338\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:57Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.779484 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45f0fa8e-5670-4ba9-bc3b-868ceb764c20\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6aeff9c9b52faf757610027512ade23bf2c2b89068e97507c51b88d9f3a76c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://566c2ee1b03593202e07f16e8b1a976ed27ca7036a10b96f7eebce30147baf3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59cd87b55159f29922118c39ffc75f0b23d700438e9dd590f82a14c1880ed86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://296eeb684557d95622f81bb202a9d96da8036a904b97539ea34b546cfd37f274\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296eeb684557d95622f81bb202a9d96da8036a904b97539ea34b546cfd37f274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:57Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.781654 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.781726 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.781744 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.781776 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.781793 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:57Z","lastTransitionTime":"2026-01-28T15:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.795870 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:57Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.810768 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:57Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.822727 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bb3aac9c-e96e-4a5e-beb5-aeff56394467-metrics-certs\") pod \"network-metrics-daemon-t5mlc\" (UID: \"bb3aac9c-e96e-4a5e-beb5-aeff56394467\") " pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:45:57 crc kubenswrapper[4811]: E0128 15:45:57.822879 4811 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 28 15:45:57 crc kubenswrapper[4811]: E0128 15:45:57.822936 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bb3aac9c-e96e-4a5e-beb5-aeff56394467-metrics-certs podName:bb3aac9c-e96e-4a5e-beb5-aeff56394467 nodeName:}" failed. No retries permitted until 2026-01-28 15:46:13.822922659 +0000 UTC m=+66.577286242 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bb3aac9c-e96e-4a5e-beb5-aeff56394467-metrics-certs") pod "network-metrics-daemon-t5mlc" (UID: "bb3aac9c-e96e-4a5e-beb5-aeff56394467") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.823572 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:57Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.842672 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b6be9a85b332950bbbde5670c0ed3366ce07d2df9b7a68815b6582e8cf874f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:57Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.856327 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:57Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.876726 4811 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87d2f6f9e01f65f6e0d5755580858274ad796afdce547b78989251f29be3e913\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87d2f6f9e01f65f6e0d5755580858274ad796afdce547b78989251f29be3e913\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:45:56Z\\\",\\\"message\\\":\\\":true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.93\\\\\\\", Port:5000, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0128 15:45:56.487623 6457 services_controller.go:452] Built service openshift-image-registry/image-registry per-node LB for network=default: []services.LB{}\\\\nI0128 15:45:56.487628 6457 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-4vcr6 in node crc\\\\nI0128 15:45:56.487635 6457 services_controller.go:453] Built service openshift-image-registry/image-registry template LB for network=default: []services.LB{}\\\\nI0128 15:45:56.487640 6457 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-4vcr6 after 0 failed attempt(s)\\\\nF0128 15:45:56.487640 6457 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shar\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-dkzf6_openshift-ovn-kubernetes(d3371905-8bb6-433c-b905-43d69e7b382a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:57Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.884344 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.884398 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.884409 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.884425 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.884458 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:57Z","lastTransitionTime":"2026-01-28T15:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.894377 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c725da74-d9b4-4f16-9a21-f9aae2fd05f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63a7b85207f94aa6d9180a5e207421fe63993dbedbac93b11a5b7ad1b23bdd78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e528919fa4f8c2b8108615f2dd2d79d790aea2e94bb7b2de7bb1cd188fd47ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ede5aafa1ce2ebfbaa3570ec43084884dc57d2d15dc5d2751c3b5a9072d8dad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c990287111409ab3f1f5210adb2b4da06714d33e695ac76e2f83f40073dfca14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:57Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.908903 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a6786216986b5b79f685fddb0711a6c7a735ff6b163a9516399cde7f85a8f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:57Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.925721 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:57Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.941970 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac9a279-ca90-41f7-bf8d-d51717483598\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a90ec64eeca27c3b374baa74b14b2380008b4863ad6d3618d55f1dce62ea37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2803e1abf925bb53b578077c2bd42241a09b9c0bd5181c2fab6f0e09fda4ee18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dj8kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:57Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.954859 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5mlc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb3aac9c-e96e-4a5e-beb5-aeff56394467\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5mlc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:57Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.970184 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:57Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.987707 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.987769 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.987783 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.987802 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:57 crc kubenswrapper[4811]: I0128 15:45:57.987818 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:57Z","lastTransitionTime":"2026-01-28T15:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.091394 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.091471 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.091482 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.091499 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.091510 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:58Z","lastTransitionTime":"2026-01-28T15:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.195118 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.195175 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.195193 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.195219 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.195241 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:58Z","lastTransitionTime":"2026-01-28T15:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.298883 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.298961 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.298979 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.299004 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.299021 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:58Z","lastTransitionTime":"2026-01-28T15:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.316377 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 06:26:48.893642894 +0000 UTC Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.338543 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.338649 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:45:58 crc kubenswrapper[4811]: E0128 15:45:58.338725 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.338715 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.338774 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:45:58 crc kubenswrapper[4811]: E0128 15:45:58.338792 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:45:58 crc kubenswrapper[4811]: E0128 15:45:58.338871 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:45:58 crc kubenswrapper[4811]: E0128 15:45:58.338991 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.355497 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\
\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:58Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.371969 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:58Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.385692 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-28T15:45:58Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.402475 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.402531 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.402544 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.402607 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.402623 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:58Z","lastTransitionTime":"2026-01-28T15:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.407073 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9
f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03015015ac6736269b093c13f4df7abdad3a954c3336cc0b2de2e44cc5ae8338\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for 
client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:58Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.429974 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b
243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:58Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.443994 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:58Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.464237 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:58Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.484144 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b6be9a85b332950bbbde5670c0ed3366ce07d2df9b7a68815b6582e8cf874f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:58Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.496783 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:58Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.505534 4811 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.505595 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.505607 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.505623 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.505636 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:58Z","lastTransitionTime":"2026-01-28T15:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.518338 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87d2f6f9e01f65f6e0d5755580858274ad796afd
ce547b78989251f29be3e913\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87d2f6f9e01f65f6e0d5755580858274ad796afdce547b78989251f29be3e913\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:45:56Z\\\",\\\"message\\\":\\\":true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.93\\\\\\\", Port:5000, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0128 15:45:56.487623 6457 services_controller.go:452] Built service openshift-image-registry/image-registry per-node LB for network=default: []services.LB{}\\\\nI0128 15:45:56.487628 6457 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-4vcr6 in node crc\\\\nI0128 15:45:56.487635 6457 services_controller.go:453] Built service openshift-image-registry/image-registry template LB for network=default: []services.LB{}\\\\nI0128 15:45:56.487640 6457 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-4vcr6 after 0 failed attempt(s)\\\\nF0128 15:45:56.487640 6457 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shar\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-dkzf6_openshift-ovn-kubernetes(d3371905-8bb6-433c-b905-43d69e7b382a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:58Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.532912 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c725da74-d9b4-4f16-9a21-f9aae2fd05f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63a7b85207f94aa6d9180a5e207421fe63993dbedbac93b11a5b7ad1b23bdd78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e528919fa4f8c2b8108615f2dd2d79d790aea2e94bb7b2de7bb1cd188fd47ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ede5aafa1ce2ebfbaa3570ec43084884dc57d2d15dc5d2751c3b5a9072d8dad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c990287111409ab3f1f5210adb2b4da06714d33e695ac76e2f83f40073dfca14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:58Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.542930 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45f0fa8e-5670-4ba9-bc3b-868ceb764c20\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6aeff9c9b52faf757610027512ade23bf2c2b89068e97507c51b88d9f3a76c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://566c2ee1b03593202e07f16e8b1a976ed27ca7036a10b96f7eebce30147baf3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59cd87b55159f29922118c39ffc75f0b23d700438e9dd590f82a14c1880ed86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://296eeb684557d95622f81bb202a9d96da8036a904b97539ea34b546cfd37f274\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296eeb684557d95622f81bb202a9d96da8036a904b97539ea34b546cfd37f274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:58Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.554351 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:58Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.563682 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a6786216986b5b79f685fddb0711a6c7a735ff6b163a9516399cde7f85a8f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:58Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.574114 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5mlc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb3aac9c-e96e-4a5e-beb5-aeff56394467\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5mlc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:58Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.585476 4811 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:58Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.600410 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:58Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.610543 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.610597 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.610608 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.610631 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.610645 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:58Z","lastTransitionTime":"2026-01-28T15:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.612942 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac9a279-ca90-41f7-bf8d-d51717483598\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a90ec64eeca27c3b374baa74b14b2380008b4863ad6d3618d55f1dce62ea37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":t
rue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2803e1abf925bb53b578077c2bd42241a09b9c0bd5181c2fab6f0e09fda4ee18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dj8kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:45:58Z is after 2025-08-24T17:21:41Z" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.713546 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.713604 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.713617 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.713637 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.713648 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:58Z","lastTransitionTime":"2026-01-28T15:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.817603 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.817707 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.817757 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.817783 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.818062 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:58Z","lastTransitionTime":"2026-01-28T15:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.920420 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.920488 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.920498 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.920515 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:58 crc kubenswrapper[4811]: I0128 15:45:58.920526 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:58Z","lastTransitionTime":"2026-01-28T15:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.023637 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.023682 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.023695 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.023714 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.023727 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:59Z","lastTransitionTime":"2026-01-28T15:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.126231 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.126273 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.126288 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.126308 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.126324 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:59Z","lastTransitionTime":"2026-01-28T15:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.229339 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.229395 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.229407 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.229427 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.229463 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:59Z","lastTransitionTime":"2026-01-28T15:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.316989 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 15:38:32.298448384 +0000 UTC
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.332420 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.332477 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.332485 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.332498 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.332509 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:59Z","lastTransitionTime":"2026-01-28T15:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.435886 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.435932 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.435953 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.435978 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.435993 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:59Z","lastTransitionTime":"2026-01-28T15:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.539156 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.539235 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.539251 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.539296 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.539313 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:59Z","lastTransitionTime":"2026-01-28T15:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.641877 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.641943 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.641959 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.641984 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.642001 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:59Z","lastTransitionTime":"2026-01-28T15:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.745047 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.745091 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.745100 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.745119 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.745133 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:59Z","lastTransitionTime":"2026-01-28T15:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.848209 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.848263 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.848272 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.848286 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.848297 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:59Z","lastTransitionTime":"2026-01-28T15:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.951091 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.951135 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.951151 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.951166 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:45:59 crc kubenswrapper[4811]: I0128 15:45:59.951177 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:45:59Z","lastTransitionTime":"2026-01-28T15:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.054264 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.054355 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.054405 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.054452 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.054487 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:00Z","lastTransitionTime":"2026-01-28T15:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.149387 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:46:00 crc kubenswrapper[4811]: E0128 15:46:00.149589 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:46:32.149560315 +0000 UTC m=+84.903923898 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.149716 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 28 15:46:00 crc kubenswrapper[4811]: E0128 15:46:00.149824 4811 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Jan 28 15:46:00 crc kubenswrapper[4811]: E0128 15:46:00.149877 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-28 15:46:32.149868223 +0000 UTC m=+84.904231806 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.157376 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.157468 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.157488 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.157512 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.157529 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:00Z","lastTransitionTime":"2026-01-28T15:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.250689 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.250750 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.250773 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 28 15:46:00 crc kubenswrapper[4811]: E0128 15:46:00.250906 4811 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 28 15:46:00 crc kubenswrapper[4811]: E0128 15:46:00.250929 4811 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 28 15:46:00 crc kubenswrapper[4811]: E0128 15:46:00.250941 4811 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 28 15:46:00 crc kubenswrapper[4811]: E0128 15:46:00.250990 4811 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 28 15:46:00 crc kubenswrapper[4811]: E0128 15:46:00.251015 4811 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 28 15:46:00 crc kubenswrapper[4811]: E0128 15:46:00.251050 4811 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 28 15:46:00 crc kubenswrapper[4811]: E0128 15:46:00.251000 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-28 15:46:32.250985553 +0000 UTC m=+85.005349136 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 28 15:46:00 crc kubenswrapper[4811]: E0128 15:46:00.251071 4811 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 28 15:46:00 crc kubenswrapper[4811]: E0128 15:46:00.251101 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-28 15:46:32.251080776 +0000 UTC m=+85.005444369 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 28 15:46:00 crc kubenswrapper[4811]: E0128 15:46:00.251138 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-28 15:46:32.251118937 +0000 UTC m=+85.005482560 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.259817 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.259894 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.259906 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.259928 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.259940 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:00Z","lastTransitionTime":"2026-01-28T15:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.317520 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 06:40:40.302787798 +0000 UTC
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.339226 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.339269 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.339307 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.339253 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 28 15:46:00 crc kubenswrapper[4811]: E0128 15:46:00.339488 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467"
Jan 28 15:46:00 crc kubenswrapper[4811]: E0128 15:46:00.339559 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 28 15:46:00 crc kubenswrapper[4811]: E0128 15:46:00.339599 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 28 15:46:00 crc kubenswrapper[4811]: E0128 15:46:00.339638 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.362851 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.362891 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.362902 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.362918 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.362929 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:00Z","lastTransitionTime":"2026-01-28T15:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.465894 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.465938 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.465948 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.465965 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.465977 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:00Z","lastTransitionTime":"2026-01-28T15:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.569409 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.569514 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.569532 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.569556 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.569573 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:00Z","lastTransitionTime":"2026-01-28T15:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.671759 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.671846 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.671869 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.671899 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.671923 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:00Z","lastTransitionTime":"2026-01-28T15:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.774460 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.774505 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.774514 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.774532 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.774543 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:00Z","lastTransitionTime":"2026-01-28T15:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.878017 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.878077 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.878089 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.878110 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.878127 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:00Z","lastTransitionTime":"2026-01-28T15:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.981243 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.981316 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.981339 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.981370 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:00 crc kubenswrapper[4811]: I0128 15:46:00.981391 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:00Z","lastTransitionTime":"2026-01-28T15:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.086338 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.086418 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.086468 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.086492 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.086508 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:01Z","lastTransitionTime":"2026-01-28T15:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.189674 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.189989 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.190087 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.190193 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.190274 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:01Z","lastTransitionTime":"2026-01-28T15:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.293246 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.293302 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.293323 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.293351 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.293375 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:01Z","lastTransitionTime":"2026-01-28T15:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.318705 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 02:43:01.036870917 +0000 UTC Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.397347 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.397423 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.397481 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.397509 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.397525 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:01Z","lastTransitionTime":"2026-01-28T15:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.500875 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.500940 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.500957 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.500984 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.501002 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:01Z","lastTransitionTime":"2026-01-28T15:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.603005 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.603053 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.603067 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.603083 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.603093 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:01Z","lastTransitionTime":"2026-01-28T15:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.706024 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.706061 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.706072 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.706087 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.706098 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:01Z","lastTransitionTime":"2026-01-28T15:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.808888 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.808982 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.809004 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.809036 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.809055 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:01Z","lastTransitionTime":"2026-01-28T15:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.912009 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.912054 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.912065 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.912080 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:01 crc kubenswrapper[4811]: I0128 15:46:01.912093 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:01Z","lastTransitionTime":"2026-01-28T15:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.015856 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.015920 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.015938 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.015961 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.015981 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:02Z","lastTransitionTime":"2026-01-28T15:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.119033 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.119108 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.119121 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.119147 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.119230 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:02Z","lastTransitionTime":"2026-01-28T15:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.222735 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.222790 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.222807 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.222831 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.222851 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:02Z","lastTransitionTime":"2026-01-28T15:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.319285 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 19:26:52.413581094 +0000 UTC Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.326061 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.326118 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.326146 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.326170 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.326185 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:02Z","lastTransitionTime":"2026-01-28T15:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.338802 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.338948 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:46:02 crc kubenswrapper[4811]: E0128 15:46:02.339110 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.339145 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.339188 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:46:02 crc kubenswrapper[4811]: E0128 15:46:02.339327 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:46:02 crc kubenswrapper[4811]: E0128 15:46:02.339506 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:46:02 crc kubenswrapper[4811]: E0128 15:46:02.339620 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.429195 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.429267 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.429285 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.429314 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.429332 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:02Z","lastTransitionTime":"2026-01-28T15:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.533236 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.533284 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.533293 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.533311 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.533322 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:02Z","lastTransitionTime":"2026-01-28T15:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.636101 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.636159 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.636174 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.636195 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.636224 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:02Z","lastTransitionTime":"2026-01-28T15:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.738163 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.738207 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.738216 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.738229 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.738238 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:02Z","lastTransitionTime":"2026-01-28T15:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.841611 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.841706 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.841724 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.841746 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.841762 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:02Z","lastTransitionTime":"2026-01-28T15:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.945487 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.945579 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.945604 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.945647 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:02 crc kubenswrapper[4811]: I0128 15:46:02.945678 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:02Z","lastTransitionTime":"2026-01-28T15:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.049494 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.050063 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.050078 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.050112 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.050129 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:03Z","lastTransitionTime":"2026-01-28T15:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.152898 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.152964 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.152976 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.152999 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.153012 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:03Z","lastTransitionTime":"2026-01-28T15:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.256359 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.256422 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.256455 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.256476 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.256489 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:03Z","lastTransitionTime":"2026-01-28T15:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.320238 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 02:41:19.578782413 +0000 UTC Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.359921 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.359989 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.360004 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.360026 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.360040 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:03Z","lastTransitionTime":"2026-01-28T15:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.462892 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.462964 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.462978 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.463001 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.463016 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:03Z","lastTransitionTime":"2026-01-28T15:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.566280 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.566355 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.566373 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.566404 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.566423 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:03Z","lastTransitionTime":"2026-01-28T15:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.669325 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.669384 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.669402 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.669461 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.669480 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:03Z","lastTransitionTime":"2026-01-28T15:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.773055 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.773138 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.773164 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.773200 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.773228 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:03Z","lastTransitionTime":"2026-01-28T15:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.876370 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.876499 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.876518 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.876544 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.876566 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:03Z","lastTransitionTime":"2026-01-28T15:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.980191 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.980244 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.980257 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.980274 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:03 crc kubenswrapper[4811]: I0128 15:46:03.980288 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:03Z","lastTransitionTime":"2026-01-28T15:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.083666 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.083710 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.083724 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.083741 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.083755 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:04Z","lastTransitionTime":"2026-01-28T15:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.186602 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.186671 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.186689 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.186719 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.186736 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:04Z","lastTransitionTime":"2026-01-28T15:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.289301 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.289362 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.289372 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.289389 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.289398 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:04Z","lastTransitionTime":"2026-01-28T15:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.320767 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 00:04:02.515946335 +0000 UTC Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.342773 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.342832 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.342892 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:46:04 crc kubenswrapper[4811]: E0128 15:46:04.343012 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.343039 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:46:04 crc kubenswrapper[4811]: E0128 15:46:04.343196 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:46:04 crc kubenswrapper[4811]: E0128 15:46:04.343474 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:46:04 crc kubenswrapper[4811]: E0128 15:46:04.343497 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.391590 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.391642 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.391652 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.391672 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.391687 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:04Z","lastTransitionTime":"2026-01-28T15:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.494071 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.494135 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.494146 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.494165 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.494179 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:04Z","lastTransitionTime":"2026-01-28T15:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.596899 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.596967 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.596977 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.597000 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.597011 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:04Z","lastTransitionTime":"2026-01-28T15:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.699555 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.699644 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.699667 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.699696 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.699717 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:04Z","lastTransitionTime":"2026-01-28T15:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.802291 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.802321 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.802329 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.802342 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.802351 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:04Z","lastTransitionTime":"2026-01-28T15:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.905267 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.905313 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.905327 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.905350 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:04 crc kubenswrapper[4811]: I0128 15:46:04.905365 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:04Z","lastTransitionTime":"2026-01-28T15:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.007350 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.007396 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.007407 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.007464 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.007478 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:05Z","lastTransitionTime":"2026-01-28T15:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.110006 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.110133 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.110148 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.110170 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.110184 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:05Z","lastTransitionTime":"2026-01-28T15:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.213235 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.213305 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.213330 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.213360 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.213375 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:05Z","lastTransitionTime":"2026-01-28T15:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.317058 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.317110 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.317121 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.317172 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.317186 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:05Z","lastTransitionTime":"2026-01-28T15:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.321523 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 04:36:52.491757051 +0000 UTC
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.419552 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.419618 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.419631 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.419653 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.419668 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:05Z","lastTransitionTime":"2026-01-28T15:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.424407 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.424460 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.424470 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.424487 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.424498 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:05Z","lastTransitionTime":"2026-01-28T15:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
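The repeated NodeNotReady condition above is the kubelet's network-readiness check failing: nothing parseable was found in /etc/kubernetes/cni/net.d/. A minimal sketch of that kind of directory scan follows (illustrative only, not kubelet's actual implementation; the path and the extension list libcni accepts are taken from the log message and common CNI convention):

    // cnicheck.go: report whether a CNI network config is present.
    // Illustrative sketch; kubelet's real check lives in its CRI/CNI plumbing.
    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        confDir := "/etc/kubernetes/cni/net.d" // directory named in the log message
        entries, err := os.ReadDir(confDir)
        if err != nil {
            fmt.Printf("NetworkReady=false: cannot read %s: %v\n", confDir, err)
            return
        }
        for _, e := range entries {
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json": // extensions conventionally scanned for
                fmt.Printf("NetworkReady=true: found %s\n", e.Name())
                return
            }
        }
        fmt.Printf("NetworkReady=false: no CNI configuration file in %s\n", confDir)
    }

Run on the node, an empty or missing directory reproduces exactly the "no CNI configuration file" readiness failure logged above.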
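The certificate_manager line above is also notable: the kubelet-serving certificate is valid until 2026-02-24, but its rotation deadline (2025-12-09) is already in the past at log time (2026-01-28), so rotation is overdue. client-go's certificate manager derives that deadline as a jittered point late in the validity window; a rough sketch of the computation follows (the 70-90% jitter range and the 90-day issuance assumption are illustrative, not values from the log):

    // rotationdeadline.go: approximate how a rotation deadline is derived
    // from a certificate's validity window. Sketch only; the exact jitter
    // policy of client-go's certificate manager is an assumption here.
    package main

    import (
        "fmt"
        "math/rand"
        "time"
    )

    func rotationDeadline(notBefore, notAfter time.Time) time.Time {
        lifetime := notAfter.Sub(notBefore)
        // Assumed: pick a random point between 70% and 90% of the lifetime.
        jitter := 0.7 + 0.2*rand.Float64()
        return notBefore.Add(time.Duration(float64(lifetime) * jitter))
    }

    func main() {
        // Expiry taken from the log line above; issuance time is not logged,
        // so a 90-day certificate lifetime is assumed for illustration.
        notAfter, _ := time.Parse("2006-01-02 15:04:05 -0700 MST", "2026-02-24 05:53:03 +0000 UTC")
        notBefore := notAfter.Add(-90 * 24 * time.Hour)
        deadline := rotationDeadline(notBefore, notAfter)
        fmt.Printf("rotation deadline ~ %s (rotation is overdue if this is before now)\n", deadline.Format(time.RFC3339))
    }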
Jan 28 15:46:05 crc kubenswrapper[4811]: E0128 15:46:05.435246 4811 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0d7027f1-1ac9-4166-9238-d8a50d9d7547\\\",\\\"systemUUID\\\":\\\"abb845de-029b-47c8-9624-111770212716\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:05Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.438299 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.438344 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.438356 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.438375 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.438388 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:05Z","lastTransitionTime":"2026-01-28T15:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.459838 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.459883 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
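The "Error updating node status, will retry" entry above shows why the patch never lands: the node.network-node-identity.openshift.io webhook at 127.0.0.1:9743 presents a certificate that expired 2025-08-24, while the node clock reads 2026-01-28. A small diagnostic sketch that connects with chain verification disabled and checks the presented leaf certificate's validity window directly (illustrative; intended to be run on the node itself):

    // certprobe.go: fetch a TLS server's leaf certificate and compare its
    // validity window against the local clock. Diagnostic sketch only.
    package main

    import (
        "crypto/tls"
        "fmt"
        "time"
    )

    func main() {
        addr := "127.0.0.1:9743" // webhook endpoint from the log
        // Skip chain verification: the point is to inspect the cert, not trust it.
        conn, err := tls.Dial("tcp", addr, &tls.Config{InsecureSkipVerify: true})
        if err != nil {
            fmt.Printf("dial %s: %v\n", addr, err)
            return
        }
        defer conn.Close()

        leaf := conn.ConnectionState().PeerCertificates[0]
        now := time.Now()
        fmt.Printf("subject: %s\nnotBefore: %s\nnotAfter: %s\n", leaf.Subject, leaf.NotBefore, leaf.NotAfter)
        switch {
        case now.After(leaf.NotAfter):
            fmt.Printf("EXPIRED: current time %s is after %s\n", now.Format(time.RFC3339), leaf.NotAfter.Format(time.RFC3339))
        case now.Before(leaf.NotBefore):
            fmt.Println("NOT YET VALID")
        default:
            fmt.Println("certificate is within its validity window")
        }
    }

Against the endpoint logged above this would report notAfter 2025-08-24T17:21:41Z and flag it as expired, matching the x509 error in the patch failure.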
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.459894 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.459911 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.459924 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:05Z","lastTransitionTime":"2026-01-28T15:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.475866 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.475904 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
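The payload the kubelet keeps retrying is a strategic-merge patch over status.conditions keyed by type (the $setElementOrder directive fixes the ordering), and the node is NotReady solely because of the Ready condition. A minimal sketch of how such a condition list maps onto Go types and how readiness is derived from it (illustrative types; real clients use NodeCondition from k8s.io/api/core/v1):

    // conditions.go: model a node condition list and derive readiness,
    // mirroring the shape of the patch payload in the log. Sketch only.
    package main

    import (
        "encoding/json"
        "fmt"
    )

    type NodeCondition struct {
        Type    string `json:"type"`
        Status  string `json:"status"`
        Reason  string `json:"reason,omitempty"`
        Message string `json:"message,omitempty"`
    }

    // isReady returns true only if a Ready condition exists with status True.
    func isReady(conds []NodeCondition) bool {
        for _, c := range conds {
            if c.Type == "Ready" {
                return c.Status == "True"
            }
        }
        return false
    }

    func main() {
        // Condition values taken from the patch in the log (unescaped).
        raw := `[{"type":"MemoryPressure","status":"False","reason":"KubeletHasSufficientMemory"},
                 {"type":"DiskPressure","status":"False","reason":"KubeletHasNoDiskPressure"},
                 {"type":"PIDPressure","status":"False","reason":"KubeletHasSufficientPID"},
                 {"type":"Ready","status":"False","reason":"KubeletNotReady"}]`
        var conds []NodeCondition
        if err := json.Unmarshal([]byte(raw), &conds); err != nil {
            panic(err)
        }
        fmt.Printf("node ready: %v\n", isReady(conds)) // prints: node ready: false
    }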
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.475913 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.475930 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.475940 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:05Z","lastTransitionTime":"2026-01-28T15:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.493299 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.493331 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.493342 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.493357 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.493368 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:05Z","lastTransitionTime":"2026-01-28T15:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:05 crc kubenswrapper[4811]: E0128 15:46:05.504505 4811 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0d7027f1-1ac9-4166-9238-d8a50d9d7547\\\",\\\"systemUUID\\\":\\\"abb845de-029b-47c8-9624-111770212716\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:05Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:05 crc kubenswrapper[4811]: E0128 15:46:05.504628 4811 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.521929 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.521975 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.521983 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.521996 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.522006 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:05Z","lastTransitionTime":"2026-01-28T15:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.624878 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.624932 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.624942 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.624960 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.624976 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:05Z","lastTransitionTime":"2026-01-28T15:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.727821 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.727865 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.727873 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.727889 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.727900 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:05Z","lastTransitionTime":"2026-01-28T15:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.830012 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.830068 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.830083 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.830099 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.830111 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:05Z","lastTransitionTime":"2026-01-28T15:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.933177 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.933233 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.933246 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.933270 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:05 crc kubenswrapper[4811]: I0128 15:46:05.933284 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:05Z","lastTransitionTime":"2026-01-28T15:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.036253 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.036332 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.036354 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.036384 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.036407 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:06Z","lastTransitionTime":"2026-01-28T15:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.138535 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.138566 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.138573 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.138585 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.138593 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:06Z","lastTransitionTime":"2026-01-28T15:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.240770 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.240813 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.240824 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.240843 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.240855 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:06Z","lastTransitionTime":"2026-01-28T15:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.322088 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 02:43:53.246476749 +0000 UTC Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.338627 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.338696 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:46:06 crc kubenswrapper[4811]: E0128 15:46:06.338757 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.338781 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.338807 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:46:06 crc kubenswrapper[4811]: E0128 15:46:06.338941 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:46:06 crc kubenswrapper[4811]: E0128 15:46:06.338999 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:46:06 crc kubenswrapper[4811]: E0128 15:46:06.339088 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.342514 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.342546 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.342556 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.342568 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.342576 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:06Z","lastTransitionTime":"2026-01-28T15:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.445500 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.445557 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.445571 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.445588 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.445598 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:06Z","lastTransitionTime":"2026-01-28T15:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.548970 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.549005 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.549016 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.549030 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.549040 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:06Z","lastTransitionTime":"2026-01-28T15:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.650874 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.650917 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.650928 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.650945 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.650958 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:06Z","lastTransitionTime":"2026-01-28T15:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.753648 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.753693 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.753704 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.753719 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.753729 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:06Z","lastTransitionTime":"2026-01-28T15:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.856449 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.856486 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.856495 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.856510 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.856521 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:06Z","lastTransitionTime":"2026-01-28T15:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.959830 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.959888 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.959908 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.959939 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:06 crc kubenswrapper[4811]: I0128 15:46:06.959957 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:06Z","lastTransitionTime":"2026-01-28T15:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.062528 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.062580 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.062588 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.062601 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.062612 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:07Z","lastTransitionTime":"2026-01-28T15:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.165584 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.165638 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.165650 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.165664 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.165675 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:07Z","lastTransitionTime":"2026-01-28T15:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.268229 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.268516 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.268541 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.268571 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.268611 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:07Z","lastTransitionTime":"2026-01-28T15:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.323016 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 13:45:53.33105268 +0000 UTC Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.371448 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.371507 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.371520 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.371536 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.371547 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:07Z","lastTransitionTime":"2026-01-28T15:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.474143 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.474202 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.474221 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.474246 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.474307 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:07Z","lastTransitionTime":"2026-01-28T15:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.580217 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.580257 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.580276 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.580294 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.580307 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:07Z","lastTransitionTime":"2026-01-28T15:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.683284 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.683321 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.683330 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.683345 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.683382 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:07Z","lastTransitionTime":"2026-01-28T15:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.785955 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.785989 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.786000 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.786014 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.786024 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:07Z","lastTransitionTime":"2026-01-28T15:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.888675 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.888741 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.888763 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.888793 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.888819 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:07Z","lastTransitionTime":"2026-01-28T15:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.991295 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.991336 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.991350 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.991365 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:07 crc kubenswrapper[4811]: I0128 15:46:07.991378 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:07Z","lastTransitionTime":"2026-01-28T15:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.094768 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.094861 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.094886 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.094919 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.094941 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:08Z","lastTransitionTime":"2026-01-28T15:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.197324 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.197419 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.197476 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.197495 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.197507 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:08Z","lastTransitionTime":"2026-01-28T15:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.301298 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.301351 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.301365 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.301381 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.301392 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:08Z","lastTransitionTime":"2026-01-28T15:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.324083 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 05:51:45.185725023 +0000 UTC Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.339057 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.339057 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.339170 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:46:08 crc kubenswrapper[4811]: E0128 15:46:08.339286 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.339313 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:46:08 crc kubenswrapper[4811]: E0128 15:46:08.339524 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:46:08 crc kubenswrapper[4811]: E0128 15:46:08.339948 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:46:08 crc kubenswrapper[4811]: E0128 15:46:08.339995 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.352632 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.
126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:08Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.364808 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac9a279-ca90-41f7-bf8d-d51717483598\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a90ec64eeca27c3b374baa74b14b2380008b4863ad6d3618d55f1dce62ea37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2803e1abf925bb53b578077c2bd42241a09b9c0bd5181c2fab6f0e09fda4ee18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\
":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dj8kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:08Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.375885 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5mlc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb3aac9c-e96e-4a5e-beb5-aeff56394467\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5mlc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:08Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.388619 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:08Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.403674 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.403710 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.403721 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.403736 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.403746 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:08Z","lastTransitionTime":"2026-01-28T15:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.410201 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:08Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.426389 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:08Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.444236 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:08Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.457261 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:08Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.473947 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03015015ac6736269b093c13f4df7abdad3a954c3336cc0b2de2e44cc5ae8338\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:08Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.484922 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45f0fa8e-5670-4ba9-bc3b-868ceb764c20\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6aeff9c9b52faf757610027512ade23bf2c2b89068e97507c51b88d9f3a76c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://566c2ee1b03593202e07f16e8b1a976ed27ca7036a10b96f7eebce30147baf3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59cd87b55159f29922118c39ffc75f0b23d700438e9dd590f82a14c1880ed86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://296eeb684557d95622f81bb202a9d96da8036a904b97539ea34b546cfd37f274\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296eeb684557d95622f81bb202a9d96da8036a904b97539ea34b546cfd37f274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:08Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.498859 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:08Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.505899 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.505950 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.505964 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.505981 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.505992 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:08Z","lastTransitionTime":"2026-01-28T15:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
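
Every "Failed to update status for pod" entry in this log shares one root cause: the serving certificate of the network-node-identity webhook (mounted at /etc/webhook-cert/ per the pod status above) expired on 2025-08-24T17:21:41Z, so every TLS handshake to https://127.0.0.1:9743 fails x509 verification. A minimal Go sketch of the validity-window check that produces this error shape, assuming the conventional tls.crt key name for the mounted TLS secret:

    package main

    import (
        "crypto/x509"
        "encoding/pem"
        "fmt"
        "os"
        "time"
    )

    func main() {
        // Hypothetical path: the webhook container mounts its serving cert
        // at /etc/webhook-cert/; tls.crt is the usual key in a TLS secret.
        pemBytes, err := os.ReadFile("/etc/webhook-cert/tls.crt")
        if err != nil {
            panic(err)
        }
        block, _ := pem.Decode(pemBytes)
        if block == nil {
            panic("no PEM block found")
        }
        cert, err := x509.ParseCertificate(block.Bytes)
        if err != nil {
            panic(err)
        }
        // crypto/x509 rejects a certificate whenever the verification time
        // falls outside [NotBefore, NotAfter]; that is the condition behind
        // "certificate has expired or is not yet valid".
        now := time.Now()
        if now.After(cert.NotAfter) {
            fmt.Printf("current time %s is after %s\n",
                now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
        } else if now.Before(cert.NotBefore) {
            fmt.Printf("current time %s is before %s\n",
                now.Format(time.RFC3339), cert.NotBefore.Format(time.RFC3339))
        }
    }

Against this node's cert the output would match the logged failure: current time 2026-01-28T15:46:08Z is after 2025-08-24T17:21:41Z.
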
Has your network provider started?"} Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.509771 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:08Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.520896 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:08Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.533502 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b6be9a85b332950bbbde5670c0ed3366ce07d2df9b7a68815b6582e8cf874f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:08Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.543553 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:08Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.561305 4811 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87d2f6f9e01f65f6e0d5755580858274ad796afdce547b78989251f29be3e913\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87d2f6f9e01f65f6e0d5755580858274ad796afdce547b78989251f29be3e913\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:45:56Z\\\",\\\"message\\\":\\\":true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.93\\\\\\\", Port:5000, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0128 15:45:56.487623 6457 services_controller.go:452] Built service openshift-image-registry/image-registry per-node LB for network=default: []services.LB{}\\\\nI0128 15:45:56.487628 6457 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-4vcr6 in node crc\\\\nI0128 15:45:56.487635 6457 services_controller.go:453] Built service openshift-image-registry/image-registry template LB for network=default: []services.LB{}\\\\nI0128 15:45:56.487640 6457 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-4vcr6 after 0 failed attempt(s)\\\\nF0128 15:45:56.487640 6457 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shar\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-dkzf6_openshift-ovn-kubernetes(d3371905-8bb6-433c-b905-43d69e7b382a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:08Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.572579 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c725da74-d9b4-4f16-9a21-f9aae2fd05f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63a7b85207f94aa6d9180a5e207421fe63993dbedbac93b11a5b7ad1b23bdd78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e528919fa4f8c2b8108615f2dd2d79d790aea2e94bb7b2de7bb1cd188fd47ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ede5aafa1ce2ebfbaa3570ec43084884dc57d2d15dc5d2751c3b5a9072d8dad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c990287111409ab3f1f5210adb2b4da06714d33e695ac76e2f83f40073dfca14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:08Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.582747 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a6786216986b5b79f685fddb0711a6c7a735ff6b163a9516399cde7f85a8f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:08Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.608810 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.608847 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.608856 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.608870 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.608879 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:08Z","lastTransitionTime":"2026-01-28T15:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.710804 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.710836 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.710845 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.710857 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.710867 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:08Z","lastTransitionTime":"2026-01-28T15:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.813499 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.813544 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.813559 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.813582 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.813598 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:08Z","lastTransitionTime":"2026-01-28T15:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.916773 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.916828 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.916846 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.916868 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:08 crc kubenswrapper[4811]: I0128 15:46:08.916884 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:08Z","lastTransitionTime":"2026-01-28T15:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.019026 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.019065 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.019076 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.019118 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.019131 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:09Z","lastTransitionTime":"2026-01-28T15:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.123308 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.123367 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.123378 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.123399 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.123413 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:09Z","lastTransitionTime":"2026-01-28T15:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.227089 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.227141 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.227169 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.227236 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.227258 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:09Z","lastTransitionTime":"2026-01-28T15:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.325166 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 01:39:32.933664649 +0000 UTC Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.330010 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.330047 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.330055 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.330069 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.330079 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:09Z","lastTransitionTime":"2026-01-28T15:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.433129 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.433178 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.433196 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.433220 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.433237 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:09Z","lastTransitionTime":"2026-01-28T15:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.536518 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.536642 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.536660 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.536683 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.536700 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:09Z","lastTransitionTime":"2026-01-28T15:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.638541 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.638579 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.638587 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.638599 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.638608 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:09Z","lastTransitionTime":"2026-01-28T15:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.740582 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.740629 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.740647 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.740670 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.740689 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:09Z","lastTransitionTime":"2026-01-28T15:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.843198 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.843504 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.843667 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.843757 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.843837 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:09Z","lastTransitionTime":"2026-01-28T15:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.945607 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.945644 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.945652 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.945664 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:09 crc kubenswrapper[4811]: I0128 15:46:09.945673 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:09Z","lastTransitionTime":"2026-01-28T15:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.047815 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.048071 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.048244 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.048418 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.048614 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:10Z","lastTransitionTime":"2026-01-28T15:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.151316 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.151359 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.151371 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.151387 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.151399 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:10Z","lastTransitionTime":"2026-01-28T15:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.254146 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.254200 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.254216 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.254236 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.254251 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:10Z","lastTransitionTime":"2026-01-28T15:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.325619 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 09:31:45.98745656 +0000 UTC Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.339309 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.339382 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:46:10 crc kubenswrapper[4811]: E0128 15:46:10.339442 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:46:10 crc kubenswrapper[4811]: E0128 15:46:10.339535 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.339568 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:46:10 crc kubenswrapper[4811]: E0128 15:46:10.339662 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.339843 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:46:10 crc kubenswrapper[4811]: E0128 15:46:10.340072 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.356029 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.356287 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.356806 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.356949 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.357081 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:10Z","lastTransitionTime":"2026-01-28T15:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.459378 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.459698 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.459809 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.459995 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.460121 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:10Z","lastTransitionTime":"2026-01-28T15:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.563285 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.563600 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.563687 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.563788 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.563879 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:10Z","lastTransitionTime":"2026-01-28T15:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.665874 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.665920 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.665929 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.665943 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.665953 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:10Z","lastTransitionTime":"2026-01-28T15:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.768969 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.769009 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.769017 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.769030 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.769040 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:10Z","lastTransitionTime":"2026-01-28T15:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.871594 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.871646 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.871658 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.871675 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.871688 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:10Z","lastTransitionTime":"2026-01-28T15:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.974322 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.974388 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.974406 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.974492 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:10 crc kubenswrapper[4811]: I0128 15:46:10.974542 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:10Z","lastTransitionTime":"2026-01-28T15:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:11 crc kubenswrapper[4811]: I0128 15:46:11.078473 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:11 crc kubenswrapper[4811]: I0128 15:46:11.078517 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:11 crc kubenswrapper[4811]: I0128 15:46:11.078528 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:11 crc kubenswrapper[4811]: I0128 15:46:11.078543 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:11 crc kubenswrapper[4811]: I0128 15:46:11.078554 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:11Z","lastTransitionTime":"2026-01-28T15:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:11 crc kubenswrapper[4811]: I0128 15:46:11.181680 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:11 crc kubenswrapper[4811]: I0128 15:46:11.181726 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:11 crc kubenswrapper[4811]: I0128 15:46:11.181736 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:11 crc kubenswrapper[4811]: I0128 15:46:11.181750 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:11 crc kubenswrapper[4811]: I0128 15:46:11.181760 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:11Z","lastTransitionTime":"2026-01-28T15:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:11 crc kubenswrapper[4811]: I0128 15:46:11.283720 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:11 crc kubenswrapper[4811]: I0128 15:46:11.283748 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:11 crc kubenswrapper[4811]: I0128 15:46:11.283755 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:11 crc kubenswrapper[4811]: I0128 15:46:11.283770 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:11 crc kubenswrapper[4811]: I0128 15:46:11.283781 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:11Z","lastTransitionTime":"2026-01-28T15:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 28 15:46:11 crc kubenswrapper[4811]: I0128 15:46:11.326855 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 10:51:57.123903239 +0000 UTC
Jan 28 15:46:11 crc kubenswrapper[4811]: I0128 15:46:11.340769 4811 scope.go:117] "RemoveContainer" containerID="87d2f6f9e01f65f6e0d5755580858274ad796afdce547b78989251f29be3e913"
Jan 28 15:46:11 crc kubenswrapper[4811]: E0128 15:46:11.341109 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-dkzf6_openshift-ovn-kubernetes(d3371905-8bb6-433c-b905-43d69e7b382a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" podUID="d3371905-8bb6-433c-b905-43d69e7b382a"
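[editor's note] The "back-off 20s" above is the kubelet's standard CrashLoopBackOff policy: the restart delay starts at 10 s, doubles on each failed restart, and is capped at 5 min (resetting once a container runs cleanly for a while). A minimal sketch of that doubling, assuming the upstream default constants rather than anything read from this cluster:

    package main

    import (
    	"fmt"
    	"time"
    )

    // crashLoopDelays returns the first n restart delays under a
    // start-at-base, double-each-time, cap-at-max policy -- the shape of
    // kubelet's CrashLoopBackOff; base 10s / cap 5m are upstream defaults.
    func crashLoopDelays(base, max time.Duration, n int) []time.Duration {
    	delays := make([]time.Duration, 0, n)
    	d := base
    	for i := 0; i < n; i++ {
    		delays = append(delays, d)
    		if d *= 2; d > max {
    			d = max
    		}
    	}
    	return delays
    }

    func main() {
    	// Prints [10s 20s 40s 1m20s 2m40s 5m0s 5m0s]; the log's
    	// "back-off 20s" corresponds to the second failed restart.
    	fmt.Println(crashLoopDelays(10*time.Second, 5*time.Minute, 7))
    }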
Jan 28 15:46:12 crc kubenswrapper[4811]: I0128 15:46:12.327356 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-08 15:28:00.195328521 +0000 UTC
Jan 28 15:46:12 crc kubenswrapper[4811]: I0128 15:46:12.338843 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 28 15:46:12 crc kubenswrapper[4811]: I0128 15:46:12.338903 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 28 15:46:12 crc kubenswrapper[4811]: I0128 15:46:12.339036 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc"
Jan 28 15:46:12 crc kubenswrapper[4811]: E0128 15:46:12.339132 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 28 15:46:12 crc kubenswrapper[4811]: I0128 15:46:12.339153 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 28 15:46:12 crc kubenswrapper[4811]: E0128 15:46:12.339284 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467"
Jan 28 15:46:12 crc kubenswrapper[4811]: E0128 15:46:12.339386 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 28 15:46:12 crc kubenswrapper[4811]: E0128 15:46:12.339545 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
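[editor's note] Only these four pods show sandbox/sync errors, apparently because they are the node's only non-host-network pods: creating a pod sandbox needs a working CNI plugin, while host-network pods (including ovnkube-node itself, which must repair the network) keep running. A small client-go sketch of how one could list the pods blocked this way; the kubeconfig path and the bare error handling are illustrative assumptions:

    package main

    import (
    	"context"
    	"fmt"

    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/tools/clientcmd"
    )

    func main() {
    	// Assumed path for illustration; any admin kubeconfig works.
    	cfg, err := clientcmd.BuildConfigFromFlags("", "/etc/kubernetes/kubeconfig")
    	if err != nil {
    		panic(err)
    	}
    	cs, err := kubernetes.NewForConfig(cfg)
    	if err != nil {
    		panic(err)
    	}
    	pods, err := cs.CoreV1().Pods(metav1.NamespaceAll).List(context.TODO(), metav1.ListOptions{})
    	if err != nil {
    		panic(err)
    	}
    	for _, p := range pods.Items {
    		// Pods without hostNetwork need a CNI-provided sandbox and
    		// stay Pending while the network plugin is down.
    		if !p.Spec.HostNetwork {
    			fmt.Printf("%s/%s waits for CNI\n", p.Namespace, p.Name)
    		}
    	}
    }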
Jan 28 15:46:13 crc kubenswrapper[4811]: I0128 15:46:13.327852 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 23:00:05.767648031 +0000 UTC
Jan 28 15:46:13 crc kubenswrapper[4811]: I0128 15:46:13.909924 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bb3aac9c-e96e-4a5e-beb5-aeff56394467-metrics-certs\") pod \"network-metrics-daemon-t5mlc\" (UID: \"bb3aac9c-e96e-4a5e-beb5-aeff56394467\") " pod="openshift-multus/network-metrics-daemon-t5mlc"
Jan 28 15:46:13 crc kubenswrapper[4811]: E0128 15:46:13.910077 4811 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 28 15:46:13 crc kubenswrapper[4811]: E0128 15:46:13.910122 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bb3aac9c-e96e-4a5e-beb5-aeff56394467-metrics-certs podName:bb3aac9c-e96e-4a5e-beb5-aeff56394467 nodeName:}" failed. No retries permitted until 2026-01-28 15:46:45.910109143 +0000 UTC m=+98.664472726 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bb3aac9c-e96e-4a5e-beb5-aeff56394467-metrics-certs") pod "network-metrics-daemon-t5mlc" (UID: "bb3aac9c-e96e-4a5e-beb5-aeff56394467") : object "openshift-multus"/"metrics-daemon-secret" not registered
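[editor's note] The "durationBeforeRetry 32s" above is the volume manager's exponential backoff: the delay between retries of a failed mount operation doubles on each failure up to a cap. A minimal sketch of that shape; the 500 ms initial delay and ~2 m cap are assumed constants for illustration, since the log itself only shows the 32 s step:

    package main

    import (
    	"fmt"
    	"time"
    )

    // nextRetryDelay doubles the previous delay and clamps it to max --
    // the shape of the kubelet volume manager's retry backoff.
    func nextRetryDelay(prev, initial, max time.Duration) time.Duration {
    	if prev == 0 {
    		return initial
    	}
    	if d := prev * 2; d < max {
    		return d
    	}
    	return max
    }

    func main() {
    	var d time.Duration
    	for i := 0; i < 9; i++ { // 500ms 1s 2s 4s 8s 16s 32s 1m4s 2m2s
    		d = nextRetryDelay(d, 500*time.Millisecond, 122*time.Second)
    		fmt.Println(d)
    	}
    }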
Jan 28 15:46:14 crc kubenswrapper[4811]: I0128 15:46:14.328185 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 13:48:05.487746851 +0000 UTC
Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.329113 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 06:19:04.356922467 +0000 UTC
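[editor's note] Each certificate_manager line reports the same expiry but a different rotation deadline, because on every pass the manager re-picks a random instant roughly 70-90% of the way through the certificate's validity window. A sketch of that jitter rule; the 30-day lifetime below is an assumption (the log only shows the expiry), not a value read from this cluster:

    package main

    import (
    	"fmt"
    	"math/rand"
    	"time"
    )

    // rotationDeadline picks a uniformly random instant 70%-90% of the
    // way through a certificate's validity window -- the jitter used by
    // the Kubernetes client certificate manager, which is why every
    // retry in the log prints a different deadline for the same expiry.
    func rotationDeadline(notBefore, notAfter time.Time, r *rand.Rand) time.Time {
    	total := notAfter.Sub(notBefore)
    	jittered := time.Duration((0.7 + 0.2*r.Float64()) * float64(total))
    	return notBefore.Add(jittered)
    }

    func main() {
    	r := rand.New(rand.NewSource(1))
    	expiry := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC) // expiry from the log
    	issued := expiry.Add(-30 * 24 * time.Hour)              // assumed 30-day lifetime
    	for i := 0; i < 3; i++ {
    		fmt.Println(rotationDeadline(issued, expiry, r))
    	}
    }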
Has your network provider started?"} Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.604473 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.604513 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.604524 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.604539 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.604548 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:15Z","lastTransitionTime":"2026-01-28T15:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.706340 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.706397 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.706409 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.706424 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.706457 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:15Z","lastTransitionTime":"2026-01-28T15:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.740265 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.740311 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.740320 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.740338 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.740353 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:15Z","lastTransitionTime":"2026-01-28T15:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:15 crc kubenswrapper[4811]: E0128 15:46:15.757079 4811 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0d7027f1-1ac9-4166-9238-d8a50d9d7547\\\",\\\"systemUUID\\\":\\\"abb845de-029b-47c8-9624-111770212716\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:15Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.760676 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.760715 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.760728 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.760745 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.760761 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:15Z","lastTransitionTime":"2026-01-28T15:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:15 crc kubenswrapper[4811]: E0128 15:46:15.777459 4811 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.782829 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.782892 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.782905 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.782923 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.782937 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:15Z","lastTransitionTime":"2026-01-28T15:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.801131 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.801163 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.801176 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.801193 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.801204 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:15Z","lastTransitionTime":"2026-01-28T15:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.817071 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.817111 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.817123 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.817138 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.817162 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:15Z","lastTransitionTime":"2026-01-28T15:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:15 crc kubenswrapper[4811]: E0128 15:46:15.827667 4811 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0d7027f1-1ac9-4166-9238-d8a50d9d7547\\\",\\\"systemUUID\\\":\\\"abb845de-029b-47c8-9624-111770212716\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:15Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:15 crc kubenswrapper[4811]: E0128 15:46:15.827798 4811 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.829372 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.829402 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.829414 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.829448 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.829461 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:15Z","lastTransitionTime":"2026-01-28T15:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.932261 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.932303 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.932314 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.932328 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:15 crc kubenswrapper[4811]: I0128 15:46:15.932339 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:15Z","lastTransitionTime":"2026-01-28T15:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.038524 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.038593 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.038615 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.038643 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.038661 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:16Z","lastTransitionTime":"2026-01-28T15:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.140839 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.140871 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.140881 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.140896 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.140906 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:16Z","lastTransitionTime":"2026-01-28T15:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.243985 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.244050 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.244063 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.244081 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.244130 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:16Z","lastTransitionTime":"2026-01-28T15:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.329747 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 01:48:25.847225889 +0000 UTC Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.339190 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.339236 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:46:16 crc kubenswrapper[4811]: E0128 15:46:16.339304 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.339190 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:46:16 crc kubenswrapper[4811]: E0128 15:46:16.339380 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:46:16 crc kubenswrapper[4811]: E0128 15:46:16.339456 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.339601 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:46:16 crc kubenswrapper[4811]: E0128 15:46:16.339674 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.345838 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.345867 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.345877 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.345890 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.345900 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:16Z","lastTransitionTime":"2026-01-28T15:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.448651 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.448690 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.448702 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.448719 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.448732 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:16Z","lastTransitionTime":"2026-01-28T15:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.551177 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.551212 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.551224 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.551239 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.551248 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:16Z","lastTransitionTime":"2026-01-28T15:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.654198 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.654243 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.654252 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.654269 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.654280 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:16Z","lastTransitionTime":"2026-01-28T15:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.731723 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-mw4th_dc61e84d-a003-46db-924b-7f9ba7460f13/kube-multus/0.log" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.731837 4811 generic.go:334] "Generic (PLEG): container finished" podID="dc61e84d-a003-46db-924b-7f9ba7460f13" containerID="45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a" exitCode=1 Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.731887 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-mw4th" event={"ID":"dc61e84d-a003-46db-924b-7f9ba7460f13","Type":"ContainerDied","Data":"45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a"} Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.732409 4811 scope.go:117] "RemoveContainer" containerID="45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.757143 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.757370 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.757382 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.757397 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.757406 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:16Z","lastTransitionTime":"2026-01-28T15:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.763443 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:16Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.778172 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:16Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.794413 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:16Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.806670 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:16Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.819979 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03015015ac6736269b093c13f4df7abdad3a954c3336cc0b2de2e44cc5ae8338\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:16Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.833974 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:16Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.849088 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:16Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.859343 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.859380 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.859389 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.859405 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.859416 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:16Z","lastTransitionTime":"2026-01-28T15:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.862288 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:16Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.881331 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b6be9a85b332950bbbde5670c0ed3366ce07d2df9b7a68815b6582e8cf874f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:16Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.895044 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:16Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.918160 4811 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87d2f6f9e01f65f6e0d5755580858274ad796afdce547b78989251f29be3e913\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87d2f6f9e01f65f6e0d5755580858274ad796afdce547b78989251f29be3e913\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:45:56Z\\\",\\\"message\\\":\\\":true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.93\\\\\\\", Port:5000, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0128 15:45:56.487623 6457 services_controller.go:452] Built service openshift-image-registry/image-registry per-node LB for network=default: []services.LB{}\\\\nI0128 15:45:56.487628 6457 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-4vcr6 in node crc\\\\nI0128 15:45:56.487635 6457 services_controller.go:453] Built service openshift-image-registry/image-registry template LB for network=default: []services.LB{}\\\\nI0128 15:45:56.487640 6457 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-4vcr6 after 0 failed attempt(s)\\\\nF0128 15:45:56.487640 6457 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shar\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-dkzf6_openshift-ovn-kubernetes(d3371905-8bb6-433c-b905-43d69e7b382a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:16Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.931366 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c725da74-d9b4-4f16-9a21-f9aae2fd05f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63a7b85207f94aa6d9180a5e207421fe63993dbedbac93b11a5b7ad1b23bdd78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e528919fa4f8c2b8108615f2dd2d79d790aea2e94bb7b2de7bb1cd188fd47ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ede5aafa1ce2ebfbaa3570ec43084884dc57d2d15dc5d2751c3b5a9072d8dad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c990287111409ab3f1f5210adb2b4da06714d33e695ac76e2f83f40073dfca14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:16Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.942937 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45f0fa8e-5670-4ba9-bc3b-868ceb764c20\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6aeff9c9b52faf757610027512ade23bf2c2b89068e97507c51b88d9f3a76c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://566c2ee1b03593202e07f16e8b1a976ed27ca7036a10b96f7eebce30147baf3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59cd87b55159f29922118c39ffc75f0b23d700438e9dd590f82a14c1880ed86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://296eeb684557d95622f81bb202a9d96da8036a904b97539ea34b546cfd37f274\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296eeb684557d95622f81bb202a9d96da8036a904b97539ea34b546cfd37f274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:16Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.953041 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a6786216986b5b79f685fddb0711a6c7a735ff6b163a9516399cde7f85a8f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}]
,\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:16Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.961827 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.961851 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.961860 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.961873 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.961882 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:16Z","lastTransitionTime":"2026-01-28T15:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.964599 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac9a279-ca90-41f7-bf8d-d51717483598\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a90ec64eeca27c3b374baa74b14b2380008b4863ad6d3618d55f1dce62ea37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2803e1abf925bb53b578077c2bd42241a09b9c0bd5181c2fab6f0e09fda4ee18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dj8kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:16Z is after 2025-08-24T17:21:41Z" Jan 28 
15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.977238 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5mlc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb3aac9c-e96e-4a5e-beb5-aeff56394467\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5mlc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:16Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:16 crc kubenswrapper[4811]: I0128 15:46:16.989974 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:16Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.004662 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:46:16Z\\\",\\\"message\\\":\\\"2026-01-28T15:45:30+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1836a187-742c-4845-ab73-ea4bd9669a11\\\\n2026-01-28T15:45:30+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1836a187-742c-4845-ab73-ea4bd9669a11 to /host/opt/cni/bin/\\\\n2026-01-28T15:45:31Z [verbose] multus-daemon started\\\\n2026-01-28T15:45:31Z [verbose] Readiness Indicator file check\\\\n2026-01-28T15:46:16Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:17Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.064065 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.064111 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.064127 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.064146 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.064157 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:17Z","lastTransitionTime":"2026-01-28T15:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.165915 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.165955 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.165965 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.165980 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.165992 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:17Z","lastTransitionTime":"2026-01-28T15:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.165992 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:17Z","lastTransitionTime":"2026-01-28T15:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.268210 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.268247 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.268256 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.268269 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.268279 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:17Z","lastTransitionTime":"2026-01-28T15:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.330408 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 02:07:25.165647239 +0000 UTC
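
[editor's note, not part of the log] The certificate_manager.go line above is worth pausing on: the kubelet-serving certificate is valid until 2026-02-24, but the logged rotation deadline (2025-12-15) is already more than a month in the past at log time (2026-01-28), so rotation is overdue, consistent with the expired-certificate failures elsewhere in this stretch. Certificate managers in the client-go style pick the deadline as a jittered point late in the validity window; the sketch below shows the shape of that computation. The 0.7 base and 0.2 jitter span, and the assumed one-year issue time, are illustrative assumptions, not the exact upstream constants:

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline picks a jittered point late in the certificate's validity
// window, so a fleet of kubelets does not rotate simultaneously. Constants
// here are illustrative (roughly the 70-90% band), not upstream's exact values.
func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	// Expiry taken from the certificate_manager.go line above.
	notAfter, _ := time.Parse("2006-01-02 15:04:05", "2026-02-24 05:53:03")
	// Issue time is not in the log; assume a one-year certificate for illustration.
	notBefore := notAfter.AddDate(-1, 0, 0)
	deadline := rotationDeadline(notBefore, notAfter)
	fmt.Println("rotation deadline:", deadline)
	fmt.Println("overdue at log time?", deadline.Before(time.Date(2026, 1, 28, 15, 46, 17, 0, time.UTC)))
}

[end note]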
Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.371254 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.371324 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.371342 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.371367 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.371389 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:17Z","lastTransitionTime":"2026-01-28T15:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.473849 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.473876 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.473884 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.473903 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.473919 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:17Z","lastTransitionTime":"2026-01-28T15:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.577090 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.577746 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.577773 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.577792 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.577804 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:17Z","lastTransitionTime":"2026-01-28T15:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.680585 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.680638 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.680649 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.680666 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.680677 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:17Z","lastTransitionTime":"2026-01-28T15:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.736497 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-mw4th_dc61e84d-a003-46db-924b-7f9ba7460f13/kube-multus/0.log"
Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.736576 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-mw4th" event={"ID":"dc61e84d-a003-46db-924b-7f9ba7460f13","Type":"ContainerStarted","Data":"204fd678c7f2d914319df1c9cc694ffc516812b85a27536428e0e91fecb39c33"}
Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.750420 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:17Z is after 2025-08-24T17:21:41Z"
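
[editor's note, not part of the log] Every failed status patch in this stretch dies on the same TLS handshake: the network-node-identity webhook at https://127.0.0.1:9743 presents a certificate whose NotAfter (2025-08-24T17:21:41Z) is months behind the node clock (2026-01-28). A short Go probe, assuming only the address quoted in the log, that reproduces the validity-window comparison behind "certificate has expired or is not yet valid":

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Address taken from the webhook URL in the log entries above.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		InsecureSkipVerify: true, // skip chain verification so an expired cert can still be inspected
	})
	if err != nil {
		fmt.Println("dial:", err)
		return
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	now := time.Now().UTC()
	switch {
	case now.After(cert.NotAfter):
		// The same comparison that yields the x509 errors in the log.
		fmt.Printf("expired: current time %s is after %s\n",
			now.Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
	case now.Before(cert.NotBefore):
		fmt.Printf("not yet valid: certificate starts %s\n", cert.NotBefore.UTC().Format(time.RFC3339))
	default:
		fmt.Println("valid until", cert.NotAfter.UTC().Format(time.RFC3339))
	}
}

Because the webhook rejects every status patch, these "Failed to update status for pod" records repeat for each pod on the node until the certificate is rotated; the pod workloads themselves keep running. [end note]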
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:17Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.778221 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b6be9a85b332950bbbde5670c0ed3366ce07d2df9b7a68815b6582e8cf874f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:17Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.783388 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.783422 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:17 crc 
kubenswrapper[4811]: I0128 15:46:17.783458 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.783483 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.783497 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:17Z","lastTransitionTime":"2026-01-28T15:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.787468 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\
\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:17Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.807949 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87d2f6f9e01f65f6e0d5755580858274ad796afd
ce547b78989251f29be3e913\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87d2f6f9e01f65f6e0d5755580858274ad796afdce547b78989251f29be3e913\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:45:56Z\\\",\\\"message\\\":\\\":true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.93\\\\\\\", Port:5000, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0128 15:45:56.487623 6457 services_controller.go:452] Built service openshift-image-registry/image-registry per-node LB for network=default: []services.LB{}\\\\nI0128 15:45:56.487628 6457 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-4vcr6 in node crc\\\\nI0128 15:45:56.487635 6457 services_controller.go:453] Built service openshift-image-registry/image-registry template LB for network=default: []services.LB{}\\\\nI0128 15:45:56.487640 6457 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-4vcr6 after 0 failed attempt(s)\\\\nF0128 15:45:56.487640 6457 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shar\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-dkzf6_openshift-ovn-kubernetes(d3371905-8bb6-433c-b905-43d69e7b382a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:17Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.819964 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c725da74-d9b4-4f16-9a21-f9aae2fd05f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63a7b85207f94aa6d9180a5e207421fe63993dbedbac93b11a5b7ad1b23bdd78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e528919fa4f8c2b8108615f2dd2d79d790aea2e94bb7b2de7bb1cd188fd47ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ede5aafa1ce2ebfbaa3570ec43084884dc57d2d15dc5d2751c3b5a9072d8dad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c990287111409ab3f1f5210adb2b4da06714d33e695ac76e2f83f40073dfca14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:17Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.830517 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45f0fa8e-5670-4ba9-bc3b-868ceb764c20\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6aeff9c9b52faf757610027512ade23bf2c2b89068e97507c51b88d9f3a76c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://566c2ee1b03593202e07f16e8b1a976ed27ca7036a10b96f7eebce30147baf3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59cd87b55159f29922118c39ffc75f0b23d700438e9dd590f82a14c1880ed86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://296eeb684557d95622f81bb202a9d96da8036a904b97539ea34b546cfd37f274\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296eeb684557d95622f81bb202a9d96da8036a904b97539ea34b546cfd37f274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:17Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.842653 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:17Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.851243 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a6786216986b5b79f685fddb0711a6c7a735ff6b163a9516399cde7f85a8f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:17Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.861260 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5mlc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb3aac9c-e96e-4a5e-beb5-aeff56394467\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5mlc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:17Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.873037 4811 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:17Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.884864 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://204fd678c7f2d914319df1c9cc694ffc516812b85a27536428e0e91fecb39c33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:46:16Z\\\",\\\"message\\\":\\\"2026-01-28T15:45:30+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1836a187-742c-4845-ab73-ea4bd9669a11\\\\n2026-01-28T15:45:30+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1836a187-742c-4845-ab73-ea4bd9669a11 to /host/opt/cni/bin/\\\\n2026-01-28T15:45:31Z [verbose] multus-daemon started\\\\n2026-01-28T15:45:31Z [verbose] Readiness Indicator file check\\\\n2026-01-28T15:46:16Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:17Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.886143 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.886174 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.886185 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.886200 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.886210 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:17Z","lastTransitionTime":"2026-01-28T15:46:17Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.896338 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac9a279-ca90-41f7-bf8d-d51717483598\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a90ec64eeca27c3b374baa74b14b2380008b4863ad6d3618d55f1dce62ea37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2803e1abf925bb53b578077c2bd42241a09b9c0bd5181c2fab6f0e09fda4ee18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-
01-28T15:45:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dj8kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:17Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.910688 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:17Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.926218 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:17Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.938131 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:17Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.955303 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03015015ac6736269b093c13f4df7abdad3a954c3336cc0b2de2e44cc5ae8338\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:17Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.977000 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b
243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:17Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.988452 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.988494 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.988503 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.988517 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:17 crc kubenswrapper[4811]: I0128 15:46:17.988538 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:17Z","lastTransitionTime":"2026-01-28T15:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.090705 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.090745 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.090757 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.090773 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.090785 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:18Z","lastTransitionTime":"2026-01-28T15:46:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.194663 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.194701 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.194709 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.194723 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.194735 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:18Z","lastTransitionTime":"2026-01-28T15:46:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.297132 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.297168 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.297176 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.297189 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.297199 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:18Z","lastTransitionTime":"2026-01-28T15:46:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.331518 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 16:15:31.383157223 +0000 UTC
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.338911 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.338974 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 28 15:46:18 crc kubenswrapper[4811]: E0128 15:46:18.339033 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.338922 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 28 15:46:18 crc kubenswrapper[4811]: E0128 15:46:18.339267 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.339288 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc"
Jan 28 15:46:18 crc kubenswrapper[4811]: E0128 15:46:18.339356 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 28 15:46:18 crc kubenswrapper[4811]: E0128 15:46:18.339458 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.363011 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-
01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68a
f7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:18Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.377035 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:18Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.388804 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:18Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.397787 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:18Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.399103 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.399136 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.399148 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.399161 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.399170 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:18Z","lastTransitionTime":"2026-01-28T15:46:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.410194 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name
\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03015015ac6736269b093c13f4df7abdad3a954c3336cc0b2de2e44cc5ae8338\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:18Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.422185 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:18Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.435341 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:18Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.444877 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:18Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.457123 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b6be9a85b332950bbbde5670c0ed3366ce07d2df9b7a68815b6582e8cf874f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:18Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.466390 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:18Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.484139 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87d2f6f9e01f65f6e0d5755580858274ad796afd
ce547b78989251f29be3e913\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87d2f6f9e01f65f6e0d5755580858274ad796afdce547b78989251f29be3e913\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:45:56Z\\\",\\\"message\\\":\\\":true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.93\\\\\\\", Port:5000, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0128 15:45:56.487623 6457 services_controller.go:452] Built service openshift-image-registry/image-registry per-node LB for network=default: []services.LB{}\\\\nI0128 15:45:56.487628 6457 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-4vcr6 in node crc\\\\nI0128 15:45:56.487635 6457 services_controller.go:453] Built service openshift-image-registry/image-registry template LB for network=default: []services.LB{}\\\\nI0128 15:45:56.487640 6457 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-4vcr6 after 0 failed attempt(s)\\\\nF0128 15:45:56.487640 6457 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shar\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-dkzf6_openshift-ovn-kubernetes(d3371905-8bb6-433c-b905-43d69e7b382a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:18Z is after 2025-08-24T17:21:41Z"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.494092 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c725da74-d9b4-4f16-9a21-f9aae2fd05f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63a7b85207f94aa6d9180a5e207421fe63993dbedbac93b11a5b7ad1b23bdd78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e528919fa4f8c2b8108615f2dd2d79d790aea2e94bb7b2de7bb1cd188fd47ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ede5aafa1ce2ebfbaa3570ec43084884dc57d2d15dc5d2751c3b5a9072d8dad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c990287111409ab3f1f5210adb2b4da06714d33e695ac76e2f83f40073dfca14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:18Z is after 2025-08-24T17:21:41Z"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.501567 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.501595 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.501605 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.501619 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.501630 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:18Z","lastTransitionTime":"2026-01-28T15:46:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.503105 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45f0fa8e-5670-4ba9-bc3b-868ceb764c20\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6aeff9c9b52faf757610027512ade23bf2c2b89068e97507c51b88d9f3a76c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://566c2ee1b03593202e07f16e8b1a976ed27ca7036a10b96f7eebce30147baf3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59cd87b55159f29922118c39ffc75f0b23d700438e9dd590f82a14c1880ed86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://296eeb684557d95622f81bb202a9d96da8036a904b97539ea34b546cfd37f274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296eeb684557d95622f81bb202a9d96da8036a904b97539ea34b546cfd37f274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:18Z is after 2025-08-24T17:21:41Z"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.510839 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a6786216986b5b79f685fddb0711a6c7a735ff6b163a9516399cde7f85a8f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:18Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.519461 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac9a279-ca90-41f7-bf8d-d51717483598\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a90ec64eeca27c3b374baa74b14b2380008b4863ad6d3618d55f1dce62ea37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2803e1abf925bb53b578077c2bd42241a09b9c0bd5181c2fab6f0e09fda4ee18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dj8kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:18Z is after 2025-08-24T17:21:41Z" Jan 28 
15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.527475 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5mlc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb3aac9c-e96e-4a5e-beb5-aeff56394467\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5mlc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:18Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.538637 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:18Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.550101 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://204fd678c7f2d914319df1c9cc694ffc516812b85a27536428e0e91fecb39c33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:46:16Z\\\",\\\"message\\\":\\\"2026-01-28T15:45:30+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1836a187-742c-4845-ab73-ea4bd9669a11\\\\n2026-01-28T15:45:30+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1836a187-742c-4845-ab73-ea4bd9669a11 to /host/opt/cni/bin/\\\\n2026-01-28T15:45:31Z [verbose] multus-daemon started\\\\n2026-01-28T15:45:31Z [verbose] Readiness Indicator file check\\\\n2026-01-28T15:46:16Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:18Z is after 2025-08-24T17:21:41Z"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.604528 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.604572 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.604580 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.604594 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.604603 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:18Z","lastTransitionTime":"2026-01-28T15:46:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.706900 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.706943 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.706952 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.706967 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.706975 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:18Z","lastTransitionTime":"2026-01-28T15:46:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.808676 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.808719 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.808730 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.808746 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.808757 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:18Z","lastTransitionTime":"2026-01-28T15:46:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.910793 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.910833 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.910841 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.910854 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:18 crc kubenswrapper[4811]: I0128 15:46:18.910863 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:18Z","lastTransitionTime":"2026-01-28T15:46:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.013197 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.013233 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.013244 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.013258 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.013267 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:19Z","lastTransitionTime":"2026-01-28T15:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.118327 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.118375 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.118390 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.118408 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.118421 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:19Z","lastTransitionTime":"2026-01-28T15:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.220734 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.220812 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.220825 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.220840 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.220852 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:19Z","lastTransitionTime":"2026-01-28T15:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.323622 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.323676 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.323688 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.323707 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.323720 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:19Z","lastTransitionTime":"2026-01-28T15:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.331889 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 08:39:42.453112427 +0000 UTC
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.425756 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.425802 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.425812 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.425826 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.425835 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:19Z","lastTransitionTime":"2026-01-28T15:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.527762 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.527835 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.527861 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.527888 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.527911 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:19Z","lastTransitionTime":"2026-01-28T15:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.630953 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.630982 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.630990 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.631001 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.631009 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:19Z","lastTransitionTime":"2026-01-28T15:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.733714 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.733780 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.733798 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.733824 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.733842 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:19Z","lastTransitionTime":"2026-01-28T15:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.836533 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.836608 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.836621 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.836640 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.836654 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:19Z","lastTransitionTime":"2026-01-28T15:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.943530 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.943690 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.943718 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.943735 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:19 crc kubenswrapper[4811]: I0128 15:46:19.943750 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:19Z","lastTransitionTime":"2026-01-28T15:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.046309 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.046370 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.046387 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.046410 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.046426 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:20Z","lastTransitionTime":"2026-01-28T15:46:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.148946 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.149065 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.149084 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.149109 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.149128 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:20Z","lastTransitionTime":"2026-01-28T15:46:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.252293 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.252360 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.252386 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.252414 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.252615 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:20Z","lastTransitionTime":"2026-01-28T15:46:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.332541 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 22:10:15.171801834 +0000 UTC
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.338913 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.338951 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.338952 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.338936 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 28 15:46:20 crc kubenswrapper[4811]: E0128 15:46:20.339057 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 28 15:46:20 crc kubenswrapper[4811]: E0128 15:46:20.339139 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 28 15:46:20 crc kubenswrapper[4811]: E0128 15:46:20.339261 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467"
Jan 28 15:46:20 crc kubenswrapper[4811]: E0128 15:46:20.339286 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.355778 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.355809 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.355820 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.355831 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.355842 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:20Z","lastTransitionTime":"2026-01-28T15:46:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.459244 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.459343 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.459356 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.459374 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.459386 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:20Z","lastTransitionTime":"2026-01-28T15:46:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.562369 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.562420 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.562452 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.562471 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.562482 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:20Z","lastTransitionTime":"2026-01-28T15:46:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.664739 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.664778 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.664786 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.664800 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.664810 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:20Z","lastTransitionTime":"2026-01-28T15:46:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.767713 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.767767 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.767777 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.767792 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.767805 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:20Z","lastTransitionTime":"2026-01-28T15:46:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.870278 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.870312 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.870321 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.870334 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.870343 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:20Z","lastTransitionTime":"2026-01-28T15:46:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.973520 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.973566 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.973576 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.973594 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:20 crc kubenswrapper[4811]: I0128 15:46:20.973604 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:20Z","lastTransitionTime":"2026-01-28T15:46:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.076213 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.076257 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.076266 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.076281 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.076293 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:21Z","lastTransitionTime":"2026-01-28T15:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.179063 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.179315 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.179461 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.179746 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.179846 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:21Z","lastTransitionTime":"2026-01-28T15:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.283198 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.283248 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.283261 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.283280 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.283292 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:21Z","lastTransitionTime":"2026-01-28T15:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.332971 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 10:31:06.193453959 +0000 UTC Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.386550 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.386596 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.386610 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.386629 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.386642 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:21Z","lastTransitionTime":"2026-01-28T15:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.489010 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.489304 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.489377 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.489466 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.489568 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:21Z","lastTransitionTime":"2026-01-28T15:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.592821 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.592870 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.592883 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.592905 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.592914 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:21Z","lastTransitionTime":"2026-01-28T15:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.696062 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.696118 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.696134 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.696160 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.696180 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:21Z","lastTransitionTime":"2026-01-28T15:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.798556 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.799004 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.799084 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.799162 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.799235 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:21Z","lastTransitionTime":"2026-01-28T15:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.902343 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.902400 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.902410 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.902447 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:21 crc kubenswrapper[4811]: I0128 15:46:21.902461 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:21Z","lastTransitionTime":"2026-01-28T15:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.005536 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.005644 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.005669 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.005704 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.005728 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:22Z","lastTransitionTime":"2026-01-28T15:46:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.108883 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.108951 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.108961 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.108975 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.108985 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:22Z","lastTransitionTime":"2026-01-28T15:46:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.212264 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.212320 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.212332 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.212352 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.212368 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:22Z","lastTransitionTime":"2026-01-28T15:46:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.315852 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.315898 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.315910 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.315927 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.315939 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:22Z","lastTransitionTime":"2026-01-28T15:46:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.334376 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 13:47:30.896219814 +0000 UTC Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.338760 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.338813 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.338843 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:46:22 crc kubenswrapper[4811]: E0128 15:46:22.339152 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.339239 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:46:22 crc kubenswrapper[4811]: E0128 15:46:22.339277 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:46:22 crc kubenswrapper[4811]: E0128 15:46:22.339377 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:46:22 crc kubenswrapper[4811]: E0128 15:46:22.339540 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.419405 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.419502 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.419516 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.419542 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.419559 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:22Z","lastTransitionTime":"2026-01-28T15:46:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.523358 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.523412 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.523424 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.523466 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.523478 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:22Z","lastTransitionTime":"2026-01-28T15:46:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.626868 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.626927 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.626941 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.626967 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.626982 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:22Z","lastTransitionTime":"2026-01-28T15:46:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.730151 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.730238 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.730260 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.730291 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.730349 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:22Z","lastTransitionTime":"2026-01-28T15:46:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.833303 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.833419 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.833489 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.833521 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.833540 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:22Z","lastTransitionTime":"2026-01-28T15:46:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.936928 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.937453 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.937558 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.937678 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:22 crc kubenswrapper[4811]: I0128 15:46:22.937773 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:22Z","lastTransitionTime":"2026-01-28T15:46:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.040512 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.040577 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.040594 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.040619 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.040637 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:23Z","lastTransitionTime":"2026-01-28T15:46:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.144157 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.144229 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.144250 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.144280 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.144302 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:23Z","lastTransitionTime":"2026-01-28T15:46:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.247659 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.247729 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.247752 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.247788 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.247813 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:23Z","lastTransitionTime":"2026-01-28T15:46:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.334840 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 11:13:53.753882627 +0000 UTC Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.350235 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.350269 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.350282 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.350298 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.350309 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:23Z","lastTransitionTime":"2026-01-28T15:46:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.452833 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.452904 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.452937 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.452960 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.452976 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:23Z","lastTransitionTime":"2026-01-28T15:46:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.555225 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.555284 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.555298 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.555320 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.555335 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:23Z","lastTransitionTime":"2026-01-28T15:46:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.657970 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.658028 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.658038 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.658056 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.658068 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:23Z","lastTransitionTime":"2026-01-28T15:46:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.761892 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.761919 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.761928 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.761940 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.761949 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:23Z","lastTransitionTime":"2026-01-28T15:46:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.864149 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.864188 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.864200 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.864216 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.864228 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:23Z","lastTransitionTime":"2026-01-28T15:46:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.968029 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.968110 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.968127 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.968155 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:23 crc kubenswrapper[4811]: I0128 15:46:23.968175 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:23Z","lastTransitionTime":"2026-01-28T15:46:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.070645 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.070685 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.070696 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.070715 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.070727 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:24Z","lastTransitionTime":"2026-01-28T15:46:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.173365 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.173408 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.173419 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.173461 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.173476 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:24Z","lastTransitionTime":"2026-01-28T15:46:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.275892 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.275961 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.275979 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.276004 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.276025 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:24Z","lastTransitionTime":"2026-01-28T15:46:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.336027 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-03 08:51:41.090577907 +0000 UTC Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.339574 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.339759 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.340293 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.340411 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:46:24 crc kubenswrapper[4811]: E0128 15:46:24.340535 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:46:24 crc kubenswrapper[4811]: E0128 15:46:24.340728 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:46:24 crc kubenswrapper[4811]: E0128 15:46:24.340921 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.341058 4811 scope.go:117] "RemoveContainer" containerID="87d2f6f9e01f65f6e0d5755580858274ad796afdce547b78989251f29be3e913" Jan 28 15:46:24 crc kubenswrapper[4811]: E0128 15:46:24.341188 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.379905 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.379971 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.380012 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.380049 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.380069 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:24Z","lastTransitionTime":"2026-01-28T15:46:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.482729 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.483473 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.483494 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.483528 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.483548 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:24Z","lastTransitionTime":"2026-01-28T15:46:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.586624 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.586647 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.586654 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.586666 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.586674 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:24Z","lastTransitionTime":"2026-01-28T15:46:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.689048 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.689079 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.689089 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.689106 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.689119 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:24Z","lastTransitionTime":"2026-01-28T15:46:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.764743 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dkzf6_d3371905-8bb6-433c-b905-43d69e7b382a/ovnkube-controller/2.log" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.767587 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" event={"ID":"d3371905-8bb6-433c-b905-43d69e7b382a","Type":"ContainerStarted","Data":"5a7c911b8901e1963fe372b87999b0e5961f4fe4a184c52d659bee9aece40d16"} Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.768160 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.790818 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.790855 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.790865 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.790879 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.790889 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:24Z","lastTransitionTime":"2026-01-28T15:46:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.794537 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:24Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.812070 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:24Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.829257 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:24Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.838509 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:24Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.860206 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03015015ac6736269b093c13f4df7abdad3a954c3336cc0b2de2e44cc5ae8338\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:24Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.876902 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:24Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.893477 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.893525 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.893534 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.893552 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.893566 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:24Z","lastTransitionTime":"2026-01-28T15:46:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.894360 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:24Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.916203 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:24Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.934785 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b6be9a85b332950bbbde5670c0ed3366ce07d2df9b7a68815b6582e8cf874f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:24Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.948492 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:24Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.966744 4811 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a7c911b8901e1963fe372b87999b0e5961f4fe4a184c52d659bee9aece40d16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87d2f6f9e01f65f6e0d5755580858274ad796afdce547b78989251f29be3e913\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:45:56Z\\\",\\\"message\\\":\\\":true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.93\\\\\\\", Port:5000, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0128 15:45:56.487623 6457 services_controller.go:452] Built service openshift-image-registry/image-registry per-node LB for network=default: []services.LB{}\\\\nI0128 15:45:56.487628 6457 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-4vcr6 in node crc\\\\nI0128 15:45:56.487635 6457 services_controller.go:453] Built service openshift-image-registry/image-registry template LB for network=default: []services.LB{}\\\\nI0128 15:45:56.487640 6457 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-4vcr6 after 0 failed attempt(s)\\\\nF0128 15:45:56.487640 6457 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to 
shar\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:46:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"c
ontainerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:24Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.979303 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c725da74-d9b4-4f16-9a21-f9aae2fd05f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63a7b85207f94aa6d9180a5e207421fe63993dbedbac93b11a5b7ad1b23bdd78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e528919fa4f8c2b8108615f2dd2d79d790aea2e94bb7b2de7bb1cd188fd47ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ede5aafa1ce2ebfbaa3570ec43084884dc57d2d15dc5d2751c3b5a9072d8dad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c990287111409ab3f1f5210adb2b4da06714d33e695ac76e2f83f40073dfca14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:24Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.991263 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45f0fa8e-5670-4ba9-bc3b-868ceb764c20\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6aeff9c9b52faf757610027512ade23bf2c2b89068e97507c51b88d9f3a76c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://566c2ee1b03593202e07f16e8b1a976ed27ca7036a10b96f7eebce30147baf3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59cd87b55159f29922118c39ffc75f0b23d700438e9dd590f82a14c1880ed86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://296eeb684557d95622f81bb202a9d96da8036a904b97539ea34b546cfd37f274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296eeb684557d95622f81bb202a9d96da8036a904b97539ea34b546cfd37f274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:24Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.996050 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.996108 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.996120 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 15:46:24.996142 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:24 crc kubenswrapper[4811]: I0128 
15:46:24.996159 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:24Z","lastTransitionTime":"2026-01-28T15:46:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.002409 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a6786216986b5b79f685fddb0711a6c7a735ff6b163a9516399cde7f85a8f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:25Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.012090 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac9a279-ca90-41f7-bf8d-d51717483598\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a90ec64eeca27c3b374baa74b14b2380008b4863ad6d3618d55f1dce62ea37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2803e1abf925bb53b578077c2bd42241a09b9c0bd5181c2fab6f0e09fda4ee18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dj8kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:25Z is after 2025-08-24T17:21:41Z" Jan 28 
15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.021901 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5mlc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb3aac9c-e96e-4a5e-beb5-aeff56394467\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5mlc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:25Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.036206 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:25Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.054533 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://204fd678c7f2d914319df1c9cc694ffc516812b85a27536428e0e91fecb39c33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:46:16Z\\\",\\\"message\\\":\\\"2026-01-28T15:45:30+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1836a187-742c-4845-ab73-ea4bd9669a11\\\\n2026-01-28T15:45:30+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1836a187-742c-4845-ab73-ea4bd9669a11 to /host/opt/cni/bin/\\\\n2026-01-28T15:45:31Z [verbose] multus-daemon started\\\\n2026-01-28T15:45:31Z [verbose] Readiness Indicator file check\\\\n2026-01-28T15:46:16Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:25Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.098792 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.098823 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.098832 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.098845 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.098854 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:25Z","lastTransitionTime":"2026-01-28T15:46:25Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.201615 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.201839 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.201850 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.201866 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.201878 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:25Z","lastTransitionTime":"2026-01-28T15:46:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.322940 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.322964 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.322972 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.322986 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.322996 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:25Z","lastTransitionTime":"2026-01-28T15:46:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.336460 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 22:37:03.178549635 +0000 UTC
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.426797 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.426844 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.426880 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.426899 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.426911 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:25Z","lastTransitionTime":"2026-01-28T15:46:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.530741 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.530806 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.530830 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.530858 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.530877 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:25Z","lastTransitionTime":"2026-01-28T15:46:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.634458 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.634512 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.634529 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.634550 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.634565 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:25Z","lastTransitionTime":"2026-01-28T15:46:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.737381 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.737418 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.737469 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.737490 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.737504 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:25Z","lastTransitionTime":"2026-01-28T15:46:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.774260 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dkzf6_d3371905-8bb6-433c-b905-43d69e7b382a/ovnkube-controller/3.log"
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.777065 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dkzf6_d3371905-8bb6-433c-b905-43d69e7b382a/ovnkube-controller/2.log"
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.781415 4811 generic.go:334] "Generic (PLEG): container finished" podID="d3371905-8bb6-433c-b905-43d69e7b382a" containerID="5a7c911b8901e1963fe372b87999b0e5961f4fe4a184c52d659bee9aece40d16" exitCode=1
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.781471 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" event={"ID":"d3371905-8bb6-433c-b905-43d69e7b382a","Type":"ContainerDied","Data":"5a7c911b8901e1963fe372b87999b0e5961f4fe4a184c52d659bee9aece40d16"}
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.781503 4811 scope.go:117] "RemoveContainer" containerID="87d2f6f9e01f65f6e0d5755580858274ad796afdce547b78989251f29be3e913"
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.782877 4811 scope.go:117] "RemoveContainer" containerID="5a7c911b8901e1963fe372b87999b0e5961f4fe4a184c52d659bee9aece40d16"
Jan 28 15:46:25 crc kubenswrapper[4811]: E0128 15:46:25.783201 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-dkzf6_openshift-ovn-kubernetes(d3371905-8bb6-433c-b905-43d69e7b382a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" podUID="d3371905-8bb6-433c-b905-43d69e7b382a"
Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.819506 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b
243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:25Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.838427 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:25Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.840630 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.840750 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.840768 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.840850 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.840875 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:25Z","lastTransitionTime":"2026-01-28T15:46:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.852907 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:25Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.865663 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:25Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.880880 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03015015ac6736269b093c13f4df7abdad3a954c3336cc0b2de2e44cc5ae8338\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:25Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.895700 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45f0fa8e-5670-4ba9-bc3b-868ceb764c20\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6aeff9c9b52faf757610027512ade23bf2c2b89068e97507c51b88d9f3a76c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://566c2ee1b03593202e07f16e8b1a976ed27ca7036a10b96f7eebce30147baf3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59cd87b55159f29922118c39ffc75f0b23d700438e9dd590f82a14c1880ed86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://296eeb684557d95622f81bb202a9d96da8036a904b97539ea34b546cfd37f274\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296eeb684557d95622f81bb202a9d96da8036a904b97539ea34b546cfd37f274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:25Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.907387 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:25Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.918308 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:25Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.930192 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:25Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.944877 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b6be9a85b332950bbbde5670c0ed3366ce07d2df9b7a68815b6582e8cf874f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:25Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.945066 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.945101 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:25 crc 
kubenswrapper[4811]: I0128 15:46:25.945111 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.945129 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.945140 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:25Z","lastTransitionTime":"2026-01-28T15:46:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.954917 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\
\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:25Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.972386 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a7c911b8901e1963fe372b87999b0e5961f4fe4
a184c52d659bee9aece40d16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87d2f6f9e01f65f6e0d5755580858274ad796afdce547b78989251f29be3e913\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:45:56Z\\\",\\\"message\\\":\\\":true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.93\\\\\\\", Port:5000, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0128 15:45:56.487623 6457 services_controller.go:452] Built service openshift-image-registry/image-registry per-node LB for network=default: []services.LB{}\\\\nI0128 15:45:56.487628 6457 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-4vcr6 in node crc\\\\nI0128 15:45:56.487635 6457 services_controller.go:453] Built service openshift-image-registry/image-registry template LB for network=default: []services.LB{}\\\\nI0128 15:45:56.487640 6457 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-4vcr6 after 0 failed attempt(s)\\\\nF0128 15:45:56.487640 6457 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shar\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a7c911b8901e1963fe372b87999b0e5961f4fe4a184c52d659bee9aece40d16\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:46:25Z\\\",\\\"message\\\":\\\"ork controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:25Z is after 2025-08-24T17:21:41Z]\\\\nI0128 15:46:25.329679 6855 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0128 15:46:25.329714 6855 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.92 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == 
{73135118-cf1b-4568-bd31-2f50308bf69d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0128 15:46:25.329700 6855 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]} options:{GoMap:map[iface-id-ver:9d751c\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:46:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":
true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:25Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.983867 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c725da74-d9b4-4f16-9a21-f9aae2fd05f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63a7b85207f94aa6d9180a5e207421fe63993dbedbac93b11a5b7ad1b23bdd78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e528919fa4f8c2b8108615f2dd2d79d790aea2e94bb7b2de7bb1cd188fd47ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ede5aafa1ce2ebfbaa3570ec43084884dc57d2d15dc5d2751c3b5a9072d8dad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c990287111409ab3f1f5210adb2b4da06714d33e695ac76e2f83f40073dfca14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:25Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:25 crc kubenswrapper[4811]: I0128 15:46:25.993971 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a6786216986b5b79f685fddb0711a6c7a735ff6b163a9516399cde7f85a8f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:25Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.004776 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://204fd678c7f2d914319df1c9cc694ffc516812b85a27536428e0e91fecb39c33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:46:16Z\\\",\\\"message\\\":\\\"2026-01-28T15:45:30+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1836a187-742c-4845-ab73-ea4bd9669a11\\\\n2026-01-28T15:45:30+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1836a187-742c-4845-ab73-ea4bd9669a11 to /host/opt/cni/bin/\\\\n2026-01-28T15:45:31Z [verbose] multus-daemon started\\\\n2026-01-28T15:45:31Z [verbose] Readiness Indicator file check\\\\n2026-01-28T15:46:16Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:26Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.014251 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac9a279-ca90-41f7-bf8d-d51717483598\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a90ec64eeca27c3b374baa74b14b2380008b4863ad6d3618d55f1dce62ea37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2803e1abf925bb53b578077c2bd42241a09b9c0bd5181c2fab6f0e09fda4ee18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dj8kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:26Z is after 2025-08-24T17:21:41Z" Jan 28 
15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.024861 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5mlc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb3aac9c-e96e-4a5e-beb5-aeff56394467\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5mlc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:26Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.027594 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.027619 4811 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.027628 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.027641 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.027650 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:26Z","lastTransitionTime":"2026-01-28T15:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:26 crc kubenswrapper[4811]: E0128 15:46:26.038864 4811 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0d7027f1-1ac9-4166-9238-d8a50d9d7547\\\",\\\"systemUUID\\\":\\\"abb845de-029b-47c8-9624-111770212716\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:26Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.039246 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:26Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.042190 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.042228 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.042236 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.042250 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.042260 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:26Z","lastTransitionTime":"2026-01-28T15:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:26 crc kubenswrapper[4811]: E0128 15:46:26.052912 4811 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0d7027f1-1ac9-4166-9238-d8a50d9d7547\\\",\\\"systemUUID\\\":\\\"abb845de-029b-47c8-9624-111770212716\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:26Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.055992 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.056021 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.056030 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.056044 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.056056 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:26Z","lastTransitionTime":"2026-01-28T15:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:26 crc kubenswrapper[4811]: E0128 15:46:26.065778 4811 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0d7027f1-1ac9-4166-9238-d8a50d9d7547\\\",\\\"systemUUID\\\":\\\"abb845de-029b-47c8-9624-111770212716\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:26Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.068405 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.068455 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.068467 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.068482 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.068492 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:26Z","lastTransitionTime":"2026-01-28T15:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:26 crc kubenswrapper[4811]: E0128 15:46:26.078789 4811 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0d7027f1-1ac9-4166-9238-d8a50d9d7547\\\",\\\"systemUUID\\\":\\\"abb845de-029b-47c8-9624-111770212716\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:26Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.081820 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.081878 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.081887 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.081899 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.081908 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:26Z","lastTransitionTime":"2026-01-28T15:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:26 crc kubenswrapper[4811]: E0128 15:46:26.092211 4811 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0d7027f1-1ac9-4166-9238-d8a50d9d7547\\\",\\\"systemUUID\\\":\\\"abb845de-029b-47c8-9624-111770212716\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:26Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:26 crc kubenswrapper[4811]: E0128 15:46:26.092376 4811 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.093902 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.093928 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.093955 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.093969 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.093979 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:26Z","lastTransitionTime":"2026-01-28T15:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.196357 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.196426 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.197079 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.197108 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.197126 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:26Z","lastTransitionTime":"2026-01-28T15:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.300317 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.300357 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.300369 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.300385 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.300396 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:26Z","lastTransitionTime":"2026-01-28T15:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.337117 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 16:28:38.831747252 +0000 UTC Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.338458 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.338511 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.338458 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:46:26 crc kubenswrapper[4811]: E0128 15:46:26.338591 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.338517 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:46:26 crc kubenswrapper[4811]: E0128 15:46:26.338816 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:46:26 crc kubenswrapper[4811]: E0128 15:46:26.338954 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:46:26 crc kubenswrapper[4811]: E0128 15:46:26.339152 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.403571 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.403621 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.403633 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.403651 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.403664 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:26Z","lastTransitionTime":"2026-01-28T15:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.506245 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.506310 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.506326 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.506343 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.506355 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:26Z","lastTransitionTime":"2026-01-28T15:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.608819 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.608866 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.608877 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.608891 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.608901 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:26Z","lastTransitionTime":"2026-01-28T15:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.712275 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.712360 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.712388 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.712420 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.712481 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:26Z","lastTransitionTime":"2026-01-28T15:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.788912 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dkzf6_d3371905-8bb6-433c-b905-43d69e7b382a/ovnkube-controller/3.log" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.793878 4811 scope.go:117] "RemoveContainer" containerID="5a7c911b8901e1963fe372b87999b0e5961f4fe4a184c52d659bee9aece40d16" Jan 28 15:46:26 crc kubenswrapper[4811]: E0128 15:46:26.794222 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-dkzf6_openshift-ovn-kubernetes(d3371905-8bb6-433c-b905-43d69e7b382a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.815303 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b6be9a85b332950bbbde5670c0ed3366ce07d2df9b7a68815b6582e8cf874f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"term
inated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://664
8e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:26Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.816809 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.816894 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.816908 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.816924 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.816936 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:26Z","lastTransitionTime":"2026-01-28T15:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.830699 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:26Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.868623 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a7c911b8901e1963fe372b87999b0e5961f4fe4a184c52d659bee9aece40d16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a7c911b8901e1963fe372b87999b0e5961f4fe4a184c52d659bee9aece40d16\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:46:25Z\\\",\\\"message\\\":\\\"ork controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:25Z is after 2025-08-24T17:21:41Z]\\\\nI0128 15:46:25.329679 6855 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0128 15:46:25.329714 6855 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.92 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {73135118-cf1b-4568-bd31-2f50308bf69d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0128 15:46:25.329700 6855 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]} 
options:{GoMap:map[iface-id-ver:9d751c\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:46:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-dkzf6_openshift-ovn-kubernetes(d3371905-8bb6-433c-b905-43d69e7b382a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:26Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.894614 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c725da74-d9b4-4f16-9a21-f9aae2fd05f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63a7b85207f94aa6d9180a5e207421fe63993dbedbac93b11a5b7ad1b23bdd78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e528919fa4f8c2b8108615f2dd2d79d790aea2e94bb7b2de7bb1cd188fd47ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ede5aafa1ce2ebfbaa3570ec43084884dc57d2d15dc5d2751c3b5a9072d8dad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c990287111409ab3f1f5210adb2b4da06714d33e695ac76e2f83f40073dfca14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:26Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.910862 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45f0fa8e-5670-4ba9-bc3b-868ceb764c20\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6aeff9c9b52faf757610027512ade23bf2c2b89068e97507c51b88d9f3a76c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://566c2ee1b03593202e07f16e8b1a976ed27ca7036a10b96f7eebce30147baf3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59cd87b55159f29922118c39ffc75f0b23d700438e9dd590f82a14c1880ed86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://296eeb684557d95622f81bb202a9d96da8036a904b97539ea34b546cfd37f274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296eeb684557d95622f81bb202a9d96da8036a904b97539ea34b546cfd37f274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:26Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.919231 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.919263 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.919273 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.919287 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 
15:46:26.919296 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:26Z","lastTransitionTime":"2026-01-28T15:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.923269 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:26Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.932910 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:26Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.944377 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:26Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.954724 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a6786216986b5b79f685fddb0711a6c7a735ff6b163a9516399cde7f85a8f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:26Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.972268 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:26Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:26 crc kubenswrapper[4811]: I0128 15:46:26.986838 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://204fd678c7f2d914319df1c9cc694ffc516812b85a27536428e0e91fecb39c33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:46:16Z\\\",\\\"message\\\":\\\"2026-01-28T15:45:30+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1836a187-742c-4845-ab73-ea4bd9669a11\\\\n2026-01-28T15:45:30+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1836a187-742c-4845-ab73-ea4bd9669a11 to /host/opt/cni/bin/\\\\n2026-01-28T15:45:31Z [verbose] multus-daemon started\\\\n2026-01-28T15:45:31Z [verbose] Readiness Indicator file check\\\\n2026-01-28T15:46:16Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:26Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.002211 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac9a279-ca90-41f7-bf8d-d51717483598\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a90ec64eeca27c3b374baa74b14b2380008b4863ad6d3618d55f1dce62ea37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2803e1abf925bb53b578077c2bd42241a09b9c0bd5181c2fab6f0e09fda4ee18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dj8kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:27Z is after 2025-08-24T17:21:41Z" Jan 28 
15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.013092 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5mlc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb3aac9c-e96e-4a5e-beb5-aeff56394467\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5mlc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:27Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.022667 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.022763 4811 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.022785 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.022817 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.022837 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:27Z","lastTransitionTime":"2026-01-28T15:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.026352 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:27Z is after 2025-08-24T17:21:41Z" Jan 28 
15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.040853 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03015015ac6736269b093c13f4df7abdad3a954c3336cc0b2de2e44cc5ae8338\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:27Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.066417 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b
243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:27Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.078852 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:27Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.091165 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:27Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.124880 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.124936 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.124954 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.124979 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.124997 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:27Z","lastTransitionTime":"2026-01-28T15:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.228630 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.228708 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.228730 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.228760 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.228786 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:27Z","lastTransitionTime":"2026-01-28T15:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.332032 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.332116 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.332143 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.332175 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.332198 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:27Z","lastTransitionTime":"2026-01-28T15:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.338053 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 10:36:03.926404088 +0000 UTC Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.435985 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.436185 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.436202 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.436220 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.436232 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:27Z","lastTransitionTime":"2026-01-28T15:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.539502 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.539554 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.539568 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.539587 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.539600 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:27Z","lastTransitionTime":"2026-01-28T15:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.642859 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.642923 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.642932 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.642947 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.642957 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:27Z","lastTransitionTime":"2026-01-28T15:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.745358 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.745394 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.745403 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.745415 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.745424 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:27Z","lastTransitionTime":"2026-01-28T15:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.848335 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.848384 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.848396 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.848414 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.848456 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:27Z","lastTransitionTime":"2026-01-28T15:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.951170 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.951221 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.951229 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.951241 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:27 crc kubenswrapper[4811]: I0128 15:46:27.951250 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:27Z","lastTransitionTime":"2026-01-28T15:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.054044 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.054118 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.054138 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.054161 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.054177 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:28Z","lastTransitionTime":"2026-01-28T15:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.156726 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.156757 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.156766 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.156779 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.156788 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:28Z","lastTransitionTime":"2026-01-28T15:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.260058 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.260109 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.260127 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.260146 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.260157 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:28Z","lastTransitionTime":"2026-01-28T15:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.339154 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 06:22:16.995951879 +0000 UTC Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.339299 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.339364 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.339417 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:46:28 crc kubenswrapper[4811]: E0128 15:46:28.339418 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.339364 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:46:28 crc kubenswrapper[4811]: E0128 15:46:28.339612 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:46:28 crc kubenswrapper[4811]: E0128 15:46:28.339688 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:46:28 crc kubenswrapper[4811]: E0128 15:46:28.339756 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.354188 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jxfkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5ca7bfb-61e6-40f3-9d7d-7995b948593b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ca864439a935a596c88489c48f5655b602d12d376939a5fd440902e7b220b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbq5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jxfkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:28Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.362107 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.362138 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.362149 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.362163 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.362173 4811 setters.go:603] "Node became not ready" 
node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:28Z","lastTransitionTime":"2026-01-28T15:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.372539 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a95a634-aec8-42cd-889d-ababff22b6f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"l
astState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03015015ac6736269b093c13f4df7abdad3a954c3336cc0b2de2e44cc5ae8338\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"ttp/1.1 only\\\\nI0128 15:45:28.245763 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0128 15:45:28.245855 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245897 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0128 15:45:28.245932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0128 15:45:28.245962 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0128 15:45:28.245992 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0128 15:45:28.246021 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0128 15:45:28.249006 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0128 15:45:28.249032 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249055 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0128 15:45:28.249057 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0128 15:45:28.249076 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0128 15:45:28.249041 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF0128 15:45:28.249017 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0128 15:45:28.250658 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"serving-cert::/tmp/serving-cert-1583543770/tls.crt::/tmp/serving-cert-1583543770/tls.key\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:28Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.394146 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a5f707-034b-4e7f-97e1-90394f5ce394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e355206a53a3af8e3b780a682191ee9bb1390405b4ad83796b8898de7c10bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67efb98fe17a428b3c598f462677667a3059b7040ae71d2113f4bcc72ce7539\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://981f2546458c3d8f0e324195b358a623026cc110fbc782242031aa3aa41a9ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf028f15fc4a8b52badb3730b45c5c4d5fc51b
243e0b611e03f8a83f7c26b01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f666d6245318fe932f7bfa7bda32e87e65a482c79d274b88bcf88c2b49f36de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37825289b6801749895b970e166242d1de98be11526214337837f628756bccd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85c97e89c1c91ccc9707a998daacc43db6e68af7063d928e07921fb02aacf96\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296c52c801967a9cec3f8f433b3b199ec73fc9bd381aa1fbadf51c24443e8660\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:28Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.408472 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaa500a2f4b0befe4ab96d61e9f138797a4c370375229210fc2286a8925b15f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://835f897cf48f1505e8769e21b7449dfb516bfb3ff7561709220054c1a9d0d825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:28Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.433991 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:28Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.455562 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41421640-f722-4edf-a0d5-b120f095246a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b6be9a85b332950bbbde5670c0ed3366ce07d2df9b7a68815b6582e8cf874f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247b77a38d1956e6e6ca91636f42ea42af4b77deedb49a7fd7c80821ffad988d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779c41770168d338f1639cf4a0c7e56ca3639b772c625f0fd8d5724cba7861bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4bb4d3380b541081bccbf7db7f8def31fff8470f51780d54e454f2f190b0928\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6648e5c5d532d261600cc4cf049eb9483cd613dbb747be1ab199c13ece4a2e35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f23ece6a1b464382a3b84fc0613a272a9329e05a80b2a555d41166b9031dbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4614eff3a7d17db76ab5c3b1f4f266465c71e7563f87400aa6dc4d8d3cdee1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ghjz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fbkfh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:28Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.463840 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.463975 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.463985 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.464000 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.464011 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:28Z","lastTransitionTime":"2026-01-28T15:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.467780 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2a0331a-ea7c-4888-a47e-e73015ab42e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639833caf3540623a605258de6a1ebf453838caf8a5d69ec98522619ecba0a54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6zfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vcr6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:28Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.492451 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d3371905-8bb6-433c-b905-43d69e7b382a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a7c911b8901e1963fe372b87999b0e5961f4fe4a184c52d659bee9aece40d16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a7c911b8901e1963fe372b87999b0e5961f4fe4a184c52d659bee9aece40d16\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:46:25Z\\\",\\\"message\\\":\\\"ork controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:25Z is after 2025-08-24T17:21:41Z]\\\\nI0128 15:46:25.329679 6855 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0128 15:46:25.329714 6855 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.92 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {73135118-cf1b-4568-bd31-2f50308bf69d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0128 15:46:25.329700 6855 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]} 
options:{GoMap:map[iface-id-ver:9d751c\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:46:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-dkzf6_openshift-ovn-kubernetes(d3371905-8bb6-433c-b905-43d69e7b382a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j4mzj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dkzf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:28Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.512514 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c725da74-d9b4-4f16-9a21-f9aae2fd05f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63a7b85207f94aa6d9180a5e207421fe63993dbedbac93b11a5b7ad1b23bdd78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e528919fa4f8c2b8108615f2dd2d79d790aea2e94bb7b2de7bb1cd188fd47ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ede5aafa1ce2ebfbaa3570ec43084884dc57d2d15dc5d2751c3b5a9072d8dad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c990287111409ab3f1f5210adb2b4da06714d33e695ac76e2f83f40073dfca14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:28Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.527712 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45f0fa8e-5670-4ba9-bc3b-868ceb764c20\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6aeff9c9b52faf757610027512ade23bf2c2b89068e97507c51b88d9f3a76c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://566c2ee1b03593202e07f16e8b1a976ed27ca7036a10b96f7eebce30147baf3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59cd87b55159f29922118c39ffc75f0b23d700438e9dd590f82a14c1880ed86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://296eeb684557d95622f81bb202a9d96da8036a904b97539ea34b546cfd37f274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296eeb684557d95622f81bb202a9d96da8036a904b97539ea34b546cfd37f274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-28T15:45:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:09Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:08Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:28Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.543094 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready 
status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:28Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.555510 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cd547f654d44b4edc1d2f984b3d1e4a972a4d96f62f02429d381cbbe0a00f52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:28Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.566514 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.566570 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.566581 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.566597 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.566633 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:28Z","lastTransitionTime":"2026-01-28T15:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.567653 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:28Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.578065 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fwdqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c138c07-9108-4af5-b2e0-623109bbdf9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a6786216986b5b79f685fddb0711a6c7a735ff6b163a9516399cde7f85a8f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s24s2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fwdqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:28Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.590766 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bc743ea95a287501b48d53463d1751e500bf85a1bae2ea5dc2ddf4ad4ad5723\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:28Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.603660 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mw4th" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc61e84d-a003-46db-924b-7f9ba7460f13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://204fd678c7f2d914319df1c9cc694ffc516812b85a27536428e0e91fecb39c33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-28T15:46:16Z\\\",\\\"message\\\":\\\"2026-01-28T15:45:30+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1836a187-742c-4845-ab73-ea4bd9669a11\\\\n2026-01-28T15:45:30+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1836a187-742c-4845-ab73-ea4bd9669a11 to /host/opt/cni/bin/\\\\n2026-01-28T15:45:31Z [verbose] multus-daemon started\\\\n2026-01-28T15:45:31Z [verbose] Readiness Indicator file check\\\\n2026-01-28T15:46:16Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-28T15:45:28Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zklcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mw4th\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:28Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.614786 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac9a279-ca90-41f7-bf8d-d51717483598\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a90ec64eeca27c3b374baa74b14b2380008b4863ad6d3618d55f1dce62ea37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2803e1abf925bb53b578077c2bd42241a09b9c0bd5181c2fab6f0e09fda4ee18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-28T15:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5j4mb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dj8kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:28Z is after 2025-08-24T17:21:41Z" Jan 28 
15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.624689 4811 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5mlc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb3aac9c-e96e-4a5e-beb5-aeff56394467\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T15:45:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x8llm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T15:45:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5mlc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-28T15:46:28Z is after 2025-08-24T17:21:41Z" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.669047 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.669085 4811 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.669093 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.669106 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.669115 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:28Z","lastTransitionTime":"2026-01-28T15:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.771887 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.771963 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.771981 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.772586 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.772650 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:28Z","lastTransitionTime":"2026-01-28T15:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.875894 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.875938 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.875952 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.875973 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.875989 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:28Z","lastTransitionTime":"2026-01-28T15:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.979309 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.979412 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.979481 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.979513 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:28 crc kubenswrapper[4811]: I0128 15:46:28.979536 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:28Z","lastTransitionTime":"2026-01-28T15:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.082944 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.082996 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.083016 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.083044 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.083065 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:29Z","lastTransitionTime":"2026-01-28T15:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.186673 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.186770 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.186781 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.186797 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.186808 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:29Z","lastTransitionTime":"2026-01-28T15:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.289856 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.289907 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.289924 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.289949 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.289968 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:29Z","lastTransitionTime":"2026-01-28T15:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.339729 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 02:45:16.637130033 +0000 UTC Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.392639 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.392702 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.392720 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.392743 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.392759 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:29Z","lastTransitionTime":"2026-01-28T15:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.495381 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.495465 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.495484 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.495506 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.495523 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:29Z","lastTransitionTime":"2026-01-28T15:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.598787 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.598845 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.598869 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.598899 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.598921 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:29Z","lastTransitionTime":"2026-01-28T15:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.702722 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.702761 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.702773 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.702791 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.702803 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:29Z","lastTransitionTime":"2026-01-28T15:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.804922 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.804982 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.804992 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.805008 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.805018 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:29Z","lastTransitionTime":"2026-01-28T15:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.907540 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.907603 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.907629 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.907650 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:29 crc kubenswrapper[4811]: I0128 15:46:29.907665 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:29Z","lastTransitionTime":"2026-01-28T15:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.011310 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.011367 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.011382 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.011470 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.011493 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:30Z","lastTransitionTime":"2026-01-28T15:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.115200 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.115259 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.115273 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.115293 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.115312 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:30Z","lastTransitionTime":"2026-01-28T15:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.218822 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.218894 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.218915 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.218945 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.218966 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:30Z","lastTransitionTime":"2026-01-28T15:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.322189 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.322261 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.322275 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.322292 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.322303 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:30Z","lastTransitionTime":"2026-01-28T15:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.338933 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.339018 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.339022 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.339098 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:46:30 crc kubenswrapper[4811]: E0128 15:46:30.339097 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:46:30 crc kubenswrapper[4811]: E0128 15:46:30.339310 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:46:30 crc kubenswrapper[4811]: E0128 15:46:30.339462 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:46:30 crc kubenswrapper[4811]: E0128 15:46:30.339498 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.339940 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 16:37:34.95992761 +0000 UTC Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.425550 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.425628 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.425647 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.425674 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.425693 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:30Z","lastTransitionTime":"2026-01-28T15:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.528754 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.528822 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.528838 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.528859 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.528874 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:30Z","lastTransitionTime":"2026-01-28T15:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.631799 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.631893 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.631928 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.631963 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.632003 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:30Z","lastTransitionTime":"2026-01-28T15:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.735996 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.736045 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.736059 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.736079 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.736095 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:30Z","lastTransitionTime":"2026-01-28T15:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.838958 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.838993 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.839001 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.839014 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.839022 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:30Z","lastTransitionTime":"2026-01-28T15:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.941817 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.941865 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.941879 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.941908 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:30 crc kubenswrapper[4811]: I0128 15:46:30.941925 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:30Z","lastTransitionTime":"2026-01-28T15:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.044036 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.044096 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.044108 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.044127 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.044139 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:31Z","lastTransitionTime":"2026-01-28T15:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.146595 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.146626 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.146636 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.146649 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.146658 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:31Z","lastTransitionTime":"2026-01-28T15:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.249248 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.249285 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.249294 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.249309 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.249319 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:31Z","lastTransitionTime":"2026-01-28T15:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.340594 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 02:03:00.415052715 +0000 UTC Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.351169 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.351204 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.351212 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.351223 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.351230 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:31Z","lastTransitionTime":"2026-01-28T15:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.454710 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.454753 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.454764 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.454779 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.454790 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:31Z","lastTransitionTime":"2026-01-28T15:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.557607 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.557751 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.557797 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.558106 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.558153 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:31Z","lastTransitionTime":"2026-01-28T15:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.660894 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.660952 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.660964 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.660982 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.660994 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:31Z","lastTransitionTime":"2026-01-28T15:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.763604 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.763650 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.763661 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.763683 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.763695 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:31Z","lastTransitionTime":"2026-01-28T15:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.866577 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.866627 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.866651 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.866680 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.866701 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:31Z","lastTransitionTime":"2026-01-28T15:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.969347 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.969419 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.969472 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.969501 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:31 crc kubenswrapper[4811]: I0128 15:46:31.969521 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:31Z","lastTransitionTime":"2026-01-28T15:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.072405 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.072457 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.072465 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.072480 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.072489 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:32Z","lastTransitionTime":"2026-01-28T15:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.174522 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.174628 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.174640 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.174659 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.174669 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:32Z","lastTransitionTime":"2026-01-28T15:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.212289 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.212458 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:46:32 crc kubenswrapper[4811]: E0128 15:46:32.212550 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
Jan 28 15:46:32 crc kubenswrapper[4811]: E0128 15:46:32.212685 4811 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Jan 28 15:46:32 crc kubenswrapper[4811]: E0128 15:46:32.212825 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-28 15:47:36.212789946 +0000 UTC m=+148.967153569 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.277408 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.277476 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.277485 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.277499 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.277508 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:32Z","lastTransitionTime":"2026-01-28T15:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
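
Both nestedpendingoperations.go failures above schedule the next retry 1m4s out. A capped exponential backoff reproduces that figure if the initial delay is 500ms and each failure doubles it; those two constants are assumptions chosen for illustration, not values read from this kubelet. A runnable sketch:

// Sketch of the capped exponential backoff implied by "durationBeforeRetry 1m4s".
// initialDelay and maxDelay are assumed constants; the kubelet's real values
// live in its volume manager and may differ.
package main

import (
	"fmt"
	"time"
)

const (
	initialDelay = 500 * time.Millisecond // assumed starting delay
	maxDelay     = 2 * time.Minute        // assumed cap
)

// delayForFailure returns the wait before retrying after the n-th
// consecutive failure (n starts at 1), doubling up to the cap.
func delayForFailure(n int) time.Duration {
	d := initialDelay
	for i := 1; i < n; i++ {
		d *= 2
		if d >= maxDelay {
			return maxDelay
		}
	}
	return d
}

func main() {
	for n := 1; n <= 9; n++ {
		fmt.Printf("failure %d -> retry in %s\n", n, delayForFailure(n))
	}
	// Under these assumptions, failure 8 -> retry in 1m4s, matching the log.
}
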
Has your network provider started?"} Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.313381 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.313465 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.313495 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:46:32 crc kubenswrapper[4811]: E0128 15:46:32.313607 4811 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 28 15:46:32 crc kubenswrapper[4811]: E0128 15:46:32.313623 4811 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 28 15:46:32 crc kubenswrapper[4811]: E0128 15:46:32.313632 4811 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 28 15:46:32 crc kubenswrapper[4811]: E0128 15:46:32.313638 4811 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 28 15:46:32 crc kubenswrapper[4811]: E0128 15:46:32.313672 4811 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 28 15:46:32 crc kubenswrapper[4811]: E0128 15:46:32.313708 4811 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 28 15:46:32 crc kubenswrapper[4811]: E0128 15:46:32.313684 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-28 15:47:36.313670355 +0000 UTC m=+149.068033938 (durationBeforeRetry 1m4s). 
Jan 28 15:46:32 crc kubenswrapper[4811]: E0128 15:46:32.313723 4811 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 28 15:46:32 crc kubenswrapper[4811]: E0128 15:46:32.313745 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-28 15:47:36.313726887 +0000 UTC m=+149.068090540 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 28 15:46:32 crc kubenswrapper[4811]: E0128 15:46:32.313850 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-28 15:47:36.31383351 +0000 UTC m=+149.068197193 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.338644 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc"
Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.338707 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 28 15:46:32 crc kubenswrapper[4811]: E0128 15:46:32.338787 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467"
Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.338664 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.338847 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
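
The pod_workers.go entries above and below skip pod sync while the runtime reports NetworkReady=false because no CNI configuration file exists in /etc/kubernetes/cni/net.d/. A minimal sketch approximating that directory check (not CRI-O's actual implementation):

// Sketch only: report network readiness based on whether any CNI config
// file is present in the confDir named in the log messages.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func networkReady(confDir string) bool {
	// Look for any plausible CNI config file; the accepted extensions
	// here are an assumption for illustration.
	for _, pattern := range []string{"*.conf", "*.conflist", "*.json"} {
		matches, err := filepath.Glob(filepath.Join(confDir, pattern))
		if err == nil && len(matches) > 0 {
			return true
		}
	}
	return false
}

func main() {
	dir := "/etc/kubernetes/cni/net.d"
	if networkReady(dir) {
		fmt.Println("NetworkReady=true")
		return
	}
	fmt.Printf("NetworkReady=false: no CNI configuration file in %s. Has your network provider started?\n", dir)
	os.Exit(1)
}
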
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:46:32 crc kubenswrapper[4811]: E0128 15:46:32.338948 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:46:32 crc kubenswrapper[4811]: E0128 15:46:32.339013 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:46:32 crc kubenswrapper[4811]: E0128 15:46:32.339077 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.340749 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 19:14:20.233090875 +0000 UTC Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.379619 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.379666 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.379678 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.379695 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.379708 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:32Z","lastTransitionTime":"2026-01-28T15:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.481781 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.481815 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.481823 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.481837 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.481849 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:32Z","lastTransitionTime":"2026-01-28T15:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.584970 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.585033 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.585050 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.585078 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.585098 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:32Z","lastTransitionTime":"2026-01-28T15:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.688399 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.688448 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.688459 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.688473 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.688482 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:32Z","lastTransitionTime":"2026-01-28T15:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.791252 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.791300 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.791313 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.791333 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.791355 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:32Z","lastTransitionTime":"2026-01-28T15:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.894870 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.894911 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.894920 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.894934 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:32 crc kubenswrapper[4811]: I0128 15:46:32.894943 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:32Z","lastTransitionTime":"2026-01-28T15:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.003161 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.003241 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.003253 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.003276 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.003293 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:33Z","lastTransitionTime":"2026-01-28T15:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.106755 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.107228 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.107242 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.107263 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.107276 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:33Z","lastTransitionTime":"2026-01-28T15:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.210632 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.210725 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.210751 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.210784 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.210872 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:33Z","lastTransitionTime":"2026-01-28T15:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.313374 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.313421 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.313448 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.313467 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.313481 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:33Z","lastTransitionTime":"2026-01-28T15:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.341007 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 16:01:20.348086575 +0000 UTC Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.415808 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.416007 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.416038 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.416059 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.416078 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:33Z","lastTransitionTime":"2026-01-28T15:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.518565 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.518634 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.518655 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.518687 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.518715 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:33Z","lastTransitionTime":"2026-01-28T15:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.621921 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.621994 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.622018 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.622050 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.622072 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:33Z","lastTransitionTime":"2026-01-28T15:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.729012 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.729103 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.729127 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.729168 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.729185 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:33Z","lastTransitionTime":"2026-01-28T15:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.832535 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.832639 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.832656 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.832683 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.832702 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:33Z","lastTransitionTime":"2026-01-28T15:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.935047 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.935098 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.935107 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.935126 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:33 crc kubenswrapper[4811]: I0128 15:46:33.935139 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:33Z","lastTransitionTime":"2026-01-28T15:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.037526 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.037567 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.037578 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.037593 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.037605 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:34Z","lastTransitionTime":"2026-01-28T15:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.140750 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.140817 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.140834 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.140856 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.140874 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:34Z","lastTransitionTime":"2026-01-28T15:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.244815 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.244892 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.244908 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.244935 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.244955 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:34Z","lastTransitionTime":"2026-01-28T15:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.339250 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.339257 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:46:34 crc kubenswrapper[4811]: E0128 15:46:34.339487 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:46:34 crc kubenswrapper[4811]: E0128 15:46:34.339592 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.339771 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:46:34 crc kubenswrapper[4811]: E0128 15:46:34.339968 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.340014 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:46:34 crc kubenswrapper[4811]: E0128 15:46:34.340202 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.341167 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 07:24:56.268506639 +0000 UTC Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.348091 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.348193 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.348260 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.348331 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.348355 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:34Z","lastTransitionTime":"2026-01-28T15:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.451510 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.451548 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.451559 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.451576 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.451588 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:34Z","lastTransitionTime":"2026-01-28T15:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.554268 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.554393 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.554504 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.554565 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.554592 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:34Z","lastTransitionTime":"2026-01-28T15:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.657560 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.657617 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.657628 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.657652 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.657664 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:34Z","lastTransitionTime":"2026-01-28T15:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.761106 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.761160 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.761172 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.761191 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.761204 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:34Z","lastTransitionTime":"2026-01-28T15:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.863943 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.863983 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.863992 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.864007 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.864018 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:34Z","lastTransitionTime":"2026-01-28T15:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.967053 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.967103 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.967118 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.967138 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:34 crc kubenswrapper[4811]: I0128 15:46:34.967149 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:34Z","lastTransitionTime":"2026-01-28T15:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.070830 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.070917 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.070935 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.070961 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.070978 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:35Z","lastTransitionTime":"2026-01-28T15:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.173675 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.173728 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.173874 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.173912 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.173930 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:35Z","lastTransitionTime":"2026-01-28T15:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.277009 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.277092 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.277110 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.277135 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.277152 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:35Z","lastTransitionTime":"2026-01-28T15:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.341999 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 18:54:04.691311279 +0000 UTC Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.380205 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.380267 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.380283 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.380306 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.380324 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:35Z","lastTransitionTime":"2026-01-28T15:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.483082 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.483158 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.483182 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.483213 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.483230 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:35Z","lastTransitionTime":"2026-01-28T15:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.586570 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.586618 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.586629 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.586647 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.586662 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:35Z","lastTransitionTime":"2026-01-28T15:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.688291 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.688331 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.688339 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.688351 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.688359 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:35Z","lastTransitionTime":"2026-01-28T15:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.791944 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.792396 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.792413 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.792461 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.792478 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:35Z","lastTransitionTime":"2026-01-28T15:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.895817 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.895882 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.895901 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.895931 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.895944 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:35Z","lastTransitionTime":"2026-01-28T15:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.998661 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.998742 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.998766 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.998795 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:35 crc kubenswrapper[4811]: I0128 15:46:35.998817 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:35Z","lastTransitionTime":"2026-01-28T15:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.101320 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.101360 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.101368 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.101401 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.101411 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:36Z","lastTransitionTime":"2026-01-28T15:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.160048 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.160095 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.160107 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.160125 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.160138 4811 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-28T15:46:36Z","lastTransitionTime":"2026-01-28T15:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.219856 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-cpch6"]
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.220316 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cpch6"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.222136 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.223549 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.223825 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.223992 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.278004 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-jxfkb" podStartSLOduration=69.277986727 podStartE2EDuration="1m9.277986727s" podCreationTimestamp="2026-01-28 15:45:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:46:36.263534166 +0000 UTC m=+89.017897789" watchObservedRunningTime="2026-01-28 15:46:36.277986727 +0000 UTC m=+89.032350310"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.278259 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=68.278255244 podStartE2EDuration="1m8.278255244s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:46:36.277701169 +0000 UTC m=+89.032064762" watchObservedRunningTime="2026-01-28 15:46:36.278255244 +0000 UTC m=+89.032618827"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.303100 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=68.303081753 podStartE2EDuration="1m8.303081753s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:46:36.302418265 +0000 UTC m=+89.056781888" watchObservedRunningTime="2026-01-28 15:46:36.303081753 +0000 UTC m=+89.057445336"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.339171 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.339215 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.339215 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.339257 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc"
Jan 28 15:46:36 crc kubenswrapper[4811]: E0128 15:46:36.339290 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 28 15:46:36 crc kubenswrapper[4811]: E0128 15:46:36.339424 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467"
Jan 28 15:46:36 crc kubenswrapper[4811]: E0128 15:46:36.339485 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 28 15:46:36 crc kubenswrapper[4811]: E0128 15:46:36.339533 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.342116 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 04:59:33.56159289 +0000 UTC
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.342175 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Rotating certificates
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.344223 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-fbkfh" podStartSLOduration=68.344207205 podStartE2EDuration="1m8.344207205s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:46:36.342310752 +0000 UTC m=+89.096674355" watchObservedRunningTime="2026-01-28 15:46:36.344207205 +0000 UTC m=+89.098570788"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.351594 4811 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.357480 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4a043b9c-8026-441a-88db-d6ea1a9f57fc-service-ca\") pod \"cluster-version-operator-5c965bbfc6-cpch6\" (UID: \"4a043b9c-8026-441a-88db-d6ea1a9f57fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cpch6"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.357524 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/4a043b9c-8026-441a-88db-d6ea1a9f57fc-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-cpch6\" (UID: \"4a043b9c-8026-441a-88db-d6ea1a9f57fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cpch6"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.357553 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4a043b9c-8026-441a-88db-d6ea1a9f57fc-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-cpch6\" (UID: \"4a043b9c-8026-441a-88db-d6ea1a9f57fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cpch6"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.357576 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/4a043b9c-8026-441a-88db-d6ea1a9f57fc-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-cpch6\" (UID: \"4a043b9c-8026-441a-88db-d6ea1a9f57fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cpch6"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.357643 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4a043b9c-8026-441a-88db-d6ea1a9f57fc-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-cpch6\" (UID: \"4a043b9c-8026-441a-88db-d6ea1a9f57fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cpch6"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.358587 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podStartSLOduration=69.358568073 podStartE2EDuration="1m9.358568073s" podCreationTimestamp="2026-01-28 15:45:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:46:36.358546923 +0000 UTC m=+89.112910496" watchObservedRunningTime="2026-01-28 15:46:36.358568073 +0000 UTC m=+89.112931656"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.393031 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=62.393010128 podStartE2EDuration="1m2.393010128s" podCreationTimestamp="2026-01-28 15:45:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:46:36.392576827 +0000 UTC m=+89.146940430" watchObservedRunningTime="2026-01-28 15:46:36.393010128 +0000 UTC m=+89.147373711"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.407202 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=41.407183392 podStartE2EDuration="41.407183392s" podCreationTimestamp="2026-01-28 15:45:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:46:36.406566135 +0000 UTC m=+89.160929748" watchObservedRunningTime="2026-01-28 15:46:36.407183392 +0000 UTC m=+89.161546975"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.427740 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-fwdqb" podStartSLOduration=69.427719572 podStartE2EDuration="1m9.427719572s" podCreationTimestamp="2026-01-28 15:45:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:46:36.427505066 +0000 UTC m=+89.181868669" watchObservedRunningTime="2026-01-28 15:46:36.427719572 +0000 UTC m=+89.182083165"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.458880 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4a043b9c-8026-441a-88db-d6ea1a9f57fc-service-ca\") pod \"cluster-version-operator-5c965bbfc6-cpch6\" (UID: \"4a043b9c-8026-441a-88db-d6ea1a9f57fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cpch6"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.458926 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/4a043b9c-8026-441a-88db-d6ea1a9f57fc-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-cpch6\" (UID: \"4a043b9c-8026-441a-88db-d6ea1a9f57fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cpch6"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.458952 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4a043b9c-8026-441a-88db-d6ea1a9f57fc-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-cpch6\" (UID: \"4a043b9c-8026-441a-88db-d6ea1a9f57fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cpch6"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.458971 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/4a043b9c-8026-441a-88db-d6ea1a9f57fc-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-cpch6\" (UID: \"4a043b9c-8026-441a-88db-d6ea1a9f57fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cpch6"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.459040 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/4a043b9c-8026-441a-88db-d6ea1a9f57fc-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-cpch6\" (UID: \"4a043b9c-8026-441a-88db-d6ea1a9f57fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cpch6"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.459079 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/4a043b9c-8026-441a-88db-d6ea1a9f57fc-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-cpch6\" (UID: \"4a043b9c-8026-441a-88db-d6ea1a9f57fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cpch6"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.459576 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4a043b9c-8026-441a-88db-d6ea1a9f57fc-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-cpch6\" (UID: \"4a043b9c-8026-441a-88db-d6ea1a9f57fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cpch6"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.459887 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4a043b9c-8026-441a-88db-d6ea1a9f57fc-service-ca\") pod \"cluster-version-operator-5c965bbfc6-cpch6\" (UID: \"4a043b9c-8026-441a-88db-d6ea1a9f57fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cpch6"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.464207 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4a043b9c-8026-441a-88db-d6ea1a9f57fc-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-cpch6\" (UID: \"4a043b9c-8026-441a-88db-d6ea1a9f57fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cpch6"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.475397 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4a043b9c-8026-441a-88db-d6ea1a9f57fc-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-cpch6\" (UID: \"4a043b9c-8026-441a-88db-d6ea1a9f57fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cpch6"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.504385 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-mw4th" podStartSLOduration=69.504366829 podStartE2EDuration="1m9.504366829s" podCreationTimestamp="2026-01-28 15:45:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:46:36.494819334 +0000 UTC m=+89.249182917" watchObservedRunningTime="2026-01-28 15:46:36.504366829 +0000 UTC m=+89.258730412"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.505089 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dj8kl" podStartSLOduration=68.505086108 podStartE2EDuration="1m8.505086108s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:46:36.503758062 +0000 UTC m=+89.258121645" watchObservedRunningTime="2026-01-28 15:46:36.505086108 +0000 UTC m=+89.259449691"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.547009 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cpch6"
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.832224 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cpch6" event={"ID":"4a043b9c-8026-441a-88db-d6ea1a9f57fc","Type":"ContainerStarted","Data":"836b14fa13eb2594edb30b4c07f1c5452a781ecd4ffa6c039b920141aecf13c9"}
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.832295 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cpch6" event={"ID":"4a043b9c-8026-441a-88db-d6ea1a9f57fc","Type":"ContainerStarted","Data":"73804e4fe875eddcf6063ccf7dc266a143ce00b60f55a4289ea7d3f37e43c953"}
Jan 28 15:46:36 crc kubenswrapper[4811]: I0128 15:46:36.851411 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cpch6" podStartSLOduration=69.851381378 podStartE2EDuration="1m9.851381378s" podCreationTimestamp="2026-01-28 15:45:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:46:36.850537364 +0000 UTC m=+89.604900947" watchObservedRunningTime="2026-01-28 15:46:36.851381378 +0000 UTC m=+89.605745061"
Jan 28 15:46:37 crc kubenswrapper[4811]: I0128 15:46:37.351156 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"]
Jan 28 15:46:38 crc kubenswrapper[4811]: I0128 15:46:38.339327 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 28 15:46:38 crc kubenswrapper[4811]: I0128 15:46:38.339350 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 28 15:46:38 crc kubenswrapper[4811]: I0128 15:46:38.339410 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 28 15:46:38 crc kubenswrapper[4811]: E0128 15:46:38.342317 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
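
The pod_startup_latency_tracker entries above all follow one formula: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration additionally subtracts the image-pull window (firstStartedPulling through lastFinishedPulling), which is zero for every pod here since both pull timestamps are the zero time. A small sketch of the arithmetic, reproducing the node-resolver-jxfkb numbers from the log:

// startuplatency.go: sketch of the startup-duration arithmetic.
package main

import (
	"fmt"
	"time"
)

func main() {
	created := time.Date(2026, 1, 28, 15, 45, 27, 0, time.UTC)
	observedRunning := time.Date(2026, 1, 28, 15, 46, 36, 277986727, time.UTC)
	var pullTime time.Duration // no image-pull window recorded in the log above

	e2e := observedRunning.Sub(created)
	slo := e2e - pullTime
	fmt.Printf("podStartE2EDuration=%s podStartSLOduration=%s\n", e2e, slo)
	// prints: podStartE2EDuration=1m9.277986727s podStartSLOduration=1m9.277986727s
}
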
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:46:38 crc kubenswrapper[4811]: I0128 15:46:38.342385 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:46:38 crc kubenswrapper[4811]: E0128 15:46:38.342528 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:46:38 crc kubenswrapper[4811]: E0128 15:46:38.342637 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:46:38 crc kubenswrapper[4811]: E0128 15:46:38.342726 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:46:38 crc kubenswrapper[4811]: I0128 15:46:38.356741 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=1.356712058 podStartE2EDuration="1.356712058s" podCreationTimestamp="2026-01-28 15:46:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:46:38.352670146 +0000 UTC m=+91.107033719" watchObservedRunningTime="2026-01-28 15:46:38.356712058 +0000 UTC m=+91.111075691" Jan 28 15:46:40 crc kubenswrapper[4811]: I0128 15:46:40.371751 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:46:40 crc kubenswrapper[4811]: I0128 15:46:40.371837 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:46:40 crc kubenswrapper[4811]: I0128 15:46:40.371862 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:46:40 crc kubenswrapper[4811]: E0128 15:46:40.372258 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:46:40 crc kubenswrapper[4811]: E0128 15:46:40.372484 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:46:40 crc kubenswrapper[4811]: I0128 15:46:40.372503 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:46:40 crc kubenswrapper[4811]: E0128 15:46:40.372581 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:46:40 crc kubenswrapper[4811]: I0128 15:46:40.372717 4811 scope.go:117] "RemoveContainer" containerID="5a7c911b8901e1963fe372b87999b0e5961f4fe4a184c52d659bee9aece40d16" Jan 28 15:46:40 crc kubenswrapper[4811]: E0128 15:46:40.372735 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:46:40 crc kubenswrapper[4811]: E0128 15:46:40.372909 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-dkzf6_openshift-ovn-kubernetes(d3371905-8bb6-433c-b905-43d69e7b382a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" Jan 28 15:46:42 crc kubenswrapper[4811]: I0128 15:46:42.338960 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:46:42 crc kubenswrapper[4811]: I0128 15:46:42.339006 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:46:42 crc kubenswrapper[4811]: I0128 15:46:42.338991 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:46:42 crc kubenswrapper[4811]: I0128 15:46:42.339132 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:46:42 crc kubenswrapper[4811]: E0128 15:46:42.339319 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 28 15:46:42 crc kubenswrapper[4811]: E0128 15:46:42.339423 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467"
Jan 28 15:46:42 crc kubenswrapper[4811]: E0128 15:46:42.339567 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 28 15:46:42 crc kubenswrapper[4811]: E0128 15:46:42.339760 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 28 15:46:44 crc kubenswrapper[4811]: I0128 15:46:44.338558 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 28 15:46:44 crc kubenswrapper[4811]: I0128 15:46:44.338626 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 28 15:46:44 crc kubenswrapper[4811]: I0128 15:46:44.339592 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 28 15:46:44 crc kubenswrapper[4811]: I0128 15:46:44.339620 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc"
Jan 28 15:46:44 crc kubenswrapper[4811]: E0128 15:46:44.338773 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 28 15:46:44 crc kubenswrapper[4811]: E0128 15:46:44.339824 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 28 15:46:44 crc kubenswrapper[4811]: E0128 15:46:44.339960 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467"
Jan 28 15:46:44 crc kubenswrapper[4811]: E0128 15:46:44.340053 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 28 15:46:45 crc kubenswrapper[4811]: I0128 15:46:45.922266 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bb3aac9c-e96e-4a5e-beb5-aeff56394467-metrics-certs\") pod \"network-metrics-daemon-t5mlc\" (UID: \"bb3aac9c-e96e-4a5e-beb5-aeff56394467\") " pod="openshift-multus/network-metrics-daemon-t5mlc"
Jan 28 15:46:45 crc kubenswrapper[4811]: E0128 15:46:45.922502 4811 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 28 15:46:45 crc kubenswrapper[4811]: E0128 15:46:45.922650 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bb3aac9c-e96e-4a5e-beb5-aeff56394467-metrics-certs podName:bb3aac9c-e96e-4a5e-beb5-aeff56394467 nodeName:}" failed. No retries permitted until 2026-01-28 15:47:49.922621478 +0000 UTC m=+162.676985071 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bb3aac9c-e96e-4a5e-beb5-aeff56394467-metrics-certs") pod "network-metrics-daemon-t5mlc" (UID: "bb3aac9c-e96e-4a5e-beb5-aeff56394467") : object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 28 15:46:46 crc kubenswrapper[4811]: I0128 15:46:46.339060 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 28 15:46:46 crc kubenswrapper[4811]: I0128 15:46:46.339195 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 28 15:46:46 crc kubenswrapper[4811]: I0128 15:46:46.339233 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 28 15:46:46 crc kubenswrapper[4811]: E0128 15:46:46.339320 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 28 15:46:46 crc kubenswrapper[4811]: I0128 15:46:46.339386 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc"
Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:46:46 crc kubenswrapper[4811]: E0128 15:46:46.339470 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:46:46 crc kubenswrapper[4811]: E0128 15:46:46.339793 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:46:46 crc kubenswrapper[4811]: E0128 15:46:46.339869 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:46:48 crc kubenswrapper[4811]: I0128 15:46:48.338547 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:46:48 crc kubenswrapper[4811]: I0128 15:46:48.338616 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:46:48 crc kubenswrapper[4811]: I0128 15:46:48.338637 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:46:48 crc kubenswrapper[4811]: I0128 15:46:48.338672 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:46:48 crc kubenswrapper[4811]: E0128 15:46:48.340637 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:46:48 crc kubenswrapper[4811]: E0128 15:46:48.340788 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:46:48 crc kubenswrapper[4811]: E0128 15:46:48.340889 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:46:48 crc kubenswrapper[4811]: E0128 15:46:48.341035 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:46:50 crc kubenswrapper[4811]: I0128 15:46:50.339159 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:46:50 crc kubenswrapper[4811]: I0128 15:46:50.339251 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:46:50 crc kubenswrapper[4811]: I0128 15:46:50.339243 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:46:50 crc kubenswrapper[4811]: I0128 15:46:50.339183 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:46:50 crc kubenswrapper[4811]: E0128 15:46:50.339368 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:46:50 crc kubenswrapper[4811]: E0128 15:46:50.339552 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:46:50 crc kubenswrapper[4811]: E0128 15:46:50.339633 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:46:50 crc kubenswrapper[4811]: E0128 15:46:50.339727 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:46:52 crc kubenswrapper[4811]: I0128 15:46:52.339507 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:46:52 crc kubenswrapper[4811]: I0128 15:46:52.339582 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:46:52 crc kubenswrapper[4811]: I0128 15:46:52.339548 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:46:52 crc kubenswrapper[4811]: I0128 15:46:52.339420 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:46:52 crc kubenswrapper[4811]: E0128 15:46:52.339806 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:46:52 crc kubenswrapper[4811]: E0128 15:46:52.339947 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:46:52 crc kubenswrapper[4811]: E0128 15:46:52.340100 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:46:52 crc kubenswrapper[4811]: E0128 15:46:52.340400 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:46:54 crc kubenswrapper[4811]: I0128 15:46:54.338551 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:46:54 crc kubenswrapper[4811]: I0128 15:46:54.338706 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:46:54 crc kubenswrapper[4811]: E0128 15:46:54.338806 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:46:54 crc kubenswrapper[4811]: I0128 15:46:54.338839 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:46:54 crc kubenswrapper[4811]: I0128 15:46:54.338866 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:46:54 crc kubenswrapper[4811]: E0128 15:46:54.338991 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:46:54 crc kubenswrapper[4811]: E0128 15:46:54.339010 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:46:54 crc kubenswrapper[4811]: E0128 15:46:54.339069 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:46:55 crc kubenswrapper[4811]: I0128 15:46:55.342875 4811 scope.go:117] "RemoveContainer" containerID="5a7c911b8901e1963fe372b87999b0e5961f4fe4a184c52d659bee9aece40d16" Jan 28 15:46:55 crc kubenswrapper[4811]: E0128 15:46:55.343573 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-dkzf6_openshift-ovn-kubernetes(d3371905-8bb6-433c-b905-43d69e7b382a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" Jan 28 15:46:56 crc kubenswrapper[4811]: I0128 15:46:56.339390 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:46:56 crc kubenswrapper[4811]: I0128 15:46:56.339535 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:46:56 crc kubenswrapper[4811]: E0128 15:46:56.339740 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:46:56 crc kubenswrapper[4811]: I0128 15:46:56.339813 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:46:56 crc kubenswrapper[4811]: I0128 15:46:56.339827 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:46:56 crc kubenswrapper[4811]: E0128 15:46:56.339911 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:46:56 crc kubenswrapper[4811]: E0128 15:46:56.340152 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:46:56 crc kubenswrapper[4811]: E0128 15:46:56.340935 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:46:58 crc kubenswrapper[4811]: I0128 15:46:58.339962 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:46:58 crc kubenswrapper[4811]: I0128 15:46:58.340023 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:46:58 crc kubenswrapper[4811]: I0128 15:46:58.339966 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:46:58 crc kubenswrapper[4811]: I0128 15:46:58.340070 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:46:58 crc kubenswrapper[4811]: E0128 15:46:58.342287 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:46:58 crc kubenswrapper[4811]: E0128 15:46:58.342418 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:46:58 crc kubenswrapper[4811]: E0128 15:46:58.342789 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:46:58 crc kubenswrapper[4811]: E0128 15:46:58.342961 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:47:00 crc kubenswrapper[4811]: I0128 15:47:00.339357 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:47:00 crc kubenswrapper[4811]: I0128 15:47:00.339421 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:47:00 crc kubenswrapper[4811]: I0128 15:47:00.339369 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:47:00 crc kubenswrapper[4811]: I0128 15:47:00.339480 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:47:00 crc kubenswrapper[4811]: E0128 15:47:00.339597 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:47:00 crc kubenswrapper[4811]: E0128 15:47:00.339735 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:47:00 crc kubenswrapper[4811]: E0128 15:47:00.339833 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:47:00 crc kubenswrapper[4811]: E0128 15:47:00.339984 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:47:02 crc kubenswrapper[4811]: I0128 15:47:02.338504 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:47:02 crc kubenswrapper[4811]: I0128 15:47:02.338565 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:47:02 crc kubenswrapper[4811]: I0128 15:47:02.338631 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:47:02 crc kubenswrapper[4811]: E0128 15:47:02.338752 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:47:02 crc kubenswrapper[4811]: I0128 15:47:02.338796 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:47:02 crc kubenswrapper[4811]: E0128 15:47:02.338897 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 28 15:47:02 crc kubenswrapper[4811]: E0128 15:47:02.338980 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 28 15:47:02 crc kubenswrapper[4811]: E0128 15:47:02.339063 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 28 15:47:02 crc kubenswrapper[4811]: I0128 15:47:02.924007 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-mw4th_dc61e84d-a003-46db-924b-7f9ba7460f13/kube-multus/1.log"
Jan 28 15:47:02 crc kubenswrapper[4811]: I0128 15:47:02.924889 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-mw4th_dc61e84d-a003-46db-924b-7f9ba7460f13/kube-multus/0.log"
Jan 28 15:47:02 crc kubenswrapper[4811]: I0128 15:47:02.924971 4811 generic.go:334] "Generic (PLEG): container finished" podID="dc61e84d-a003-46db-924b-7f9ba7460f13" containerID="204fd678c7f2d914319df1c9cc694ffc516812b85a27536428e0e91fecb39c33" exitCode=1
Jan 28 15:47:02 crc kubenswrapper[4811]: I0128 15:47:02.925029 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-mw4th" event={"ID":"dc61e84d-a003-46db-924b-7f9ba7460f13","Type":"ContainerDied","Data":"204fd678c7f2d914319df1c9cc694ffc516812b85a27536428e0e91fecb39c33"}
Jan 28 15:47:02 crc kubenswrapper[4811]: I0128 15:47:02.925106 4811 scope.go:117] "RemoveContainer" containerID="45a7964de3a938d569e2a2b2cb37b74b66df82f830bb5b6830f4c80f6b0c869a"
Jan 28 15:47:02 crc kubenswrapper[4811]: I0128 15:47:02.925703 4811 scope.go:117] "RemoveContainer" containerID="204fd678c7f2d914319df1c9cc694ffc516812b85a27536428e0e91fecb39c33"
Jan 28 15:47:02 crc kubenswrapper[4811]: E0128 15:47:02.926027 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-mw4th_openshift-multus(dc61e84d-a003-46db-924b-7f9ba7460f13)\"" pod="openshift-multus/multus-mw4th" podUID="dc61e84d-a003-46db-924b-7f9ba7460f13"
Jan 28 15:47:03 crc kubenswrapper[4811]: I0128 15:47:03.930075 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-mw4th_dc61e84d-a003-46db-924b-7f9ba7460f13/kube-multus/1.log"
Jan 28 15:47:04 crc kubenswrapper[4811]: I0128 15:47:04.339253 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc"
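The 15:47:02 burst above is a textbook crash-loop signature: PLEG reports the kube-multus container finishing with exitCode=1, the kubelet prunes the prior container instance, and pod_workers backs off with CrashLoopBackOff ("back-off 10s"). When triaging a kubelet log this size, it can help to tally those events per pod instead of reading linearly; a minimal sketch (Python, regexes keyed to the exact message formats visible above; the file name is a placeholder):

    # crashloop_summary.py -- count container exits and CrashLoopBackOff
    # back-offs per pod in a kubelet log (sketch, not a supported tool).
    import re
    from collections import Counter

    FINISHED = re.compile(r'"Generic \(PLEG\): container finished" podID="([^"]+)".*exitCode=(\d+)')
    BACKOFF = re.compile(r'with CrashLoopBackOff: .* pod="([^"]+)"')

    def summarize(path="kubelet.log"):  # placeholder path
        finished, backoffs = Counter(), Counter()
        with open(path, encoding="utf-8") as f:
            for line in f:
                if (m := FINISHED.search(line)):
                    finished[(m.group(1), int(m.group(2)))] += 1
                if (m := BACKOFF.search(line)):
                    backoffs[m.group(1)] += 1
        return finished, backoffs

Applied to just the span above, it would attribute one exitCode=1 finish to podID dc61e84d-a003-46db-924b-7f9ba7460f13 and one back-off to openshift-multus/multus-mw4th.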
Jan 28 15:47:04 crc kubenswrapper[4811]: I0128 15:47:04.339336 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 28 15:47:04 crc kubenswrapper[4811]: E0128 15:47:04.339402 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467"
Jan 28 15:47:04 crc kubenswrapper[4811]: I0128 15:47:04.339272 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 28 15:47:04 crc kubenswrapper[4811]: E0128 15:47:04.339507 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 28 15:47:04 crc kubenswrapper[4811]: E0128 15:47:04.339554 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 28 15:47:04 crc kubenswrapper[4811]: I0128 15:47:04.339593 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 28 15:47:04 crc kubenswrapper[4811]: E0128 15:47:04.339647 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 28 15:47:06 crc kubenswrapper[4811]: I0128 15:47:06.338947 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc"
Jan 28 15:47:06 crc kubenswrapper[4811]: I0128 15:47:06.339022 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 28 15:47:06 crc kubenswrapper[4811]: I0128 15:47:06.338947 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 28 15:47:06 crc kubenswrapper[4811]: E0128 15:47:06.339128 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:47:06 crc kubenswrapper[4811]: E0128 15:47:06.339248 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:47:06 crc kubenswrapper[4811]: I0128 15:47:06.339124 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:47:06 crc kubenswrapper[4811]: E0128 15:47:06.339466 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:47:06 crc kubenswrapper[4811]: E0128 15:47:06.339544 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:47:08 crc kubenswrapper[4811]: I0128 15:47:08.339577 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:47:08 crc kubenswrapper[4811]: I0128 15:47:08.339741 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:47:08 crc kubenswrapper[4811]: I0128 15:47:08.342198 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:47:08 crc kubenswrapper[4811]: E0128 15:47:08.342178 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:47:08 crc kubenswrapper[4811]: I0128 15:47:08.342253 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:47:08 crc kubenswrapper[4811]: E0128 15:47:08.342337 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:47:08 crc kubenswrapper[4811]: E0128 15:47:08.342823 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:47:08 crc kubenswrapper[4811]: E0128 15:47:08.342936 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:47:08 crc kubenswrapper[4811]: E0128 15:47:08.360917 4811 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Jan 28 15:47:08 crc kubenswrapper[4811]: E0128 15:47:08.427511 4811 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 28 15:47:10 crc kubenswrapper[4811]: I0128 15:47:10.338969 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:47:10 crc kubenswrapper[4811]: E0128 15:47:10.339709 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:47:10 crc kubenswrapper[4811]: I0128 15:47:10.339028 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:47:10 crc kubenswrapper[4811]: I0128 15:47:10.339027 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:47:10 crc kubenswrapper[4811]: I0128 15:47:10.338972 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:47:10 crc kubenswrapper[4811]: E0128 15:47:10.339989 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 28 15:47:10 crc kubenswrapper[4811]: E0128 15:47:10.339836 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 28 15:47:10 crc kubenswrapper[4811]: E0128 15:47:10.339913 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 28 15:47:10 crc kubenswrapper[4811]: I0128 15:47:10.339838 4811 scope.go:117] "RemoveContainer" containerID="5a7c911b8901e1963fe372b87999b0e5961f4fe4a184c52d659bee9aece40d16"
Jan 28 15:47:10 crc kubenswrapper[4811]: I0128 15:47:10.953156 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dkzf6_d3371905-8bb6-433c-b905-43d69e7b382a/ovnkube-controller/3.log"
Jan 28 15:47:10 crc kubenswrapper[4811]: I0128 15:47:10.956189 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" event={"ID":"d3371905-8bb6-433c-b905-43d69e7b382a","Type":"ContainerStarted","Data":"5f0050e1d3952d6c23ba672537a0f0d69e033e6a7cef3fb4236b9b5f970b4e23"}
Jan 28 15:47:10 crc kubenswrapper[4811]: I0128 15:47:10.956747 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6"
Jan 28 15:47:10 crc kubenswrapper[4811]: I0128 15:47:10.995824 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" podStartSLOduration=102.995620442 podStartE2EDuration="1m42.995620442s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:10.995568 +0000 UTC m=+123.749931583" watchObservedRunningTime="2026-01-28 15:47:10.995620442 +0000 UTC m=+123.749984025"
Jan 28 15:47:11 crc kubenswrapper[4811]: I0128 15:47:11.424787 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-t5mlc"]
Jan 28 15:47:11 crc kubenswrapper[4811]: I0128 15:47:11.424909 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc"
Jan 28 15:47:11 crc kubenswrapper[4811]: E0128 15:47:11.425018 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467"
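The pod_startup_latency_tracker entry above records ovnkube-node-dkzf6 going Running roughly 103 s after its creation timestamp (podStartSLOduration=102.995620442; the zero-valued firstStartedPulling/lastFinishedPulling presumably mean no image pull was observed in this window). To rank pods by that metric across the whole log, a minimal sketch (Python; the regex is keyed to the field order shown in the entry above):

    # startup_latency.py -- extract "Observed pod startup duration" records
    # and sort pods slowest-first by podStartSLOduration (sketch).
    import re

    RECORD = re.compile(
        r'"Observed pod startup duration" pod="([^"]+)" '
        r'podStartSLOduration=([0-9.]+) podStartE2EDuration="([^"]+)"'
    )

    def slowest_pods(lines):
        """Return [(seconds, pod, e2e_duration), ...], slowest first."""
        hits = [(float(m.group(2)), m.group(1), m.group(3))
                for line in lines if (m := RECORD.search(line))]
        return sorted(hits, reverse=True)

For the entry above this would yield (102.995620442, "openshift-ovn-kubernetes/ovnkube-node-dkzf6", "1m42.995620442s").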
Jan 28 15:47:12 crc kubenswrapper[4811]: I0128 15:47:12.338469 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 28 15:47:12 crc kubenswrapper[4811]: E0128 15:47:12.338910 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 28 15:47:12 crc kubenswrapper[4811]: I0128 15:47:12.338636 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 28 15:47:12 crc kubenswrapper[4811]: E0128 15:47:12.338987 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 28 15:47:12 crc kubenswrapper[4811]: I0128 15:47:12.338527 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 28 15:47:12 crc kubenswrapper[4811]: E0128 15:47:12.339033 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 28 15:47:13 crc kubenswrapper[4811]: I0128 15:47:13.338804 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc"
Jan 28 15:47:13 crc kubenswrapper[4811]: E0128 15:47:13.339936 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467"
Jan 28 15:47:13 crc kubenswrapper[4811]: E0128 15:47:13.428453 4811 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Jan 28 15:47:14 crc kubenswrapper[4811]: I0128 15:47:14.338857 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 28 15:47:14 crc kubenswrapper[4811]: I0128 15:47:14.338924 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 28 15:47:14 crc kubenswrapper[4811]: I0128 15:47:14.338865 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:47:14 crc kubenswrapper[4811]: E0128 15:47:14.339056 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:47:14 crc kubenswrapper[4811]: E0128 15:47:14.339204 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:47:14 crc kubenswrapper[4811]: E0128 15:47:14.339290 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:47:15 crc kubenswrapper[4811]: I0128 15:47:15.338946 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:47:15 crc kubenswrapper[4811]: E0128 15:47:15.339147 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:47:16 crc kubenswrapper[4811]: I0128 15:47:16.339659 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:47:16 crc kubenswrapper[4811]: I0128 15:47:16.339659 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:47:16 crc kubenswrapper[4811]: E0128 15:47:16.339935 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:47:16 crc kubenswrapper[4811]: E0128 15:47:16.339821 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:47:16 crc kubenswrapper[4811]: I0128 15:47:16.339939 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:47:16 crc kubenswrapper[4811]: E0128 15:47:16.340315 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:47:17 crc kubenswrapper[4811]: I0128 15:47:17.338840 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:47:17 crc kubenswrapper[4811]: I0128 15:47:17.339583 4811 scope.go:117] "RemoveContainer" containerID="204fd678c7f2d914319df1c9cc694ffc516812b85a27536428e0e91fecb39c33" Jan 28 15:47:17 crc kubenswrapper[4811]: E0128 15:47:17.339628 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:47:17 crc kubenswrapper[4811]: I0128 15:47:17.983208 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-mw4th_dc61e84d-a003-46db-924b-7f9ba7460f13/kube-multus/1.log" Jan 28 15:47:17 crc kubenswrapper[4811]: I0128 15:47:17.983546 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-mw4th" event={"ID":"dc61e84d-a003-46db-924b-7f9ba7460f13","Type":"ContainerStarted","Data":"b9e6fde8714d52f3ab851df784d7d4129ecbcd7b592df17623bd72ae460ef5d9"} Jan 28 15:47:18 crc kubenswrapper[4811]: I0128 15:47:18.338637 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:47:18 crc kubenswrapper[4811]: I0128 15:47:18.338708 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:47:18 crc kubenswrapper[4811]: I0128 15:47:18.338817 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:47:18 crc kubenswrapper[4811]: E0128 15:47:18.339726 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:47:18 crc kubenswrapper[4811]: E0128 15:47:18.339858 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:47:18 crc kubenswrapper[4811]: E0128 15:47:18.339978 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:47:18 crc kubenswrapper[4811]: E0128 15:47:18.429223 4811 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 28 15:47:19 crc kubenswrapper[4811]: I0128 15:47:19.338841 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:47:19 crc kubenswrapper[4811]: E0128 15:47:19.339047 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:47:20 crc kubenswrapper[4811]: I0128 15:47:20.339021 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:47:20 crc kubenswrapper[4811]: I0128 15:47:20.339084 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:47:20 crc kubenswrapper[4811]: E0128 15:47:20.339385 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:47:20 crc kubenswrapper[4811]: I0128 15:47:20.339554 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:47:20 crc kubenswrapper[4811]: E0128 15:47:20.339599 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:47:20 crc kubenswrapper[4811]: E0128 15:47:20.340012 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:47:21 crc kubenswrapper[4811]: I0128 15:47:21.339150 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:47:21 crc kubenswrapper[4811]: E0128 15:47:21.339355 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467" Jan 28 15:47:22 crc kubenswrapper[4811]: I0128 15:47:22.339335 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:47:22 crc kubenswrapper[4811]: I0128 15:47:22.339394 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:47:22 crc kubenswrapper[4811]: I0128 15:47:22.339329 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:47:22 crc kubenswrapper[4811]: E0128 15:47:22.339584 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 28 15:47:22 crc kubenswrapper[4811]: E0128 15:47:22.339663 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 28 15:47:22 crc kubenswrapper[4811]: E0128 15:47:22.339771 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 28 15:47:23 crc kubenswrapper[4811]: I0128 15:47:23.339240 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc"
Jan 28 15:47:23 crc kubenswrapper[4811]: E0128 15:47:23.339402 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5mlc" podUID="bb3aac9c-e96e-4a5e-beb5-aeff56394467"
Jan 28 15:47:24 crc kubenswrapper[4811]: I0128 15:47:24.339183 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 28 15:47:24 crc kubenswrapper[4811]: I0128 15:47:24.339273 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 28 15:47:24 crc kubenswrapper[4811]: I0128 15:47:24.339273 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 28 15:47:24 crc kubenswrapper[4811]: I0128 15:47:24.341132 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Jan 28 15:47:24 crc kubenswrapper[4811]: I0128 15:47:24.345070 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Jan 28 15:47:24 crc kubenswrapper[4811]: I0128 15:47:24.345191 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Jan 28 15:47:24 crc kubenswrapper[4811]: I0128 15:47:24.345283 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin"
Jan 28 15:47:25 crc kubenswrapper[4811]: I0128 15:47:25.339104 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc"
Jan 28 15:47:25 crc kubenswrapper[4811]: I0128 15:47:25.341269 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c"
Jan 28 15:47:25 crc kubenswrapper[4811]: I0128 15:47:25.341366 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.292316 4811 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.345838 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8"]
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.346720 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.348003 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dwxpb"]
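NodeReady at 15:47:27.292316 is the pivot of this log: within about 50 ms the API server starts handing the kubelet the queued control-plane pods as "SyncLoop ADD" events (oauth-apiserver and route-controller-manager above, more below). Extracting that admission timeline takes one more regex; a minimal sketch in the same style as the earlier ones (it handles the single-pod pods=[...] form seen in these entries):

    # syncloop_adds.py -- list pods the kubelet was asked to admit, in log
    # order, from "SyncLoop ADD" entries (sketch).
    import re

    ADD = re.compile(r'(\d{2}:\d{2}:\d{2}\.\d+) \d+ kubelet\.go:\d+\] '
                     r'"SyncLoop ADD" source="api" pods=\["([^"]+)"\]')

    def pods_added(lines):
        """Return [(hh:mm:ss.micros, namespace/pod), ...] in log order."""
        return [(m.group(1), m.group(2)) for line in lines if (m := ADD.search(line))]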
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.348673 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dwxpb"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.352535 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-bg96p"]
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.353463 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-bg96p"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.354459 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-rkd9p"]
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.355254 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-rkd9p"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.356152 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.356240 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.356304 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.360976 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.361712 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.362090 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.366417 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-qrkpf"]
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.368847 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qrkpf" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.387589 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.388615 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.388641 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.389839 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.390001 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.390111 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.390275 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.390389 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.390475 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.390567 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.390584 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.390685 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.390785 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.390792 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.390893 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-dgnfz"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.391237 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.391355 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.391444 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.391538 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 28 15:47:27 crc 
kubenswrapper[4811]: I0128 15:47:27.391604 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.392130 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.392275 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.392572 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.392841 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.400518 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tb2z9"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.401117 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.401139 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.401713 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.401898 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.401167 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tb2z9" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.402243 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.403097 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.403399 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.403968 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.405684 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q7vr4"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.406227 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-xcx2z"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.406588 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-knslb"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.406970 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-knslb" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.407453 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q7vr4" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.407852 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-xcx2z" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.406628 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.409241 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-hcsxq"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.409797 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-gmjnj"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.409918 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.410200 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-vffxs"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.410402 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.410632 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-vffxs" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.410691 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.410786 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.412011 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.412228 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gmjnj" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.412601 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.415216 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.415782 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.417252 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.417487 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.418753 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.418911 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.419030 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.419401 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.419526 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.419611 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.419702 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.419808 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.419856 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.419904 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.419987 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.420167 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.420592 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.420786 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 28 15:47:27 crc kubenswrapper[4811]: 
I0128 15:47:27.420974 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-27m45"]
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.426124 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-27m45"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.429086 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-c4zgz"]
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.429887 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-c4zgz"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.437502 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.437723 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.437825 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.437943 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.444246 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.444900 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.445246 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.445639 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.445983 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.437944 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.446642 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.447057 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.447386 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.447712 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.448798 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.450393 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.452753 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.453046 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.453245 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.453297 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.453461 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.453583 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.453798 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.453935 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.455263 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.455869 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.456114 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.457281 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.457390 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.458610 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.462056 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.463515 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.471888 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.473095 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/bfeb8a90-cb53-45c4-8650-1931e986a5f3-image-import-ca\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.485960 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/2fc54817-ea76-451c-8dc9-2bf6684dc2b8-encryption-config\") pod \"apiserver-7bbb656c7d-b9zf8\" (UID: \"2fc54817-ea76-451c-8dc9-2bf6684dc2b8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.486113 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77wzd\" (UniqueName: \"kubernetes.io/projected/1167ed60-0864-42bb-aba1-48ba9ee03867-kube-api-access-77wzd\") pod \"cluster-image-registry-operator-dc59b4c8b-knslb\" (UID: \"1167ed60-0864-42bb-aba1-48ba9ee03867\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-knslb"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.486256 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f28ff403-47a1-47de-b3f4-f6519b75064f-audit-dir\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.486461 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c2b17da8-0524-4615-a9e5-f1c59a0fde0b-serving-cert\") pod \"openshift-config-operator-7777fb866f-gmjnj\" (UID: \"c2b17da8-0524-4615-a9e5-f1c59a0fde0b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gmjnj"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.486599 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/c2b17da8-0524-4615-a9e5-f1c59a0fde0b-available-featuregates\") pod \"openshift-config-operator-7777fb866f-gmjnj\" (UID: \"c2b17da8-0524-4615-a9e5-f1c59a0fde0b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gmjnj"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.486753 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.487104 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfeb8a90-cb53-45c4-8650-1931e986a5f3-config\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.487260 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzp6s\" (UniqueName: \"kubernetes.io/projected/3e446987-0b7c-46f9-8207-dfe3e84240e4-kube-api-access-rzp6s\") pod \"authentication-operator-69f744f599-bg96p\" (UID: \"3e446987-0b7c-46f9-8207-dfe3e84240e4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bg96p"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.487390 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2fc54817-ea76-451c-8dc9-2bf6684dc2b8-audit-dir\") pod \"apiserver-7bbb656c7d-b9zf8\" (UID: \"2fc54817-ea76-451c-8dc9-2bf6684dc2b8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.487543 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.487648 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ec729dda-4b2d-458b-8e11-6cc8beacb717-ca-trust-extracted\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.487748 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ec729dda-4b2d-458b-8e11-6cc8beacb717-trusted-ca\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.487847 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/bfeb8a90-cb53-45c4-8650-1931e986a5f3-etcd-serving-ca\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.487937 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bfeb8a90-cb53-45c4-8650-1931e986a5f3-trusted-ca-bundle\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488035 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/c386978b-8bd6-432e-b6df-9cacf8acd0ae-machine-approver-tls\") pod \"machine-approver-56656f9798-qrkpf\" (UID: \"c386978b-8bd6-432e-b6df-9cacf8acd0ae\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qrkpf"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488153 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488250 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2fae918-6d12-482a-9cf1-9cb27efb5f8c-config\") pod \"route-controller-manager-6576b87f9c-dwxpb\" (UID: \"f2fae918-6d12-482a-9cf1-9cb27efb5f8c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dwxpb"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488353 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/bfeb8a90-cb53-45c4-8650-1931e986a5f3-encryption-config\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488384 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2fc54817-ea76-451c-8dc9-2bf6684dc2b8-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-b9zf8\" (UID: \"2fc54817-ea76-451c-8dc9-2bf6684dc2b8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488400 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2fc54817-ea76-451c-8dc9-2bf6684dc2b8-audit-policies\") pod \"apiserver-7bbb656c7d-b9zf8\" (UID: \"2fc54817-ea76-451c-8dc9-2bf6684dc2b8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488416 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jd67p\" (UniqueName: \"kubernetes.io/projected/f2fae918-6d12-482a-9cf1-9cb27efb5f8c-kube-api-access-jd67p\") pod \"route-controller-manager-6576b87f9c-dwxpb\" (UID: \"f2fae918-6d12-482a-9cf1-9cb27efb5f8c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dwxpb"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488448 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/cfa466bf-9337-45cd-a739-c0d3b5521e13-console-serving-cert\") pod \"console-f9d7485db-xcx2z\" (UID: \"cfa466bf-9337-45cd-a739-c0d3b5521e13\") " pod="openshift-console/console-f9d7485db-xcx2z"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488462 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/cfa466bf-9337-45cd-a739-c0d3b5521e13-oauth-serving-cert\") pod \"console-f9d7485db-xcx2z\" (UID: \"cfa466bf-9337-45cd-a739-c0d3b5521e13\") " pod="openshift-console/console-f9d7485db-xcx2z"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488476 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1167ed60-0864-42bb-aba1-48ba9ee03867-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-knslb\" (UID: \"1167ed60-0864-42bb-aba1-48ba9ee03867\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-knslb"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488492 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2fc54817-ea76-451c-8dc9-2bf6684dc2b8-etcd-client\") pod \"apiserver-7bbb656c7d-b9zf8\" (UID: \"2fc54817-ea76-451c-8dc9-2bf6684dc2b8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488507 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whdh7\" (UniqueName: \"kubernetes.io/projected/f28ff403-47a1-47de-b3f4-f6519b75064f-kube-api-access-whdh7\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488531 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/cfa466bf-9337-45cd-a739-c0d3b5521e13-service-ca\") pod \"console-f9d7485db-xcx2z\" (UID: \"cfa466bf-9337-45cd-a739-c0d3b5521e13\") " pod="openshift-console/console-f9d7485db-xcx2z"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488545 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwjps\" (UniqueName: \"kubernetes.io/projected/cfa466bf-9337-45cd-a739-c0d3b5521e13-kube-api-access-hwjps\") pod \"console-f9d7485db-xcx2z\" (UID: \"cfa466bf-9337-45cd-a739-c0d3b5521e13\") " pod="openshift-console/console-f9d7485db-xcx2z"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488570 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488588 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22z5q\" (UniqueName: \"kubernetes.io/projected/797642e6-462e-434f-952f-ea096d1a4e47-kube-api-access-22z5q\") pod \"cluster-samples-operator-665b6dd947-tb2z9\" (UID: \"797642e6-462e-434f-952f-ea096d1a4e47\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tb2z9"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488604 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488619 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/cfa466bf-9337-45cd-a739-c0d3b5521e13-console-config\") pod \"console-f9d7485db-xcx2z\" (UID: \"cfa466bf-9337-45cd-a739-c0d3b5521e13\") " pod="openshift-console/console-f9d7485db-xcx2z"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488633 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wfgc\" (UniqueName: \"kubernetes.io/projected/4141dca3-d75b-4da2-ad3d-47ab847ff59b-kube-api-access-2wfgc\") pod \"console-operator-58897d9998-vffxs\" (UID: \"4141dca3-d75b-4da2-ad3d-47ab847ff59b\") " pod="openshift-console-operator/console-operator-58897d9998-vffxs"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488651 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/bfeb8a90-cb53-45c4-8650-1931e986a5f3-audit-dir\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488665 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcwmq\" (UniqueName: \"kubernetes.io/projected/c386978b-8bd6-432e-b6df-9cacf8acd0ae-kube-api-access-wcwmq\") pod \"machine-approver-56656f9798-qrkpf\" (UID: \"c386978b-8bd6-432e-b6df-9cacf8acd0ae\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qrkpf"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488679 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f28ff403-47a1-47de-b3f4-f6519b75064f-audit-policies\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488702 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3e446987-0b7c-46f9-8207-dfe3e84240e4-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-bg96p\" (UID: \"3e446987-0b7c-46f9-8207-dfe3e84240e4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bg96p"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488715 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/cfa466bf-9337-45cd-a739-c0d3b5521e13-console-oauth-config\") pod \"console-f9d7485db-xcx2z\" (UID: \"cfa466bf-9337-45cd-a739-c0d3b5521e13\") " pod="openshift-console/console-f9d7485db-xcx2z"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488729 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cfa466bf-9337-45cd-a739-c0d3b5521e13-trusted-ca-bundle\") pod \"console-f9d7485db-xcx2z\" (UID: \"cfa466bf-9337-45cd-a739-c0d3b5521e13\") " pod="openshift-console/console-f9d7485db-xcx2z"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488746 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76a1cb2c-b73b-4d38-90b2-1ab8e91e0998-config\") pod \"openshift-apiserver-operator-796bbdcf4f-q7vr4\" (UID: \"76a1cb2c-b73b-4d38-90b2-1ab8e91e0998\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q7vr4"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488760 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c386978b-8bd6-432e-b6df-9cacf8acd0ae-auth-proxy-config\") pod \"machine-approver-56656f9798-qrkpf\" (UID: \"c386978b-8bd6-432e-b6df-9cacf8acd0ae\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qrkpf"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488775 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488789 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ec729dda-4b2d-458b-8e11-6cc8beacb717-registry-tls\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488804 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8cm9\" (UniqueName: \"kubernetes.io/projected/ec729dda-4b2d-458b-8e11-6cc8beacb717-kube-api-access-z8cm9\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488822 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8x42\" (UniqueName: \"kubernetes.io/projected/c2b17da8-0524-4615-a9e5-f1c59a0fde0b-kube-api-access-h8x42\") pod \"openshift-config-operator-7777fb866f-gmjnj\" (UID: \"c2b17da8-0524-4615-a9e5-f1c59a0fde0b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gmjnj"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488839 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2fc54817-ea76-451c-8dc9-2bf6684dc2b8-serving-cert\") pod \"apiserver-7bbb656c7d-b9zf8\" (UID: \"2fc54817-ea76-451c-8dc9-2bf6684dc2b8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488851 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7pnz\" (UniqueName: \"kubernetes.io/projected/2fc54817-ea76-451c-8dc9-2bf6684dc2b8-kube-api-access-r7pnz\") pod \"apiserver-7bbb656c7d-b9zf8\" (UID: \"2fc54817-ea76-451c-8dc9-2bf6684dc2b8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488866 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488882 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3e446987-0b7c-46f9-8207-dfe3e84240e4-service-ca-bundle\") pod \"authentication-operator-69f744f599-bg96p\" (UID: \"3e446987-0b7c-46f9-8207-dfe3e84240e4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bg96p"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488898 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/76a1cb2c-b73b-4d38-90b2-1ab8e91e0998-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-q7vr4\" (UID: \"76a1cb2c-b73b-4d38-90b2-1ab8e91e0998\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q7vr4"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488911 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488926 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3e446987-0b7c-46f9-8207-dfe3e84240e4-serving-cert\") pod \"authentication-operator-69f744f599-bg96p\" (UID: \"3e446987-0b7c-46f9-8207-dfe3e84240e4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bg96p"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488950 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j76cb\" (UniqueName: \"kubernetes.io/projected/76a1cb2c-b73b-4d38-90b2-1ab8e91e0998-kube-api-access-j76cb\") pod \"openshift-apiserver-operator-796bbdcf4f-q7vr4\" (UID: \"76a1cb2c-b73b-4d38-90b2-1ab8e91e0998\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q7vr4"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488964 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4141dca3-d75b-4da2-ad3d-47ab847ff59b-config\") pod \"console-operator-58897d9998-vffxs\" (UID: \"4141dca3-d75b-4da2-ad3d-47ab847ff59b\") " pod="openshift-console-operator/console-operator-58897d9998-vffxs"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.488976 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4141dca3-d75b-4da2-ad3d-47ab847ff59b-serving-cert\") pod \"console-operator-58897d9998-vffxs\" (UID: \"4141dca3-d75b-4da2-ad3d-47ab847ff59b\") " pod="openshift-console-operator/console-operator-58897d9998-vffxs"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.489011 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/bfeb8a90-cb53-45c4-8650-1931e986a5f3-etcd-client\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.489026 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.489039 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e446987-0b7c-46f9-8207-dfe3e84240e4-config\") pod \"authentication-operator-69f744f599-bg96p\" (UID: \"3e446987-0b7c-46f9-8207-dfe3e84240e4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bg96p"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.489052 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ec729dda-4b2d-458b-8e11-6cc8beacb717-registry-certificates\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.489073 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/bfeb8a90-cb53-45c4-8650-1931e986a5f3-audit\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.489087 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bfeb8a90-cb53-45c4-8650-1931e986a5f3-serving-cert\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.489106 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.489120 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ec729dda-4b2d-458b-8e11-6cc8beacb717-bound-sa-token\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.489136 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/bfeb8a90-cb53-45c4-8650-1931e986a5f3-node-pullsecrets\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.489152 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dq5vq\" (UniqueName: \"kubernetes.io/projected/bfeb8a90-cb53-45c4-8650-1931e986a5f3-kube-api-access-dq5vq\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.489190 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.489205 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1167ed60-0864-42bb-aba1-48ba9ee03867-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-knslb\" (UID: \"1167ed60-0864-42bb-aba1-48ba9ee03867\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-knslb"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.489229 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/2fc54817-ea76-451c-8dc9-2bf6684dc2b8-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-b9zf8\" (UID: \"2fc54817-ea76-451c-8dc9-2bf6684dc2b8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.489242 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f2fae918-6d12-482a-9cf1-9cb27efb5f8c-client-ca\") pod \"route-controller-manager-6576b87f9c-dwxpb\" (UID: \"f2fae918-6d12-482a-9cf1-9cb27efb5f8c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dwxpb"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.489255 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f2fae918-6d12-482a-9cf1-9cb27efb5f8c-serving-cert\") pod \"route-controller-manager-6576b87f9c-dwxpb\" (UID: \"f2fae918-6d12-482a-9cf1-9cb27efb5f8c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dwxpb"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.489280 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ec729dda-4b2d-458b-8e11-6cc8beacb717-installation-pull-secrets\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.489301 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c386978b-8bd6-432e-b6df-9cacf8acd0ae-config\") pod \"machine-approver-56656f9798-qrkpf\" (UID: \"c386978b-8bd6-432e-b6df-9cacf8acd0ae\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qrkpf"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.489317 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4141dca3-d75b-4da2-ad3d-47ab847ff59b-trusted-ca\") pod \"console-operator-58897d9998-vffxs\" (UID: \"4141dca3-d75b-4da2-ad3d-47ab847ff59b\") " pod="openshift-console-operator/console-operator-58897d9998-vffxs"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.489334 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/797642e6-462e-434f-952f-ea096d1a4e47-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-tb2z9\" (UID: \"797642e6-462e-434f-952f-ea096d1a4e47\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tb2z9"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.489348 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/1167ed60-0864-42bb-aba1-48ba9ee03867-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-knslb\" (UID: \"1167ed60-0864-42bb-aba1-48ba9ee03867\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-knslb"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.489370 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz"
Jan 28 15:47:27 crc kubenswrapper[4811]: E0128 15:47:27.489918 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:27.989897268 +0000 UTC m=+140.744260851 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.490150 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.490266 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jfj5"]
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.490822 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-dssbm"]
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.491104 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-dssbm"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.491376 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jfj5"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.493490 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.493996 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.494026 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-47dsj"]
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.494682 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfnhq"]
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.495131 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-t9r9j"]
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.495485 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-t9r9j"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.495827 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-47dsj"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.496078 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfnhq"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.496088 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.496692 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.496827 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-d4ckz"]
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.496814 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.498043 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4ckz"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.498110 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-jtvpg"]
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.498913 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-jtvpg"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.499373 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.500237 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-lmds9"]
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.500822 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-lmds9"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.500828 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.504261 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8"]
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.517493 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.517592 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.527600 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-87g76"]
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.528358 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xg589"]
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.528800 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-87g76"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.528876 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xg589"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.537997 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.542717 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w85gc"]
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.543445 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-7wsdg"]
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.543549 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w85gc"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.543996 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-shsbw"]
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.544383 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-shsbw"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.544710 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7wsdg"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.544999 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5qxgr"]
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.545389 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5qxgr"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.548279 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gxspm"]
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.548898 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gxspm"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.551535 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-5t2fc"]
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.551879 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.552532 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-4fj7f"]
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.552899 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-nzw9g"]
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.553346 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-nzw9g"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.553692 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-5t2fc"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.584778 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-4fj7f"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.585260 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.585443 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493585-8md6j"]
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.586513 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493585-8md6j"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.590087 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-jvb2s"]
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.591106 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-jvb2s"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.597820 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tknqk"]
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.598535 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.598880 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tknqk"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.599838 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:27 crc kubenswrapper[4811]: E0128 15:47:27.600020 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:28.099990423 +0000 UTC m=+140.854354006 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.600533 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2fc54817-ea76-451c-8dc9-2bf6684dc2b8-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-b9zf8\" (UID: \"2fc54817-ea76-451c-8dc9-2bf6684dc2b8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.600576 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/bfeb8a90-cb53-45c4-8650-1931e986a5f3-encryption-config\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.600610 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2fc54817-ea76-451c-8dc9-2bf6684dc2b8-audit-policies\") pod \"apiserver-7bbb656c7d-b9zf8\" (UID: \"2fc54817-ea76-451c-8dc9-2bf6684dc2b8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.600641 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jd67p\" (UniqueName: \"kubernetes.io/projected/f2fae918-6d12-482a-9cf1-9cb27efb5f8c-kube-api-access-jd67p\") pod \"route-controller-manager-6576b87f9c-dwxpb\" (UID: \"f2fae918-6d12-482a-9cf1-9cb27efb5f8c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dwxpb"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.600677 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef30f9b4-f11c-4149-a320-539203ac5666-config\") pod \"kube-apiserver-operator-766d6c64bb-t9r9j\" (UID: \"ef30f9b4-f11c-4149-a320-539203ac5666\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-t9r9j"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.600709 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9763c54a-c0e1-4f5f-969e-ea775fdfa175-auth-proxy-config\") pod \"machine-config-operator-74547568cd-7wsdg\" (UID: \"9763c54a-c0e1-4f5f-969e-ea775fdfa175\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7wsdg"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.600744 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1167ed60-0864-42bb-aba1-48ba9ee03867-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-knslb\" (UID: \"1167ed60-0864-42bb-aba1-48ba9ee03867\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-knslb"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.600774 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2fc54817-ea76-451c-8dc9-2bf6684dc2b8-etcd-client\") pod \"apiserver-7bbb656c7d-b9zf8\" (UID: \"2fc54817-ea76-451c-8dc9-2bf6684dc2b8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.600798 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/cfa466bf-9337-45cd-a739-c0d3b5521e13-service-ca\") pod \"console-f9d7485db-xcx2z\" (UID: \"cfa466bf-9337-45cd-a739-c0d3b5521e13\") " pod="openshift-console/console-f9d7485db-xcx2z"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.600850 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l889n\" (UniqueName: \"kubernetes.io/projected/f2e6c078-567b-4c22-aece-ebc4f195e9a2-kube-api-access-l889n\") pod \"package-server-manager-789f6589d5-xg589\" (UID: \"f2e6c078-567b-4c22-aece-ebc4f195e9a2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xg589"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.600884 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.600914 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zp2x\" (UniqueName: \"kubernetes.io/projected/02bc99e0-e3a2-4c8b-b936-b5d7535412d0-kube-api-access-6zp2x\") pod \"downloads-7954f5f757-dssbm\" (UID: \"02bc99e0-e3a2-4c8b-b936-b5d7535412d0\") " pod="openshift-console/downloads-7954f5f757-dssbm"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.600961 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ef30f9b4-f11c-4149-a320-539203ac5666-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-t9r9j\" (UID: \"ef30f9b4-f11c-4149-a320-539203ac5666\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-t9r9j"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.600990 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0539bf34-a663-45a1-b9d2-b7a43b62b6ad-config\") pod \"kube-controller-manager-operator-78b949d7b-5qxgr\" (UID: \"0539bf34-a663-45a1-b9d2-b7a43b62b6ad\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5qxgr"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.601033 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nv27t\" (UniqueName: \"kubernetes.io/projected/9cea0670-a73c-4ef8-962e-ac12bdef4283-kube-api-access-nv27t\") pod \"dns-operator-744455d44c-lmds9\" (UID: \"9cea0670-a73c-4ef8-962e-ac12bdef4283\") " pod="openshift-dns-operator/dns-operator-744455d44c-lmds9"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.601064 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c5167f6-623a-4b81-ad25-0e866528fd94-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-sfnhq\" (UID: \"2c5167f6-623a-4b81-ad25-0e866528fd94\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfnhq"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.601162 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2fc54817-ea76-451c-8dc9-2bf6684dc2b8-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-b9zf8\" (UID: \"2fc54817-ea76-451c-8dc9-2bf6684dc2b8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.601208 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/f4a001af-cfdf-49ed-9d59-646a1b5683da-etcd-service-ca\") pod \"etcd-operator-b45778765-jtvpg\" (UID: \"f4a001af-cfdf-49ed-9d59-646a1b5683da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jtvpg"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.602001 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/cfa466bf-9337-45cd-a739-c0d3b5521e13-service-ca\") pod \"console-f9d7485db-xcx2z\" (UID: \"cfa466bf-9337-45cd-a739-c0d3b5521e13\") " pod="openshift-console/console-f9d7485db-xcx2z"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.601972 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/cfa466bf-9337-45cd-a739-c0d3b5521e13-console-oauth-config\") pod \"console-f9d7485db-xcx2z\" (UID: \"cfa466bf-9337-45cd-a739-c0d3b5521e13\") " pod="openshift-console/console-f9d7485db-xcx2z"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.602496 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2fc54817-ea76-451c-8dc9-2bf6684dc2b8-audit-policies\") pod \"apiserver-7bbb656c7d-b9zf8\" (UID: \"2fc54817-ea76-451c-8dc9-2bf6684dc2b8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.602637 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cfa466bf-9337-45cd-a739-c0d3b5521e13-trusted-ca-bundle\") pod \"console-f9d7485db-xcx2z\" (UID: \"cfa466bf-9337-45cd-a739-c0d3b5521e13\") " pod="openshift-console/console-f9d7485db-xcx2z"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.603271 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ac199b3-b2fa-4acf-92e5-0840b7da5c41-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-2jfj5\" (UID: \"0ac199b3-b2fa-4acf-92e5-0840b7da5c41\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jfj5"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.603578 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ec729dda-4b2d-458b-8e11-6cc8beacb717-registry-tls\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.603646 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c5e6c2fc-f99a-443f-a0c6-165478a1e838-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-27m45\" (UID: \"c5e6c2fc-f99a-443f-a0c6-165478a1e838\") " pod="openshift-controller-manager/controller-manager-879f6c89f-27m45"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.603675 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8x42\" (UniqueName: \"kubernetes.io/projected/c2b17da8-0524-4615-a9e5-f1c59a0fde0b-kube-api-access-h8x42\") pod \"openshift-config-operator-7777fb866f-gmjnj\" (UID: \"c2b17da8-0524-4615-a9e5-f1c59a0fde0b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gmjnj"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.603725 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2fc54817-ea76-451c-8dc9-2bf6684dc2b8-serving-cert\") pod \"apiserver-7bbb656c7d-b9zf8\" (UID: \"2fc54817-ea76-451c-8dc9-2bf6684dc2b8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.603749 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3e446987-0b7c-46f9-8207-dfe3e84240e4-service-ca-bundle\") pod \"authentication-operator-69f744f599-bg96p\" (UID: \"3e446987-0b7c-46f9-8207-dfe3e84240e4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bg96p"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.604623 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-gf4gj"]
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.604682 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3e446987-0b7c-46f9-8207-dfe3e84240e4-service-ca-bundle\") pod \"authentication-operator-69f744f599-bg96p\" (UID: \"3e446987-0b7c-46f9-8207-dfe3e84240e4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bg96p"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.606371 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cfa466bf-9337-45cd-a739-c0d3b5521e13-trusted-ca-bundle\") pod \"console-f9d7485db-xcx2z\" (UID: \"cfa466bf-9337-45cd-a739-c0d3b5521e13\") " pod="openshift-console/console-f9d7485db-xcx2z"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.607316 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/bfeb8a90-cb53-45c4-8650-1931e986a5f3-encryption-config\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.608524 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/cfa466bf-9337-45cd-a739-c0d3b5521e13-console-oauth-config\") pod \"console-f9d7485db-xcx2z\" (UID: \"cfa466bf-9337-45cd-a739-c0d3b5521e13\") " pod="openshift-console/console-f9d7485db-xcx2z"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.609174 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/76a1cb2c-b73b-4d38-90b2-1ab8e91e0998-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-q7vr4\" (UID: \"76a1cb2c-b73b-4d38-90b2-1ab8e91e0998\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q7vr4"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.609201 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3e446987-0b7c-46f9-8207-dfe3e84240e4-serving-cert\") pod \"authentication-operator-69f744f599-bg96p\" (UID: \"3e446987-0b7c-46f9-8207-dfe3e84240e4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bg96p"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.609228 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2c5167f6-623a-4b81-ad25-0e866528fd94-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-sfnhq\" (UID: \"2c5167f6-623a-4b81-ad25-0e866528fd94\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfnhq"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.609251 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0439276f-d9ad-40f1-aa18-d05a1ee2b768-config\") pod \"machine-api-operator-5694c8668f-c4zgz\" (UID: \"0439276f-d9ad-40f1-aa18-d05a1ee2b768\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-c4zgz"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.609271 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-br7bb\" (UniqueName: \"kubernetes.io/projected/4a5edfb4-e7c7-45b3-aada-9f579b2f8434-kube-api-access-br7bb\") pod \"ingress-canary-4fj7f\" (UID: \"4a5edfb4-e7c7-45b3-aada-9f579b2f8434\") " pod="openshift-ingress-canary/ingress-canary-4fj7f"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.609303 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4141dca3-d75b-4da2-ad3d-47ab847ff59b-config\") pod \"console-operator-58897d9998-vffxs\" (UID: \"4141dca3-d75b-4da2-ad3d-47ab847ff59b\") " pod="openshift-console-operator/console-operator-58897d9998-vffxs"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.609324 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bl6jz\" (UniqueName: \"kubernetes.io/projected/b3262acb-1ffb-43da-81c2-8271c1e8acfa-kube-api-access-bl6jz\") pod \"kube-storage-version-migrator-operator-b67b599dd-w85gc\" (UID: \"b3262acb-1ffb-43da-81c2-8271c1e8acfa\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w85gc"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.609371 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ec729dda-4b2d-458b-8e11-6cc8beacb717-bound-sa-token\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.609388 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1167ed60-0864-42bb-aba1-48ba9ee03867-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-knslb\" (UID: \"1167ed60-0864-42bb-aba1-48ba9ee03867\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-knslb"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.609413 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dq5vq\" (UniqueName: \"kubernetes.io/projected/bfeb8a90-cb53-45c4-8650-1931e986a5f3-kube-api-access-dq5vq\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.609459 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.611039 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f2fae918-6d12-482a-9cf1-9cb27efb5f8c-client-ca\") pod \"route-controller-manager-6576b87f9c-dwxpb\" (UID: \"f2fae918-6d12-482a-9cf1-9cb27efb5f8c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dwxpb"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.611078 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f2fae918-6d12-482a-9cf1-9cb27efb5f8c-serving-cert\") pod \"route-controller-manager-6576b87f9c-dwxpb\" (UID: \"f2fae918-6d12-482a-9cf1-9cb27efb5f8c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dwxpb"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.611101 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgvkd\" (UniqueName: \"kubernetes.io/projected/1cc605f1-f5a1-4f06-b05c-fb0881684445-kube-api-access-tgvkd\") pod \"router-default-5444994796-87g76\" (UID: \"1cc605f1-f5a1-4f06-b05c-fb0881684445\") " pod="openshift-ingress/router-default-5444994796-87g76"
Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.611121 4811 reconciler_common.go:245]
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c5e6c2fc-f99a-443f-a0c6-165478a1e838-serving-cert\") pod \"controller-manager-879f6c89f-27m45\" (UID: \"c5e6c2fc-f99a-443f-a0c6-165478a1e838\") " pod="openshift-controller-manager/controller-manager-879f6c89f-27m45" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.611155 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c386978b-8bd6-432e-b6df-9cacf8acd0ae-config\") pod \"machine-approver-56656f9798-qrkpf\" (UID: \"c386978b-8bd6-432e-b6df-9cacf8acd0ae\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qrkpf" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.611578 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.611632 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ec729dda-4b2d-458b-8e11-6cc8beacb717-registry-tls\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.611661 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c386978b-8bd6-432e-b6df-9cacf8acd0ae-config\") pod \"machine-approver-56656f9798-qrkpf\" (UID: \"c386978b-8bd6-432e-b6df-9cacf8acd0ae\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qrkpf" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.611982 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4141dca3-d75b-4da2-ad3d-47ab847ff59b-trusted-ca\") pod \"console-operator-58897d9998-vffxs\" (UID: \"4141dca3-d75b-4da2-ad3d-47ab847ff59b\") " pod="openshift-console-operator/console-operator-58897d9998-vffxs" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.612095 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-gf4gj" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.612516 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4141dca3-d75b-4da2-ad3d-47ab847ff59b-config\") pod \"console-operator-58897d9998-vffxs\" (UID: \"4141dca3-d75b-4da2-ad3d-47ab847ff59b\") " pod="openshift-console-operator/console-operator-58897d9998-vffxs" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.613193 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1167ed60-0864-42bb-aba1-48ba9ee03867-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-knslb\" (UID: \"1167ed60-0864-42bb-aba1-48ba9ee03867\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-knslb" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.613359 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4141dca3-d75b-4da2-ad3d-47ab847ff59b-trusted-ca\") pod \"console-operator-58897d9998-vffxs\" (UID: \"4141dca3-d75b-4da2-ad3d-47ab847ff59b\") " pod="openshift-console-operator/console-operator-58897d9998-vffxs" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.614153 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f2fae918-6d12-482a-9cf1-9cb27efb5f8c-client-ca\") pod \"route-controller-manager-6576b87f9c-dwxpb\" (UID: \"f2fae918-6d12-482a-9cf1-9cb27efb5f8c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dwxpb" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.614184 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/76a1cb2c-b73b-4d38-90b2-1ab8e91e0998-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-q7vr4\" (UID: \"76a1cb2c-b73b-4d38-90b2-1ab8e91e0998\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q7vr4" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.614289 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/797642e6-462e-434f-952f-ea096d1a4e47-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-tb2z9\" (UID: \"797642e6-462e-434f-952f-ea096d1a4e47\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tb2z9" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.614343 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/1167ed60-0864-42bb-aba1-48ba9ee03867-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-knslb\" (UID: \"1167ed60-0864-42bb-aba1-48ba9ee03867\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-knslb" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.614407 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rpd5p\" (UniqueName: \"kubernetes.io/projected/f4a001af-cfdf-49ed-9d59-646a1b5683da-kube-api-access-rpd5p\") pod \"etcd-operator-b45778765-jtvpg\" (UID: \"f4a001af-cfdf-49ed-9d59-646a1b5683da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jtvpg" Jan 28 15:47:27 crc kubenswrapper[4811]: 
I0128 15:47:27.614459 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/bfeb8a90-cb53-45c4-8650-1931e986a5f3-image-import-ca\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.614503 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77wzd\" (UniqueName: \"kubernetes.io/projected/1167ed60-0864-42bb-aba1-48ba9ee03867-kube-api-access-77wzd\") pod \"cluster-image-registry-operator-dc59b4c8b-knslb\" (UID: \"1167ed60-0864-42bb-aba1-48ba9ee03867\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-knslb" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.614530 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f28ff403-47a1-47de-b3f4-f6519b75064f-audit-dir\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.614555 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/0439276f-d9ad-40f1-aa18-d05a1ee2b768-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-c4zgz\" (UID: \"0439276f-d9ad-40f1-aa18-d05a1ee2b768\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-c4zgz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.614600 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/9763c54a-c0e1-4f5f-969e-ea775fdfa175-proxy-tls\") pod \"machine-config-operator-74547568cd-7wsdg\" (UID: \"9763c54a-c0e1-4f5f-969e-ea775fdfa175\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7wsdg" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.614608 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2fc54817-ea76-451c-8dc9-2bf6684dc2b8-serving-cert\") pod \"apiserver-7bbb656c7d-b9zf8\" (UID: \"2fc54817-ea76-451c-8dc9-2bf6684dc2b8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.614618 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/68df2bcf-ac5e-4868-80dc-9b568e2e3eba-bound-sa-token\") pod \"ingress-operator-5b745b69d9-d4ckz\" (UID: \"68df2bcf-ac5e-4868-80dc-9b568e2e3eba\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4ckz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.614635 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9cea0670-a73c-4ef8-962e-ac12bdef4283-metrics-tls\") pod \"dns-operator-744455d44c-lmds9\" (UID: \"9cea0670-a73c-4ef8-962e-ac12bdef4283\") " pod="openshift-dns-operator/dns-operator-744455d44c-lmds9" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.614652 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: 
\"kubernetes.io/secret/4a5edfb4-e7c7-45b3-aada-9f579b2f8434-cert\") pod \"ingress-canary-4fj7f\" (UID: \"4a5edfb4-e7c7-45b3-aada-9f579b2f8434\") " pod="openshift-ingress-canary/ingress-canary-4fj7f" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.614892 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f28ff403-47a1-47de-b3f4-f6519b75064f-audit-dir\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.614903 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfeb8a90-cb53-45c4-8650-1931e986a5f3-config\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.614942 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q46tb\" (UniqueName: \"kubernetes.io/projected/6c6e1eda-08d6-4430-8d7c-64c859d57f60-kube-api-access-q46tb\") pod \"migrator-59844c95c7-47dsj\" (UID: \"6c6e1eda-08d6-4430-8d7c-64c859d57f60\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-47dsj" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.614962 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bx5jn"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.615126 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.615161 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2fae918-6d12-482a-9cf1-9cb27efb5f8c-config\") pod \"route-controller-manager-6576b87f9c-dwxpb\" (UID: \"f2fae918-6d12-482a-9cf1-9cb27efb5f8c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dwxpb" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.615300 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/bfeb8a90-cb53-45c4-8650-1931e986a5f3-etcd-serving-ca\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.615326 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bfeb8a90-cb53-45c4-8650-1931e986a5f3-trusted-ca-bundle\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.615365 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/c386978b-8bd6-432e-b6df-9cacf8acd0ae-machine-approver-tls\") pod 
\"machine-approver-56656f9798-qrkpf\" (UID: \"c386978b-8bd6-432e-b6df-9cacf8acd0ae\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qrkpf" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.615423 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.615454 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bx5jn" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.615470 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ktm6\" (UniqueName: \"kubernetes.io/projected/c5e6c2fc-f99a-443f-a0c6-165478a1e838-kube-api-access-9ktm6\") pod \"controller-manager-879f6c89f-27m45\" (UID: \"c5e6c2fc-f99a-443f-a0c6-165478a1e838\") " pod="openshift-controller-manager/controller-manager-879f6c89f-27m45" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.615497 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/c40e8bbb-6ea1-4838-8ea1-a6ece5ee2695-signing-key\") pod \"service-ca-9c57cc56f-nzw9g\" (UID: \"c40e8bbb-6ea1-4838-8ea1-a6ece5ee2695\") " pod="openshift-service-ca/service-ca-9c57cc56f-nzw9g" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.615524 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/cfa466bf-9337-45cd-a739-c0d3b5521e13-console-serving-cert\") pod \"console-f9d7485db-xcx2z\" (UID: \"cfa466bf-9337-45cd-a739-c0d3b5521e13\") " pod="openshift-console/console-f9d7485db-xcx2z" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.615576 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/cfa466bf-9337-45cd-a739-c0d3b5521e13-oauth-serving-cert\") pod \"console-f9d7485db-xcx2z\" (UID: \"cfa466bf-9337-45cd-a739-c0d3b5521e13\") " pod="openshift-console/console-f9d7485db-xcx2z" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.615587 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.615595 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3262acb-1ffb-43da-81c2-8271c1e8acfa-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-w85gc\" (UID: \"b3262acb-1ffb-43da-81c2-8271c1e8acfa\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w85gc" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.615645 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.615669 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0539bf34-a663-45a1-b9d2-b7a43b62b6ad-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-5qxgr\" (UID: \"0539bf34-a663-45a1-b9d2-b7a43b62b6ad\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5qxgr" Jan 28 15:47:27 crc kubenswrapper[4811]: E0128 15:47:27.616144 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:28.116132222 +0000 UTC m=+140.870495805 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.616300 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/bfeb8a90-cb53-45c4-8650-1931e986a5f3-etcd-serving-ca\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.616544 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whdh7\" (UniqueName: \"kubernetes.io/projected/f28ff403-47a1-47de-b3f4-f6519b75064f-kube-api-access-whdh7\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.616564 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwjps\" (UniqueName: \"kubernetes.io/projected/cfa466bf-9337-45cd-a739-c0d3b5521e13-kube-api-access-hwjps\") pod \"console-f9d7485db-xcx2z\" (UID: \"cfa466bf-9337-45cd-a739-c0d3b5521e13\") " pod="openshift-console/console-f9d7485db-xcx2z" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.616609 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfeb8a90-cb53-45c4-8650-1931e986a5f3-config\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.616644 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f2fae918-6d12-482a-9cf1-9cb27efb5f8c-serving-cert\") pod \"route-controller-manager-6576b87f9c-dwxpb\" (UID: \"f2fae918-6d12-482a-9cf1-9cb27efb5f8c\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dwxpb" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.616701 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22z5q\" (UniqueName: \"kubernetes.io/projected/797642e6-462e-434f-952f-ea096d1a4e47-kube-api-access-22z5q\") pod \"cluster-samples-operator-665b6dd947-tb2z9\" (UID: \"797642e6-462e-434f-952f-ea096d1a4e47\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tb2z9" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.616744 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/cfa466bf-9337-45cd-a739-c0d3b5521e13-console-config\") pod \"console-f9d7485db-xcx2z\" (UID: \"cfa466bf-9337-45cd-a739-c0d3b5521e13\") " pod="openshift-console/console-f9d7485db-xcx2z" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.616928 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wfgc\" (UniqueName: \"kubernetes.io/projected/4141dca3-d75b-4da2-ad3d-47ab847ff59b-kube-api-access-2wfgc\") pod \"console-operator-58897d9998-vffxs\" (UID: \"4141dca3-d75b-4da2-ad3d-47ab847ff59b\") " pod="openshift-console-operator/console-operator-58897d9998-vffxs" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.616998 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2fc54817-ea76-451c-8dc9-2bf6684dc2b8-etcd-client\") pod \"apiserver-7bbb656c7d-b9zf8\" (UID: \"2fc54817-ea76-451c-8dc9-2bf6684dc2b8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.617029 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c8aea032-97b6-474a-a09f-b04c638a8a54-profile-collector-cert\") pod \"catalog-operator-68c6474976-gxspm\" (UID: \"c8aea032-97b6-474a-a09f-b04c638a8a54\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gxspm" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.617625 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2fae918-6d12-482a-9cf1-9cb27efb5f8c-config\") pod \"route-controller-manager-6576b87f9c-dwxpb\" (UID: \"f2fae918-6d12-482a-9cf1-9cb27efb5f8c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dwxpb" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.617684 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ef30f9b4-f11c-4149-a320-539203ac5666-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-t9r9j\" (UID: \"ef30f9b4-f11c-4149-a320-539203ac5666\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-t9r9j" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.617705 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g77dg\" (UniqueName: \"kubernetes.io/projected/9763c54a-c0e1-4f5f-969e-ea775fdfa175-kube-api-access-g77dg\") pod \"machine-config-operator-74547568cd-7wsdg\" (UID: \"9763c54a-c0e1-4f5f-969e-ea775fdfa175\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7wsdg" Jan 28 15:47:27 crc 
kubenswrapper[4811]: I0128 15:47:27.617877 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/cfa466bf-9337-45cd-a739-c0d3b5521e13-console-config\") pod \"console-f9d7485db-xcx2z\" (UID: \"cfa466bf-9337-45cd-a739-c0d3b5521e13\") " pod="openshift-console/console-f9d7485db-xcx2z" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.617942 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/bfeb8a90-cb53-45c4-8650-1931e986a5f3-audit-dir\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.617984 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcwmq\" (UniqueName: \"kubernetes.io/projected/c386978b-8bd6-432e-b6df-9cacf8acd0ae-kube-api-access-wcwmq\") pod \"machine-approver-56656f9798-qrkpf\" (UID: \"c386978b-8bd6-432e-b6df-9cacf8acd0ae\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qrkpf" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.618555 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/cfa466bf-9337-45cd-a739-c0d3b5521e13-oauth-serving-cert\") pod \"console-f9d7485db-xcx2z\" (UID: \"cfa466bf-9337-45cd-a739-c0d3b5521e13\") " pod="openshift-console/console-f9d7485db-xcx2z" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.618847 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.618901 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f28ff403-47a1-47de-b3f4-f6519b75064f-audit-policies\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.618906 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-bg96p"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.618968 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/bfeb8a90-cb53-45c4-8650-1931e986a5f3-audit-dir\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.619052 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3e446987-0b7c-46f9-8207-dfe3e84240e4-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-bg96p\" (UID: \"3e446987-0b7c-46f9-8207-dfe3e84240e4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bg96p" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.619126 4811 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f4a001af-cfdf-49ed-9d59-646a1b5683da-serving-cert\") pod \"etcd-operator-b45778765-jtvpg\" (UID: \"f4a001af-cfdf-49ed-9d59-646a1b5683da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jtvpg" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.619244 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f4a001af-cfdf-49ed-9d59-646a1b5683da-etcd-client\") pod \"etcd-operator-b45778765-jtvpg\" (UID: \"f4a001af-cfdf-49ed-9d59-646a1b5683da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jtvpg" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.619275 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxgb7\" (UniqueName: \"kubernetes.io/projected/c40e8bbb-6ea1-4838-8ea1-a6ece5ee2695-kube-api-access-xxgb7\") pod \"service-ca-9c57cc56f-nzw9g\" (UID: \"c40e8bbb-6ea1-4838-8ea1-a6ece5ee2695\") " pod="openshift-service-ca/service-ca-9c57cc56f-nzw9g" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.619319 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76a1cb2c-b73b-4d38-90b2-1ab8e91e0998-config\") pod \"openshift-apiserver-operator-796bbdcf4f-q7vr4\" (UID: \"76a1cb2c-b73b-4d38-90b2-1ab8e91e0998\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q7vr4" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.619343 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c386978b-8bd6-432e-b6df-9cacf8acd0ae-auth-proxy-config\") pod \"machine-approver-56656f9798-qrkpf\" (UID: \"c386978b-8bd6-432e-b6df-9cacf8acd0ae\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qrkpf" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.619368 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.619416 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/797642e6-462e-434f-952f-ea096d1a4e47-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-tb2z9\" (UID: \"797642e6-462e-434f-952f-ea096d1a4e47\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tb2z9" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.619496 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8cm9\" (UniqueName: \"kubernetes.io/projected/ec729dda-4b2d-458b-8e11-6cc8beacb717-kube-api-access-z8cm9\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.619556 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/c8aea032-97b6-474a-a09f-b04c638a8a54-srv-cert\") pod \"catalog-operator-68c6474976-gxspm\" (UID: \"c8aea032-97b6-474a-a09f-b04c638a8a54\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gxspm" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.619584 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.619628 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3262acb-1ffb-43da-81c2-8271c1e8acfa-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-w85gc\" (UID: \"b3262acb-1ffb-43da-81c2-8271c1e8acfa\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w85gc" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.619767 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.619977 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3e446987-0b7c-46f9-8207-dfe3e84240e4-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-bg96p\" (UID: \"3e446987-0b7c-46f9-8207-dfe3e84240e4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bg96p" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.620023 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/cfa466bf-9337-45cd-a739-c0d3b5521e13-console-serving-cert\") pod \"console-f9d7485db-xcx2z\" (UID: \"cfa466bf-9337-45cd-a739-c0d3b5521e13\") " pod="openshift-console/console-f9d7485db-xcx2z" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.620028 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tb2z9"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.620229 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76a1cb2c-b73b-4d38-90b2-1ab8e91e0998-config\") pod \"openshift-apiserver-operator-796bbdcf4f-q7vr4\" (UID: \"76a1cb2c-b73b-4d38-90b2-1ab8e91e0998\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q7vr4" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.620274 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7pnz\" (UniqueName: \"kubernetes.io/projected/2fc54817-ea76-451c-8dc9-2bf6684dc2b8-kube-api-access-r7pnz\") pod \"apiserver-7bbb656c7d-b9zf8\" (UID: \"2fc54817-ea76-451c-8dc9-2bf6684dc2b8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.620315 4811 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c386978b-8bd6-432e-b6df-9cacf8acd0ae-auth-proxy-config\") pod \"machine-approver-56656f9798-qrkpf\" (UID: \"c386978b-8bd6-432e-b6df-9cacf8acd0ae\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qrkpf" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.620377 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.620442 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/68df2bcf-ac5e-4868-80dc-9b568e2e3eba-metrics-tls\") pod \"ingress-operator-5b745b69d9-d4ckz\" (UID: \"68df2bcf-ac5e-4868-80dc-9b568e2e3eba\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4ckz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.620585 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j76cb\" (UniqueName: \"kubernetes.io/projected/76a1cb2c-b73b-4d38-90b2-1ab8e91e0998-kube-api-access-j76cb\") pod \"openshift-apiserver-operator-796bbdcf4f-q7vr4\" (UID: \"76a1cb2c-b73b-4d38-90b2-1ab8e91e0998\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q7vr4" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.620646 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4141dca3-d75b-4da2-ad3d-47ab847ff59b-serving-cert\") pod \"console-operator-58897d9998-vffxs\" (UID: \"4141dca3-d75b-4da2-ad3d-47ab847ff59b\") " pod="openshift-console-operator/console-operator-58897d9998-vffxs" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.620770 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-262pc\" (UniqueName: \"kubernetes.io/projected/c6d392c6-2fcb-4d68-81da-6cb52a6a5e8f-kube-api-access-262pc\") pod \"control-plane-machine-set-operator-78cbb6b69f-shsbw\" (UID: \"c6d392c6-2fcb-4d68-81da-6cb52a6a5e8f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-shsbw" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.620894 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1cc605f1-f5a1-4f06-b05c-fb0881684445-service-ca-bundle\") pod \"router-default-5444994796-87g76\" (UID: \"1cc605f1-f5a1-4f06-b05c-fb0881684445\") " pod="openshift-ingress/router-default-5444994796-87g76" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.620932 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2c5167f6-623a-4b81-ad25-0e866528fd94-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-sfnhq\" (UID: \"2c5167f6-623a-4b81-ad25-0e866528fd94\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfnhq" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.620966 4811 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/bfeb8a90-cb53-45c4-8650-1931e986a5f3-etcd-client\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.620990 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.621014 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e446987-0b7c-46f9-8207-dfe3e84240e4-config\") pod \"authentication-operator-69f744f599-bg96p\" (UID: \"3e446987-0b7c-46f9-8207-dfe3e84240e4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bg96p" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.621083 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ec729dda-4b2d-458b-8e11-6cc8beacb717-registry-certificates\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.621111 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/bfeb8a90-cb53-45c4-8650-1931e986a5f3-audit\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.621135 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bfeb8a90-cb53-45c4-8650-1931e986a5f3-serving-cert\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.621160 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/c6d392c6-2fcb-4d68-81da-6cb52a6a5e8f-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-shsbw\" (UID: \"c6d392c6-2fcb-4d68-81da-6cb52a6a5e8f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-shsbw" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.621187 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6kk5\" (UniqueName: \"kubernetes.io/projected/e40716fd-b721-424a-84d9-935b50543fd9-kube-api-access-m6kk5\") pod \"service-ca-operator-777779d784-5t2fc\" (UID: \"e40716fd-b721-424a-84d9-935b50543fd9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-5t2fc" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.621227 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.621237 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0439276f-d9ad-40f1-aa18-d05a1ee2b768-images\") pod \"machine-api-operator-5694c8668f-c4zgz\" (UID: \"0439276f-d9ad-40f1-aa18-d05a1ee2b768\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-c4zgz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.621262 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/9763c54a-c0e1-4f5f-969e-ea775fdfa175-images\") pod \"machine-config-operator-74547568cd-7wsdg\" (UID: \"9763c54a-c0e1-4f5f-969e-ea775fdfa175\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7wsdg" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.621825 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e446987-0b7c-46f9-8207-dfe3e84240e4-config\") pod \"authentication-operator-69f744f599-bg96p\" (UID: \"3e446987-0b7c-46f9-8207-dfe3e84240e4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bg96p" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.621862 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-gmjnj"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.622069 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bfeb8a90-cb53-45c4-8650-1931e986a5f3-trusted-ca-bundle\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.622153 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.622188 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e40716fd-b721-424a-84d9-935b50543fd9-serving-cert\") pod \"service-ca-operator-777779d784-5t2fc\" (UID: \"e40716fd-b721-424a-84d9-935b50543fd9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-5t2fc" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.622211 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/1cc605f1-f5a1-4f06-b05c-fb0881684445-default-certificate\") pod \"router-default-5444994796-87g76\" (UID: \"1cc605f1-f5a1-4f06-b05c-fb0881684445\") " pod="openshift-ingress/router-default-5444994796-87g76" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.622408 4811 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.623356 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/bfeb8a90-cb53-45c4-8650-1931e986a5f3-image-import-ca\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.623654 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.623925 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/bfeb8a90-cb53-45c4-8650-1931e986a5f3-node-pullsecrets\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.623990 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/bfeb8a90-cb53-45c4-8650-1931e986a5f3-node-pullsecrets\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.624022 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/2fc54817-ea76-451c-8dc9-2bf6684dc2b8-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-b9zf8\" (UID: \"2fc54817-ea76-451c-8dc9-2bf6684dc2b8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.624059 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7n5fd\" (UniqueName: \"kubernetes.io/projected/68df2bcf-ac5e-4868-80dc-9b568e2e3eba-kube-api-access-7n5fd\") pod \"ingress-operator-5b745b69d9-d4ckz\" (UID: \"68df2bcf-ac5e-4868-80dc-9b568e2e3eba\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4ckz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.624083 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e40716fd-b721-424a-84d9-935b50543fd9-config\") pod \"service-ca-operator-777779d784-5t2fc\" (UID: \"e40716fd-b721-424a-84d9-935b50543fd9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-5t2fc" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.624108 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/78a73da1-92b0-4724-90aa-1a8f5aa3e2ec-config-volume\") pod \"collect-profiles-29493585-8md6j\" (UID: \"78a73da1-92b0-4724-90aa-1a8f5aa3e2ec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493585-8md6j" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.624174 4811 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ec729dda-4b2d-458b-8e11-6cc8beacb717-installation-pull-secrets\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.624203 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/78a73da1-92b0-4724-90aa-1a8f5aa3e2ec-secret-volume\") pod \"collect-profiles-29493585-8md6j\" (UID: \"78a73da1-92b0-4724-90aa-1a8f5aa3e2ec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493585-8md6j" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.624726 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/2fc54817-ea76-451c-8dc9-2bf6684dc2b8-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-b9zf8\" (UID: \"2fc54817-ea76-451c-8dc9-2bf6684dc2b8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.624968 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/c386978b-8bd6-432e-b6df-9cacf8acd0ae-machine-approver-tls\") pod \"machine-approver-56656f9798-qrkpf\" (UID: \"c386978b-8bd6-432e-b6df-9cacf8acd0ae\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qrkpf" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.625090 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/bfeb8a90-cb53-45c4-8650-1931e986a5f3-etcd-client\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.625184 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5e6c2fc-f99a-443f-a0c6-165478a1e838-config\") pod \"controller-manager-879f6c89f-27m45\" (UID: \"c5e6c2fc-f99a-443f-a0c6-165478a1e838\") " pod="openshift-controller-manager/controller-manager-879f6c89f-27m45" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.625229 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.625274 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0ac199b3-b2fa-4acf-92e5-0840b7da5c41-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-2jfj5\" (UID: \"0ac199b3-b2fa-4acf-92e5-0840b7da5c41\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jfj5" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.625444 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: 
\"kubernetes.io/configmap/f4a001af-cfdf-49ed-9d59-646a1b5683da-etcd-ca\") pod \"etcd-operator-b45778765-jtvpg\" (UID: \"f4a001af-cfdf-49ed-9d59-646a1b5683da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jtvpg" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.625496 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/c40e8bbb-6ea1-4838-8ea1-a6ece5ee2695-signing-cabundle\") pod \"service-ca-9c57cc56f-nzw9g\" (UID: \"c40e8bbb-6ea1-4838-8ea1-a6ece5ee2695\") " pod="openshift-service-ca/service-ca-9c57cc56f-nzw9g" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.625515 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7426\" (UniqueName: \"kubernetes.io/projected/78a73da1-92b0-4724-90aa-1a8f5aa3e2ec-kube-api-access-h7426\") pod \"collect-profiles-29493585-8md6j\" (UID: \"78a73da1-92b0-4724-90aa-1a8f5aa3e2ec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493585-8md6j" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.625524 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.625547 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1cc605f1-f5a1-4f06-b05c-fb0881684445-metrics-certs\") pod \"router-default-5444994796-87g76\" (UID: \"1cc605f1-f5a1-4f06-b05c-fb0881684445\") " pod="openshift-ingress/router-default-5444994796-87g76" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.625585 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/2fc54817-ea76-451c-8dc9-2bf6684dc2b8-encryption-config\") pod \"apiserver-7bbb656c7d-b9zf8\" (UID: \"2fc54817-ea76-451c-8dc9-2bf6684dc2b8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.625625 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/68df2bcf-ac5e-4868-80dc-9b568e2e3eba-trusted-ca\") pod \"ingress-operator-5b745b69d9-d4ckz\" (UID: \"68df2bcf-ac5e-4868-80dc-9b568e2e3eba\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4ckz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.625664 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4a001af-cfdf-49ed-9d59-646a1b5683da-config\") pod \"etcd-operator-b45778765-jtvpg\" (UID: \"f4a001af-cfdf-49ed-9d59-646a1b5683da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jtvpg" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.625684 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/f2e6c078-567b-4c22-aece-ebc4f195e9a2-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-xg589\" 
(UID: \"f2e6c078-567b-4c22-aece-ebc4f195e9a2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xg589" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.625891 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/bfeb8a90-cb53-45c4-8650-1931e986a5f3-audit\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.626017 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/1cc605f1-f5a1-4f06-b05c-fb0881684445-stats-auth\") pod \"router-default-5444994796-87g76\" (UID: \"1cc605f1-f5a1-4f06-b05c-fb0881684445\") " pod="openshift-ingress/router-default-5444994796-87g76" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.626018 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.626041 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c2b17da8-0524-4615-a9e5-f1c59a0fde0b-serving-cert\") pod \"openshift-config-operator-7777fb866f-gmjnj\" (UID: \"c2b17da8-0524-4615-a9e5-f1c59a0fde0b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gmjnj" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.626081 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/c2b17da8-0524-4615-a9e5-f1c59a0fde0b-available-featuregates\") pod \"openshift-config-operator-7777fb866f-gmjnj\" (UID: \"c2b17da8-0524-4615-a9e5-f1c59a0fde0b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gmjnj" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.626116 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.626279 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4141dca3-d75b-4da2-ad3d-47ab847ff59b-serving-cert\") pod \"console-operator-58897d9998-vffxs\" (UID: \"4141dca3-d75b-4da2-ad3d-47ab847ff59b\") " pod="openshift-console-operator/console-operator-58897d9998-vffxs" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.626373 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzp6s\" (UniqueName: \"kubernetes.io/projected/3e446987-0b7c-46f9-8207-dfe3e84240e4-kube-api-access-rzp6s\") pod \"authentication-operator-69f744f599-bg96p\" (UID: \"3e446987-0b7c-46f9-8207-dfe3e84240e4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bg96p" Jan 28 15:47:27 crc kubenswrapper[4811]: 
I0128 15:47:27.626488 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c5e6c2fc-f99a-443f-a0c6-165478a1e838-client-ca\") pod \"controller-manager-879f6c89f-27m45\" (UID: \"c5e6c2fc-f99a-443f-a0c6-165478a1e838\") " pod="openshift-controller-manager/controller-manager-879f6c89f-27m45" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.626629 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkgl9\" (UniqueName: \"kubernetes.io/projected/0439276f-d9ad-40f1-aa18-d05a1ee2b768-kube-api-access-dkgl9\") pod \"machine-api-operator-5694c8668f-c4zgz\" (UID: \"0439276f-d9ad-40f1-aa18-d05a1ee2b768\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-c4zgz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.626739 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2fc54817-ea76-451c-8dc9-2bf6684dc2b8-audit-dir\") pod \"apiserver-7bbb656c7d-b9zf8\" (UID: \"2fc54817-ea76-451c-8dc9-2bf6684dc2b8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.626772 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2fc54817-ea76-451c-8dc9-2bf6684dc2b8-audit-dir\") pod \"apiserver-7bbb656c7d-b9zf8\" (UID: \"2fc54817-ea76-451c-8dc9-2bf6684dc2b8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.627338 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.626848 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phr9d\" (UniqueName: \"kubernetes.io/projected/0ac199b3-b2fa-4acf-92e5-0840b7da5c41-kube-api-access-phr9d\") pod \"openshift-controller-manager-operator-756b6f6bc6-2jfj5\" (UID: \"0ac199b3-b2fa-4acf-92e5-0840b7da5c41\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jfj5" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.627872 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ec729dda-4b2d-458b-8e11-6cc8beacb717-ca-trust-extracted\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.627980 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ec729dda-4b2d-458b-8e11-6cc8beacb717-trusted-ca\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.628077 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" 
(UniqueName: \"kubernetes.io/projected/0539bf34-a663-45a1-b9d2-b7a43b62b6ad-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-5qxgr\" (UID: \"0539bf34-a663-45a1-b9d2-b7a43b62b6ad\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5qxgr" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.628187 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhlsj\" (UniqueName: \"kubernetes.io/projected/c8aea032-97b6-474a-a09f-b04c638a8a54-kube-api-access-bhlsj\") pod \"catalog-operator-68c6474976-gxspm\" (UID: \"c8aea032-97b6-474a-a09f-b04c638a8a54\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gxspm" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.628242 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ec729dda-4b2d-458b-8e11-6cc8beacb717-ca-trust-extracted\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.627979 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/1167ed60-0864-42bb-aba1-48ba9ee03867-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-knslb\" (UID: \"1167ed60-0864-42bb-aba1-48ba9ee03867\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-knslb" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.629284 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ec729dda-4b2d-458b-8e11-6cc8beacb717-trusted-ca\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.630177 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ec729dda-4b2d-458b-8e11-6cc8beacb717-registry-certificates\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.631570 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/c2b17da8-0524-4615-a9e5-f1c59a0fde0b-available-featuregates\") pod \"openshift-config-operator-7777fb866f-gmjnj\" (UID: \"c2b17da8-0524-4615-a9e5-f1c59a0fde0b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gmjnj" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.631650 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-ttn2c"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.632638 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ttn2c" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.633702 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f28ff403-47a1-47de-b3f4-f6519b75064f-audit-policies\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.635781 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bfeb8a90-cb53-45c4-8650-1931e986a5f3-serving-cert\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.636875 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.636893 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ec729dda-4b2d-458b-8e11-6cc8beacb717-installation-pull-secrets\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.636881 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.637229 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c2b17da8-0524-4615-a9e5-f1c59a0fde0b-serving-cert\") pod \"openshift-config-operator-7777fb866f-gmjnj\" (UID: \"c2b17da8-0524-4615-a9e5-f1c59a0fde0b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gmjnj" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.638234 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.638787 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3e446987-0b7c-46f9-8207-dfe3e84240e4-serving-cert\") pod \"authentication-operator-69f744f599-bg96p\" (UID: \"3e446987-0b7c-46f9-8207-dfe3e84240e4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bg96p" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.640742 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dwxpb"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.645256 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-controller-manager/controller-manager-879f6c89f-27m45"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.645740 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/2fc54817-ea76-451c-8dc9-2bf6684dc2b8-encryption-config\") pod \"apiserver-7bbb656c7d-b9zf8\" (UID: \"2fc54817-ea76-451c-8dc9-2bf6684dc2b8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.649882 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.650841 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-knslb"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.653041 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-t9r9j"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.656347 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jfj5"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.658159 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-c4zgz"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.658416 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-hcsxq"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.659910 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-rkd9p"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.661011 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-dgnfz"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.662334 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xg589"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.663695 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-7wsdg"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.664852 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-4fj7f"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.665993 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-xcx2z"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.667561 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-shsbw"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.668836 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.668842 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-vffxs"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.670474 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-jvb2s"] Jan 28 15:47:27 crc 
kubenswrapper[4811]: I0128 15:47:27.671551 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gxspm"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.672678 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-lqmnb"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.673248 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-lqmnb" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.673928 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-q66fr"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.674743 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-q66fr" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.675102 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-47dsj"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.676379 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-dssbm"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.677683 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-nzw9g"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.679210 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-5t2fc"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.680837 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-gf4gj"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.682102 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q7vr4"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.683489 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w85gc"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.685137 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5qxgr"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.686790 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfnhq"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.698030 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.700001 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493585-8md6j"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.703077 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-jtvpg"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.704072 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tknqk"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.705515 4811 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-lmds9"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.706506 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-d4ckz"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.707781 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-ttn2c"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.708817 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.709310 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-q66fr"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.710684 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bx5jn"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.712107 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-lb8jv"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.713488 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-lb8jv"] Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.713591 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-lb8jv" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.728937 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:47:27 crc kubenswrapper[4811]: E0128 15:47:27.729102 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:28.229078675 +0000 UTC m=+140.983442258 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.729191 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7n5fd\" (UniqueName: \"kubernetes.io/projected/68df2bcf-ac5e-4868-80dc-9b568e2e3eba-kube-api-access-7n5fd\") pod \"ingress-operator-5b745b69d9-d4ckz\" (UID: \"68df2bcf-ac5e-4868-80dc-9b568e2e3eba\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4ckz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.729228 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e40716fd-b721-424a-84d9-935b50543fd9-config\") pod \"service-ca-operator-777779d784-5t2fc\" (UID: \"e40716fd-b721-424a-84d9-935b50543fd9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-5t2fc" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.729252 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/78a73da1-92b0-4724-90aa-1a8f5aa3e2ec-config-volume\") pod \"collect-profiles-29493585-8md6j\" (UID: \"78a73da1-92b0-4724-90aa-1a8f5aa3e2ec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493585-8md6j" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.729278 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/78a73da1-92b0-4724-90aa-1a8f5aa3e2ec-secret-volume\") pod \"collect-profiles-29493585-8md6j\" (UID: \"78a73da1-92b0-4724-90aa-1a8f5aa3e2ec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493585-8md6j" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.729299 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5e6c2fc-f99a-443f-a0c6-165478a1e838-config\") pod \"controller-manager-879f6c89f-27m45\" (UID: \"c5e6c2fc-f99a-443f-a0c6-165478a1e838\") " pod="openshift-controller-manager/controller-manager-879f6c89f-27m45" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.729329 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjksn\" (UniqueName: \"kubernetes.io/projected/86879674-3f52-439f-ab05-e2cfc81edf75-kube-api-access-xjksn\") pod \"olm-operator-6b444d44fb-bx5jn\" (UID: \"86879674-3f52-439f-ab05-e2cfc81edf75\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bx5jn" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.729358 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0ac199b3-b2fa-4acf-92e5-0840b7da5c41-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-2jfj5\" (UID: \"0ac199b3-b2fa-4acf-92e5-0840b7da5c41\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jfj5" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.729381 4811 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/f4a001af-cfdf-49ed-9d59-646a1b5683da-etcd-ca\") pod \"etcd-operator-b45778765-jtvpg\" (UID: \"f4a001af-cfdf-49ed-9d59-646a1b5683da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jtvpg" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.729405 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/c40e8bbb-6ea1-4838-8ea1-a6ece5ee2695-signing-cabundle\") pod \"service-ca-9c57cc56f-nzw9g\" (UID: \"c40e8bbb-6ea1-4838-8ea1-a6ece5ee2695\") " pod="openshift-service-ca/service-ca-9c57cc56f-nzw9g" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.729427 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7426\" (UniqueName: \"kubernetes.io/projected/78a73da1-92b0-4724-90aa-1a8f5aa3e2ec-kube-api-access-h7426\") pod \"collect-profiles-29493585-8md6j\" (UID: \"78a73da1-92b0-4724-90aa-1a8f5aa3e2ec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493585-8md6j" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.729469 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1cc605f1-f5a1-4f06-b05c-fb0881684445-metrics-certs\") pod \"router-default-5444994796-87g76\" (UID: \"1cc605f1-f5a1-4f06-b05c-fb0881684445\") " pod="openshift-ingress/router-default-5444994796-87g76" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.729494 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/86879674-3f52-439f-ab05-e2cfc81edf75-profile-collector-cert\") pod \"olm-operator-6b444d44fb-bx5jn\" (UID: \"86879674-3f52-439f-ab05-e2cfc81edf75\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bx5jn" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.729517 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/68df2bcf-ac5e-4868-80dc-9b568e2e3eba-trusted-ca\") pod \"ingress-operator-5b745b69d9-d4ckz\" (UID: \"68df2bcf-ac5e-4868-80dc-9b568e2e3eba\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4ckz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.729539 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4a001af-cfdf-49ed-9d59-646a1b5683da-config\") pod \"etcd-operator-b45778765-jtvpg\" (UID: \"f4a001af-cfdf-49ed-9d59-646a1b5683da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jtvpg" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.729561 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/f2e6c078-567b-4c22-aece-ebc4f195e9a2-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-xg589\" (UID: \"f2e6c078-567b-4c22-aece-ebc4f195e9a2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xg589" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.729612 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.729693 4811 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/1cc605f1-f5a1-4f06-b05c-fb0881684445-stats-auth\") pod \"router-default-5444994796-87g76\" (UID: \"1cc605f1-f5a1-4f06-b05c-fb0881684445\") " pod="openshift-ingress/router-default-5444994796-87g76" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.729726 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c5e6c2fc-f99a-443f-a0c6-165478a1e838-client-ca\") pod \"controller-manager-879f6c89f-27m45\" (UID: \"c5e6c2fc-f99a-443f-a0c6-165478a1e838\") " pod="openshift-controller-manager/controller-manager-879f6c89f-27m45" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.729744 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dkgl9\" (UniqueName: \"kubernetes.io/projected/0439276f-d9ad-40f1-aa18-d05a1ee2b768-kube-api-access-dkgl9\") pod \"machine-api-operator-5694c8668f-c4zgz\" (UID: \"0439276f-d9ad-40f1-aa18-d05a1ee2b768\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-c4zgz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.729763 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phr9d\" (UniqueName: \"kubernetes.io/projected/0ac199b3-b2fa-4acf-92e5-0840b7da5c41-kube-api-access-phr9d\") pod \"openshift-controller-manager-operator-756b6f6bc6-2jfj5\" (UID: \"0ac199b3-b2fa-4acf-92e5-0840b7da5c41\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jfj5" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.729798 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rcfb\" (UniqueName: \"kubernetes.io/projected/104b0b18-f5c1-4257-b6e0-39ca8f30378c-kube-api-access-9rcfb\") pod \"machine-config-controller-84d6567774-ttn2c\" (UID: \"104b0b18-f5c1-4257-b6e0-39ca8f30378c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ttn2c" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.729818 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0539bf34-a663-45a1-b9d2-b7a43b62b6ad-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-5qxgr\" (UID: \"0539bf34-a663-45a1-b9d2-b7a43b62b6ad\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5qxgr" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.729839 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhlsj\" (UniqueName: \"kubernetes.io/projected/c8aea032-97b6-474a-a09f-b04c638a8a54-kube-api-access-bhlsj\") pod \"catalog-operator-68c6474976-gxspm\" (UID: \"c8aea032-97b6-474a-a09f-b04c638a8a54\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gxspm" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.729858 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7b0b6c1f-a9c1-41dc-8495-7646a81b616b-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-jvb2s\" (UID: \"7b0b6c1f-a9c1-41dc-8495-7646a81b616b\") " pod="openshift-marketplace/marketplace-operator-79b997595-jvb2s" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.729949 
4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef30f9b4-f11c-4149-a320-539203ac5666-config\") pod \"kube-apiserver-operator-766d6c64bb-t9r9j\" (UID: \"ef30f9b4-f11c-4149-a320-539203ac5666\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-t9r9j" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.729970 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9763c54a-c0e1-4f5f-969e-ea775fdfa175-auth-proxy-config\") pod \"machine-config-operator-74547568cd-7wsdg\" (UID: \"9763c54a-c0e1-4f5f-969e-ea775fdfa175\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7wsdg" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.729995 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l889n\" (UniqueName: \"kubernetes.io/projected/f2e6c078-567b-4c22-aece-ebc4f195e9a2-kube-api-access-l889n\") pod \"package-server-manager-789f6589d5-xg589\" (UID: \"f2e6c078-567b-4c22-aece-ebc4f195e9a2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xg589" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730012 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/104b0b18-f5c1-4257-b6e0-39ca8f30378c-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-ttn2c\" (UID: \"104b0b18-f5c1-4257-b6e0-39ca8f30378c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ttn2c" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730032 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zp2x\" (UniqueName: \"kubernetes.io/projected/02bc99e0-e3a2-4c8b-b936-b5d7535412d0-kube-api-access-6zp2x\") pod \"downloads-7954f5f757-dssbm\" (UID: \"02bc99e0-e3a2-4c8b-b936-b5d7535412d0\") " pod="openshift-console/downloads-7954f5f757-dssbm" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730057 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ef30f9b4-f11c-4149-a320-539203ac5666-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-t9r9j\" (UID: \"ef30f9b4-f11c-4149-a320-539203ac5666\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-t9r9j" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730090 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0539bf34-a663-45a1-b9d2-b7a43b62b6ad-config\") pod \"kube-controller-manager-operator-78b949d7b-5qxgr\" (UID: \"0539bf34-a663-45a1-b9d2-b7a43b62b6ad\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5qxgr" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730118 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nv27t\" (UniqueName: \"kubernetes.io/projected/9cea0670-a73c-4ef8-962e-ac12bdef4283-kube-api-access-nv27t\") pod \"dns-operator-744455d44c-lmds9\" (UID: \"9cea0670-a73c-4ef8-962e-ac12bdef4283\") " pod="openshift-dns-operator/dns-operator-744455d44c-lmds9" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730135 4811 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c5167f6-623a-4b81-ad25-0e866528fd94-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-sfnhq\" (UID: \"2c5167f6-623a-4b81-ad25-0e866528fd94\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfnhq" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730154 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/f4a001af-cfdf-49ed-9d59-646a1b5683da-etcd-service-ca\") pod \"etcd-operator-b45778765-jtvpg\" (UID: \"f4a001af-cfdf-49ed-9d59-646a1b5683da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jtvpg" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730182 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ac199b3-b2fa-4acf-92e5-0840b7da5c41-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-2jfj5\" (UID: \"0ac199b3-b2fa-4acf-92e5-0840b7da5c41\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jfj5" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730209 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c5e6c2fc-f99a-443f-a0c6-165478a1e838-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-27m45\" (UID: \"c5e6c2fc-f99a-443f-a0c6-165478a1e838\") " pod="openshift-controller-manager/controller-manager-879f6c89f-27m45" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730234 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f25hd\" (UniqueName: \"kubernetes.io/projected/7b0b6c1f-a9c1-41dc-8495-7646a81b616b-kube-api-access-f25hd\") pod \"marketplace-operator-79b997595-jvb2s\" (UID: \"7b0b6c1f-a9c1-41dc-8495-7646a81b616b\") " pod="openshift-marketplace/marketplace-operator-79b997595-jvb2s" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730261 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2c5167f6-623a-4b81-ad25-0e866528fd94-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-sfnhq\" (UID: \"2c5167f6-623a-4b81-ad25-0e866528fd94\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfnhq" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730277 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0439276f-d9ad-40f1-aa18-d05a1ee2b768-config\") pod \"machine-api-operator-5694c8668f-c4zgz\" (UID: \"0439276f-d9ad-40f1-aa18-d05a1ee2b768\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-c4zgz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730295 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-br7bb\" (UniqueName: \"kubernetes.io/projected/4a5edfb4-e7c7-45b3-aada-9f579b2f8434-kube-api-access-br7bb\") pod \"ingress-canary-4fj7f\" (UID: \"4a5edfb4-e7c7-45b3-aada-9f579b2f8434\") " pod="openshift-ingress-canary/ingress-canary-4fj7f" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730336 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bl6jz\" (UniqueName: 
\"kubernetes.io/projected/b3262acb-1ffb-43da-81c2-8271c1e8acfa-kube-api-access-bl6jz\") pod \"kube-storage-version-migrator-operator-b67b599dd-w85gc\" (UID: \"b3262acb-1ffb-43da-81c2-8271c1e8acfa\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w85gc" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730355 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/d0d13dd3-5445-427c-81f2-ddf4eed82723-certs\") pod \"machine-config-server-lqmnb\" (UID: \"d0d13dd3-5445-427c-81f2-ddf4eed82723\") " pod="openshift-machine-config-operator/machine-config-server-lqmnb" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730381 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/104b0b18-f5c1-4257-b6e0-39ca8f30378c-proxy-tls\") pod \"machine-config-controller-84d6567774-ttn2c\" (UID: \"104b0b18-f5c1-4257-b6e0-39ca8f30378c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ttn2c" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730440 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgvkd\" (UniqueName: \"kubernetes.io/projected/1cc605f1-f5a1-4f06-b05c-fb0881684445-kube-api-access-tgvkd\") pod \"router-default-5444994796-87g76\" (UID: \"1cc605f1-f5a1-4f06-b05c-fb0881684445\") " pod="openshift-ingress/router-default-5444994796-87g76" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730459 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c5e6c2fc-f99a-443f-a0c6-165478a1e838-serving-cert\") pod \"controller-manager-879f6c89f-27m45\" (UID: \"c5e6c2fc-f99a-443f-a0c6-165478a1e838\") " pod="openshift-controller-manager/controller-manager-879f6c89f-27m45" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730537 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rpd5p\" (UniqueName: \"kubernetes.io/projected/f4a001af-cfdf-49ed-9d59-646a1b5683da-kube-api-access-rpd5p\") pod \"etcd-operator-b45778765-jtvpg\" (UID: \"f4a001af-cfdf-49ed-9d59-646a1b5683da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jtvpg" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730563 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/0439276f-d9ad-40f1-aa18-d05a1ee2b768-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-c4zgz\" (UID: \"0439276f-d9ad-40f1-aa18-d05a1ee2b768\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-c4zgz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730580 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/9763c54a-c0e1-4f5f-969e-ea775fdfa175-proxy-tls\") pod \"machine-config-operator-74547568cd-7wsdg\" (UID: \"9763c54a-c0e1-4f5f-969e-ea775fdfa175\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7wsdg" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730596 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/68df2bcf-ac5e-4868-80dc-9b568e2e3eba-bound-sa-token\") pod 
\"ingress-operator-5b745b69d9-d4ckz\" (UID: \"68df2bcf-ac5e-4868-80dc-9b568e2e3eba\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4ckz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730612 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/86879674-3f52-439f-ab05-e2cfc81edf75-srv-cert\") pod \"olm-operator-6b444d44fb-bx5jn\" (UID: \"86879674-3f52-439f-ab05-e2cfc81edf75\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bx5jn" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730630 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9cea0670-a73c-4ef8-962e-ac12bdef4283-metrics-tls\") pod \"dns-operator-744455d44c-lmds9\" (UID: \"9cea0670-a73c-4ef8-962e-ac12bdef4283\") " pod="openshift-dns-operator/dns-operator-744455d44c-lmds9" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730647 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4a5edfb4-e7c7-45b3-aada-9f579b2f8434-cert\") pod \"ingress-canary-4fj7f\" (UID: \"4a5edfb4-e7c7-45b3-aada-9f579b2f8434\") " pod="openshift-ingress-canary/ingress-canary-4fj7f" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730694 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/d0d13dd3-5445-427c-81f2-ddf4eed82723-node-bootstrap-token\") pod \"machine-config-server-lqmnb\" (UID: \"d0d13dd3-5445-427c-81f2-ddf4eed82723\") " pod="openshift-machine-config-operator/machine-config-server-lqmnb" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730722 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q46tb\" (UniqueName: \"kubernetes.io/projected/6c6e1eda-08d6-4430-8d7c-64c859d57f60-kube-api-access-q46tb\") pod \"migrator-59844c95c7-47dsj\" (UID: \"6c6e1eda-08d6-4430-8d7c-64c859d57f60\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-47dsj" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730746 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7fcs\" (UniqueName: \"kubernetes.io/projected/d0d13dd3-5445-427c-81f2-ddf4eed82723-kube-api-access-w7fcs\") pod \"machine-config-server-lqmnb\" (UID: \"d0d13dd3-5445-427c-81f2-ddf4eed82723\") " pod="openshift-machine-config-operator/machine-config-server-lqmnb" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730766 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ktm6\" (UniqueName: \"kubernetes.io/projected/c5e6c2fc-f99a-443f-a0c6-165478a1e838-kube-api-access-9ktm6\") pod \"controller-manager-879f6c89f-27m45\" (UID: \"c5e6c2fc-f99a-443f-a0c6-165478a1e838\") " pod="openshift-controller-manager/controller-manager-879f6c89f-27m45" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730785 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/c40e8bbb-6ea1-4838-8ea1-a6ece5ee2695-signing-key\") pod \"service-ca-9c57cc56f-nzw9g\" (UID: \"c40e8bbb-6ea1-4838-8ea1-a6ece5ee2695\") " pod="openshift-service-ca/service-ca-9c57cc56f-nzw9g" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730802 4811 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3262acb-1ffb-43da-81c2-8271c1e8acfa-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-w85gc\" (UID: \"b3262acb-1ffb-43da-81c2-8271c1e8acfa\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w85gc" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730821 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730837 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0539bf34-a663-45a1-b9d2-b7a43b62b6ad-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-5qxgr\" (UID: \"0539bf34-a663-45a1-b9d2-b7a43b62b6ad\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5qxgr" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.730859 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pswnx\" (UniqueName: \"kubernetes.io/projected/264ad201-9fa4-497f-a8c1-5f17c22ed862-kube-api-access-pswnx\") pod \"multus-admission-controller-857f4d67dd-gf4gj\" (UID: \"264ad201-9fa4-497f-a8c1-5f17c22ed862\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-gf4gj" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.731349 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c5e6c2fc-f99a-443f-a0c6-165478a1e838-client-ca\") pod \"controller-manager-879f6c89f-27m45\" (UID: \"c5e6c2fc-f99a-443f-a0c6-165478a1e838\") " pod="openshift-controller-manager/controller-manager-879f6c89f-27m45" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.731980 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/264ad201-9fa4-497f-a8c1-5f17c22ed862-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-gf4gj\" (UID: \"264ad201-9fa4-497f-a8c1-5f17c22ed862\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-gf4gj" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.732105 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c8aea032-97b6-474a-a09f-b04c638a8a54-profile-collector-cert\") pod \"catalog-operator-68c6474976-gxspm\" (UID: \"c8aea032-97b6-474a-a09f-b04c638a8a54\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gxspm" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.732131 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ef30f9b4-f11c-4149-a320-539203ac5666-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-t9r9j\" (UID: \"ef30f9b4-f11c-4149-a320-539203ac5666\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-t9r9j" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.732150 4811 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g77dg\" (UniqueName: \"kubernetes.io/projected/9763c54a-c0e1-4f5f-969e-ea775fdfa175-kube-api-access-g77dg\") pod \"machine-config-operator-74547568cd-7wsdg\" (UID: \"9763c54a-c0e1-4f5f-969e-ea775fdfa175\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7wsdg" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.732192 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f4a001af-cfdf-49ed-9d59-646a1b5683da-serving-cert\") pod \"etcd-operator-b45778765-jtvpg\" (UID: \"f4a001af-cfdf-49ed-9d59-646a1b5683da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jtvpg" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.732208 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f4a001af-cfdf-49ed-9d59-646a1b5683da-etcd-client\") pod \"etcd-operator-b45778765-jtvpg\" (UID: \"f4a001af-cfdf-49ed-9d59-646a1b5683da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jtvpg" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.732224 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxgb7\" (UniqueName: \"kubernetes.io/projected/c40e8bbb-6ea1-4838-8ea1-a6ece5ee2695-kube-api-access-xxgb7\") pod \"service-ca-9c57cc56f-nzw9g\" (UID: \"c40e8bbb-6ea1-4838-8ea1-a6ece5ee2695\") " pod="openshift-service-ca/service-ca-9c57cc56f-nzw9g" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.732249 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c8aea032-97b6-474a-a09f-b04c638a8a54-srv-cert\") pod \"catalog-operator-68c6474976-gxspm\" (UID: \"c8aea032-97b6-474a-a09f-b04c638a8a54\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gxspm" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.732269 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3262acb-1ffb-43da-81c2-8271c1e8acfa-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-w85gc\" (UID: \"b3262acb-1ffb-43da-81c2-8271c1e8acfa\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w85gc" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.732315 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/68df2bcf-ac5e-4868-80dc-9b568e2e3eba-metrics-tls\") pod \"ingress-operator-5b745b69d9-d4ckz\" (UID: \"68df2bcf-ac5e-4868-80dc-9b568e2e3eba\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4ckz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.732399 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-262pc\" (UniqueName: \"kubernetes.io/projected/c6d392c6-2fcb-4d68-81da-6cb52a6a5e8f-kube-api-access-262pc\") pod \"control-plane-machine-set-operator-78cbb6b69f-shsbw\" (UID: \"c6d392c6-2fcb-4d68-81da-6cb52a6a5e8f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-shsbw" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.732476 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/1cc605f1-f5a1-4f06-b05c-fb0881684445-service-ca-bundle\") pod \"router-default-5444994796-87g76\" (UID: \"1cc605f1-f5a1-4f06-b05c-fb0881684445\") " pod="openshift-ingress/router-default-5444994796-87g76" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.732509 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7b0b6c1f-a9c1-41dc-8495-7646a81b616b-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-jvb2s\" (UID: \"7b0b6c1f-a9c1-41dc-8495-7646a81b616b\") " pod="openshift-marketplace/marketplace-operator-79b997595-jvb2s" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.732568 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2c5167f6-623a-4b81-ad25-0e866528fd94-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-sfnhq\" (UID: \"2c5167f6-623a-4b81-ad25-0e866528fd94\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfnhq" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.732625 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/c6d392c6-2fcb-4d68-81da-6cb52a6a5e8f-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-shsbw\" (UID: \"c6d392c6-2fcb-4d68-81da-6cb52a6a5e8f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-shsbw" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.732659 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6kk5\" (UniqueName: \"kubernetes.io/projected/e40716fd-b721-424a-84d9-935b50543fd9-kube-api-access-m6kk5\") pod \"service-ca-operator-777779d784-5t2fc\" (UID: \"e40716fd-b721-424a-84d9-935b50543fd9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-5t2fc" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.732708 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0439276f-d9ad-40f1-aa18-d05a1ee2b768-images\") pod \"machine-api-operator-5694c8668f-c4zgz\" (UID: \"0439276f-d9ad-40f1-aa18-d05a1ee2b768\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-c4zgz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.732735 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/9763c54a-c0e1-4f5f-969e-ea775fdfa175-images\") pod \"machine-config-operator-74547568cd-7wsdg\" (UID: \"9763c54a-c0e1-4f5f-969e-ea775fdfa175\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7wsdg" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.732742 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ac199b3-b2fa-4acf-92e5-0840b7da5c41-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-2jfj5\" (UID: \"0ac199b3-b2fa-4acf-92e5-0840b7da5c41\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jfj5" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.732786 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/e40716fd-b721-424a-84d9-935b50543fd9-serving-cert\") pod \"service-ca-operator-777779d784-5t2fc\" (UID: \"e40716fd-b721-424a-84d9-935b50543fd9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-5t2fc" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.732818 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/1cc605f1-f5a1-4f06-b05c-fb0881684445-default-certificate\") pod \"router-default-5444994796-87g76\" (UID: \"1cc605f1-f5a1-4f06-b05c-fb0881684445\") " pod="openshift-ingress/router-default-5444994796-87g76" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.732835 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9763c54a-c0e1-4f5f-969e-ea775fdfa175-auth-proxy-config\") pod \"machine-config-operator-74547568cd-7wsdg\" (UID: \"9763c54a-c0e1-4f5f-969e-ea775fdfa175\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7wsdg" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.733066 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c5167f6-623a-4b81-ad25-0e866528fd94-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-sfnhq\" (UID: \"2c5167f6-623a-4b81-ad25-0e866528fd94\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfnhq" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.733267 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5e6c2fc-f99a-443f-a0c6-165478a1e838-config\") pod \"controller-manager-879f6c89f-27m45\" (UID: \"c5e6c2fc-f99a-443f-a0c6-165478a1e838\") " pod="openshift-controller-manager/controller-manager-879f6c89f-27m45" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.733387 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0ac199b3-b2fa-4acf-92e5-0840b7da5c41-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-2jfj5\" (UID: \"0ac199b3-b2fa-4acf-92e5-0840b7da5c41\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jfj5" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.733954 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef30f9b4-f11c-4149-a320-539203ac5666-config\") pod \"kube-apiserver-operator-766d6c64bb-t9r9j\" (UID: \"ef30f9b4-f11c-4149-a320-539203ac5666\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-t9r9j" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.734236 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0439276f-d9ad-40f1-aa18-d05a1ee2b768-config\") pod \"machine-api-operator-5694c8668f-c4zgz\" (UID: \"0439276f-d9ad-40f1-aa18-d05a1ee2b768\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-c4zgz" Jan 28 15:47:27 crc kubenswrapper[4811]: E0128 15:47:27.734341 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:28.234322991 +0000 UTC m=+140.988686574 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.734420 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c5e6c2fc-f99a-443f-a0c6-165478a1e838-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-27m45\" (UID: \"c5e6c2fc-f99a-443f-a0c6-165478a1e838\") " pod="openshift-controller-manager/controller-manager-879f6c89f-27m45" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.735844 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0439276f-d9ad-40f1-aa18-d05a1ee2b768-images\") pod \"machine-api-operator-5694c8668f-c4zgz\" (UID: \"0439276f-d9ad-40f1-aa18-d05a1ee2b768\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-c4zgz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.736340 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c5e6c2fc-f99a-443f-a0c6-165478a1e838-serving-cert\") pod \"controller-manager-879f6c89f-27m45\" (UID: \"c5e6c2fc-f99a-443f-a0c6-165478a1e838\") " pod="openshift-controller-manager/controller-manager-879f6c89f-27m45" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.736368 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ef30f9b4-f11c-4149-a320-539203ac5666-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-t9r9j\" (UID: \"ef30f9b4-f11c-4149-a320-539203ac5666\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-t9r9j" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.736561 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2c5167f6-623a-4b81-ad25-0e866528fd94-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-sfnhq\" (UID: \"2c5167f6-623a-4b81-ad25-0e866528fd94\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfnhq" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.738262 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/0439276f-d9ad-40f1-aa18-d05a1ee2b768-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-c4zgz\" (UID: \"0439276f-d9ad-40f1-aa18-d05a1ee2b768\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-c4zgz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.749149 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.769545 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.789417 4811 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.809495 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.828839 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.833354 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:47:27 crc kubenswrapper[4811]: E0128 15:47:27.833567 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:28.333540774 +0000 UTC m=+141.087904357 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.833659 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.833717 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pswnx\" (UniqueName: \"kubernetes.io/projected/264ad201-9fa4-497f-a8c1-5f17c22ed862-kube-api-access-pswnx\") pod \"multus-admission-controller-857f4d67dd-gf4gj\" (UID: \"264ad201-9fa4-497f-a8c1-5f17c22ed862\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-gf4gj" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.833753 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/264ad201-9fa4-497f-a8c1-5f17c22ed862-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-gf4gj\" (UID: \"264ad201-9fa4-497f-a8c1-5f17c22ed862\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-gf4gj" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.833890 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7b0b6c1f-a9c1-41dc-8495-7646a81b616b-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-jvb2s\" (UID: \"7b0b6c1f-a9c1-41dc-8495-7646a81b616b\") " pod="openshift-marketplace/marketplace-operator-79b997595-jvb2s" Jan 28 15:47:27 crc kubenswrapper[4811]: E0128 15:47:27.833996 4811 nestedpendingoperations.go:348] Operation 
for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:28.333980716 +0000 UTC m=+141.088344299 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.834046 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjksn\" (UniqueName: \"kubernetes.io/projected/86879674-3f52-439f-ab05-e2cfc81edf75-kube-api-access-xjksn\") pod \"olm-operator-6b444d44fb-bx5jn\" (UID: \"86879674-3f52-439f-ab05-e2cfc81edf75\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bx5jn" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.834143 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/86879674-3f52-439f-ab05-e2cfc81edf75-profile-collector-cert\") pod \"olm-operator-6b444d44fb-bx5jn\" (UID: \"86879674-3f52-439f-ab05-e2cfc81edf75\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bx5jn" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.834204 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rcfb\" (UniqueName: \"kubernetes.io/projected/104b0b18-f5c1-4257-b6e0-39ca8f30378c-kube-api-access-9rcfb\") pod \"machine-config-controller-84d6567774-ttn2c\" (UID: \"104b0b18-f5c1-4257-b6e0-39ca8f30378c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ttn2c" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.834251 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7b0b6c1f-a9c1-41dc-8495-7646a81b616b-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-jvb2s\" (UID: \"7b0b6c1f-a9c1-41dc-8495-7646a81b616b\") " pod="openshift-marketplace/marketplace-operator-79b997595-jvb2s" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.834326 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/104b0b18-f5c1-4257-b6e0-39ca8f30378c-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-ttn2c\" (UID: \"104b0b18-f5c1-4257-b6e0-39ca8f30378c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ttn2c" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.834372 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f25hd\" (UniqueName: \"kubernetes.io/projected/7b0b6c1f-a9c1-41dc-8495-7646a81b616b-kube-api-access-f25hd\") pod \"marketplace-operator-79b997595-jvb2s\" (UID: \"7b0b6c1f-a9c1-41dc-8495-7646a81b616b\") " pod="openshift-marketplace/marketplace-operator-79b997595-jvb2s" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.834460 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: 
\"kubernetes.io/secret/d0d13dd3-5445-427c-81f2-ddf4eed82723-certs\") pod \"machine-config-server-lqmnb\" (UID: \"d0d13dd3-5445-427c-81f2-ddf4eed82723\") " pod="openshift-machine-config-operator/machine-config-server-lqmnb" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.834499 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/104b0b18-f5c1-4257-b6e0-39ca8f30378c-proxy-tls\") pod \"machine-config-controller-84d6567774-ttn2c\" (UID: \"104b0b18-f5c1-4257-b6e0-39ca8f30378c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ttn2c" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.834608 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/86879674-3f52-439f-ab05-e2cfc81edf75-srv-cert\") pod \"olm-operator-6b444d44fb-bx5jn\" (UID: \"86879674-3f52-439f-ab05-e2cfc81edf75\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bx5jn" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.834648 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/d0d13dd3-5445-427c-81f2-ddf4eed82723-node-bootstrap-token\") pod \"machine-config-server-lqmnb\" (UID: \"d0d13dd3-5445-427c-81f2-ddf4eed82723\") " pod="openshift-machine-config-operator/machine-config-server-lqmnb" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.834703 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7fcs\" (UniqueName: \"kubernetes.io/projected/d0d13dd3-5445-427c-81f2-ddf4eed82723-kube-api-access-w7fcs\") pod \"machine-config-server-lqmnb\" (UID: \"d0d13dd3-5445-427c-81f2-ddf4eed82723\") " pod="openshift-machine-config-operator/machine-config-server-lqmnb" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.835422 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/104b0b18-f5c1-4257-b6e0-39ca8f30378c-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-ttn2c\" (UID: \"104b0b18-f5c1-4257-b6e0-39ca8f30378c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ttn2c" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.855744 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.861510 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/68df2bcf-ac5e-4868-80dc-9b568e2e3eba-trusted-ca\") pod \"ingress-operator-5b745b69d9-d4ckz\" (UID: \"68df2bcf-ac5e-4868-80dc-9b568e2e3eba\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4ckz" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.869487 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.890230 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.896702 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f4a001af-cfdf-49ed-9d59-646a1b5683da-etcd-client\") pod \"etcd-operator-b45778765-jtvpg\" 
(UID: \"f4a001af-cfdf-49ed-9d59-646a1b5683da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jtvpg" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.909767 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.929169 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.936121 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:47:27 crc kubenswrapper[4811]: E0128 15:47:27.936306 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:28.436268944 +0000 UTC m=+141.190632527 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.936583 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.936840 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/68df2bcf-ac5e-4868-80dc-9b568e2e3eba-metrics-tls\") pod \"ingress-operator-5b745b69d9-d4ckz\" (UID: \"68df2bcf-ac5e-4868-80dc-9b568e2e3eba\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4ckz" Jan 28 15:47:27 crc kubenswrapper[4811]: E0128 15:47:27.936964 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:28.436954254 +0000 UTC m=+141.191318047 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.949836 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.952028 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4a001af-cfdf-49ed-9d59-646a1b5683da-config\") pod \"etcd-operator-b45778765-jtvpg\" (UID: \"f4a001af-cfdf-49ed-9d59-646a1b5683da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jtvpg" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.969158 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.971356 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/f4a001af-cfdf-49ed-9d59-646a1b5683da-etcd-ca\") pod \"etcd-operator-b45778765-jtvpg\" (UID: \"f4a001af-cfdf-49ed-9d59-646a1b5683da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jtvpg" Jan 28 15:47:27 crc kubenswrapper[4811]: I0128 15:47:27.990252 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.010693 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.012349 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/f4a001af-cfdf-49ed-9d59-646a1b5683da-etcd-service-ca\") pod \"etcd-operator-b45778765-jtvpg\" (UID: \"f4a001af-cfdf-49ed-9d59-646a1b5683da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jtvpg" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.029760 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.037561 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f4a001af-cfdf-49ed-9d59-646a1b5683da-serving-cert\") pod \"etcd-operator-b45778765-jtvpg\" (UID: \"f4a001af-cfdf-49ed-9d59-646a1b5683da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jtvpg" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.038101 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.038313 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:28.538283565 +0000 UTC m=+141.292647158 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.038580 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.038937 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:28.538921673 +0000 UTC m=+141.293285266 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.049376 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.070343 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.090143 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.109929 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.116164 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9cea0670-a73c-4ef8-962e-ac12bdef4283-metrics-tls\") pod \"dns-operator-744455d44c-lmds9\" (UID: \"9cea0670-a73c-4ef8-962e-ac12bdef4283\") " pod="openshift-dns-operator/dns-operator-744455d44c-lmds9" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.130272 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.140481 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.140714 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:28.640687516 +0000 UTC m=+141.395051109 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.141188 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.141723 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:28.641708115 +0000 UTC m=+141.396071708 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.150656 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.189628 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.210264 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.230011 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.242940 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.243110 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:28.743087028 +0000 UTC m=+141.497450621 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.243582 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.243995 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:28.743983783 +0000 UTC m=+141.498347386 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.249761 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.257253 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/1cc605f1-f5a1-4f06-b05c-fb0881684445-default-certificate\") pod \"router-default-5444994796-87g76\" (UID: \"1cc605f1-f5a1-4f06-b05c-fb0881684445\") " pod="openshift-ingress/router-default-5444994796-87g76" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.270530 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.289867 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.295881 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/1cc605f1-f5a1-4f06-b05c-fb0881684445-stats-auth\") pod \"router-default-5444994796-87g76\" (UID: \"1cc605f1-f5a1-4f06-b05c-fb0881684445\") " pod="openshift-ingress/router-default-5444994796-87g76" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.309682 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.314976 4811 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/f2e6c078-567b-4c22-aece-ebc4f195e9a2-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-xg589\" (UID: \"f2e6c078-567b-4c22-aece-ebc4f195e9a2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xg589" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.329743 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.334378 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1cc605f1-f5a1-4f06-b05c-fb0881684445-metrics-certs\") pod \"router-default-5444994796-87g76\" (UID: \"1cc605f1-f5a1-4f06-b05c-fb0881684445\") " pod="openshift-ingress/router-default-5444994796-87g76" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.344534 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.344739 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:28.844711338 +0000 UTC m=+141.599074931 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.345498 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.345815 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:28.845800558 +0000 UTC m=+141.600164141 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.349357 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.369358 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.375797 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1cc605f1-f5a1-4f06-b05c-fb0881684445-service-ca-bundle\") pod \"router-default-5444994796-87g76\" (UID: \"1cc605f1-f5a1-4f06-b05c-fb0881684445\") " pod="openshift-ingress/router-default-5444994796-87g76" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.389803 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.409745 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.430820 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.445989 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.446341 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:28.946295266 +0000 UTC m=+141.700658899 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.447166 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.447476 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:28.947461629 +0000 UTC m=+141.701825212 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.451322 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.457800 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3262acb-1ffb-43da-81c2-8271c1e8acfa-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-w85gc\" (UID: \"b3262acb-1ffb-43da-81c2-8271c1e8acfa\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w85gc" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.469508 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.473343 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3262acb-1ffb-43da-81c2-8271c1e8acfa-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-w85gc\" (UID: \"b3262acb-1ffb-43da-81c2-8271c1e8acfa\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w85gc" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.489891 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.509888 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.517415 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/c6d392c6-2fcb-4d68-81da-6cb52a6a5e8f-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-shsbw\" (UID: \"c6d392c6-2fcb-4d68-81da-6cb52a6a5e8f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-shsbw" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.529606 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.534485 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/9763c54a-c0e1-4f5f-969e-ea775fdfa175-images\") pod \"machine-config-operator-74547568cd-7wsdg\" (UID: \"9763c54a-c0e1-4f5f-969e-ea775fdfa175\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7wsdg" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.547738 4811 request.go:700] Waited for 1.002754444s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-api/secrets?fieldSelector=metadata.name%3Dcontrol-plane-machine-set-operator-dockercfg-k9rxt&limit=500&resourceVersion=0 Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.549418 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.549483 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.549755 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:29.049732756 +0000 UTC m=+141.804096359 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.550306 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.550761 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2026-01-28 15:47:29.050746835 +0000 UTC m=+141.805110418 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.569826 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.590407 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.597264 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/9763c54a-c0e1-4f5f-969e-ea775fdfa175-proxy-tls\") pod \"machine-config-operator-74547568cd-7wsdg\" (UID: \"9763c54a-c0e1-4f5f-969e-ea775fdfa175\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7wsdg" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.610033 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.613801 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0539bf34-a663-45a1-b9d2-b7a43b62b6ad-config\") pod \"kube-controller-manager-operator-78b949d7b-5qxgr\" (UID: \"0539bf34-a663-45a1-b9d2-b7a43b62b6ad\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5qxgr" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.629868 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.650379 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.651763 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.652682 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:29.152656553 +0000 UTC m=+141.907020156 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.657044 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0539bf34-a663-45a1-b9d2-b7a43b62b6ad-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-5qxgr\" (UID: \"0539bf34-a663-45a1-b9d2-b7a43b62b6ad\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5qxgr"
Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.669974 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt"
Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.689473 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.694444 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/78a73da1-92b0-4724-90aa-1a8f5aa3e2ec-secret-volume\") pod \"collect-profiles-29493585-8md6j\" (UID: \"78a73da1-92b0-4724-90aa-1a8f5aa3e2ec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493585-8md6j"
Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.697238 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/86879674-3f52-439f-ab05-e2cfc81edf75-profile-collector-cert\") pod \"olm-operator-6b444d44fb-bx5jn\" (UID: \"86879674-3f52-439f-ab05-e2cfc81edf75\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bx5jn"
Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.697420 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c8aea032-97b6-474a-a09f-b04c638a8a54-profile-collector-cert\") pod \"catalog-operator-68c6474976-gxspm\" (UID: \"c8aea032-97b6-474a-a09f-b04c638a8a54\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gxspm"
Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.710714 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.718042 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c8aea032-97b6-474a-a09f-b04c638a8a54-srv-cert\") pod \"catalog-operator-68c6474976-gxspm\" (UID: \"c8aea032-97b6-474a-a09f-b04c638a8a54\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gxspm"
Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.729860 4811 configmap.go:193] Couldn't get configMap openshift-operator-lifecycle-manager/collect-profiles-config: failed to sync configmap cache: timed out waiting for the condition
Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.729893 4811 configmap.go:193] Couldn't get configMap openshift-service-ca-operator/service-ca-operator-config: failed to sync configmap cache: timed out waiting for the condition
Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.729949 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/78a73da1-92b0-4724-90aa-1a8f5aa3e2ec-config-volume podName:78a73da1-92b0-4724-90aa-1a8f5aa3e2ec nodeName:}" failed. No retries permitted until 2026-01-28 15:47:29.229921917 +0000 UTC m=+141.984285510 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-volume" (UniqueName: "kubernetes.io/configmap/78a73da1-92b0-4724-90aa-1a8f5aa3e2ec-config-volume") pod "collect-profiles-29493585-8md6j" (UID: "78a73da1-92b0-4724-90aa-1a8f5aa3e2ec") : failed to sync configmap cache: timed out waiting for the condition
Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.729957 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.729996 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e40716fd-b721-424a-84d9-935b50543fd9-config podName:e40716fd-b721-424a-84d9-935b50543fd9 nodeName:}" failed. No retries permitted until 2026-01-28 15:47:29.229970768 +0000 UTC m=+141.984334441 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/e40716fd-b721-424a-84d9-935b50543fd9-config") pod "service-ca-operator-777779d784-5t2fc" (UID: "e40716fd-b721-424a-84d9-935b50543fd9") : failed to sync configmap cache: timed out waiting for the condition
Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.730403 4811 configmap.go:193] Couldn't get configMap openshift-service-ca/signing-cabundle: failed to sync configmap cache: timed out waiting for the condition
Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.730553 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c40e8bbb-6ea1-4838-8ea1-a6ece5ee2695-signing-cabundle podName:c40e8bbb-6ea1-4838-8ea1-a6ece5ee2695 nodeName:}" failed. No retries permitted until 2026-01-28 15:47:29.230535573 +0000 UTC m=+141.984899166 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "signing-cabundle" (UniqueName: "kubernetes.io/configmap/c40e8bbb-6ea1-4838-8ea1-a6ece5ee2695-signing-cabundle") pod "service-ca-9c57cc56f-nzw9g" (UID: "c40e8bbb-6ea1-4838-8ea1-a6ece5ee2695") : failed to sync configmap cache: timed out waiting for the condition
Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.733617 4811 secret.go:188] Couldn't get secret openshift-service-ca/signing-key: failed to sync secret cache: timed out waiting for the condition
Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.733690 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c40e8bbb-6ea1-4838-8ea1-a6ece5ee2695-signing-key podName:c40e8bbb-6ea1-4838-8ea1-a6ece5ee2695 nodeName:}" failed. No retries permitted until 2026-01-28 15:47:29.233670801 +0000 UTC m=+141.988034574 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "signing-key" (UniqueName: "kubernetes.io/secret/c40e8bbb-6ea1-4838-8ea1-a6ece5ee2695-signing-key") pod "service-ca-9c57cc56f-nzw9g" (UID: "c40e8bbb-6ea1-4838-8ea1-a6ece5ee2695") : failed to sync secret cache: timed out waiting for the condition
Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.733721 4811 secret.go:188] Couldn't get secret openshift-service-ca-operator/serving-cert: failed to sync secret cache: timed out waiting for the condition
Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.733761 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e40716fd-b721-424a-84d9-935b50543fd9-serving-cert podName:e40716fd-b721-424a-84d9-935b50543fd9 nodeName:}" failed. No retries permitted until 2026-01-28 15:47:29.233749053 +0000 UTC m=+141.988112886 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/e40716fd-b721-424a-84d9-935b50543fd9-serving-cert") pod "service-ca-operator-777779d784-5t2fc" (UID: "e40716fd-b721-424a-84d9-935b50543fd9") : failed to sync secret cache: timed out waiting for the condition
Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.734314 4811 secret.go:188] Couldn't get secret openshift-ingress-canary/canary-serving-cert: failed to sync secret cache: timed out waiting for the condition
Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.734378 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4a5edfb4-e7c7-45b3-aada-9f579b2f8434-cert podName:4a5edfb4-e7c7-45b3-aada-9f579b2f8434 nodeName:}" failed. No retries permitted until 2026-01-28 15:47:29.23436367 +0000 UTC m=+141.988727473 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4a5edfb4-e7c7-45b3-aada-9f579b2f8434-cert") pod "ingress-canary-4fj7f" (UID: "4a5edfb4-e7c7-45b3-aada-9f579b2f8434") : failed to sync secret cache: timed out waiting for the condition
Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.748715 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.754710 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.755276 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:29.255254279 +0000 UTC m=+142.009617902 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.769007 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.789041 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.809224 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert"
Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.829053 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config"
Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.834164 4811 configmap.go:193] Couldn't get configMap openshift-marketplace/marketplace-trusted-ca: failed to sync configmap cache: timed out waiting for the condition
Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.834275 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7b0b6c1f-a9c1-41dc-8495-7646a81b616b-marketplace-trusted-ca podName:7b0b6c1f-a9c1-41dc-8495-7646a81b616b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:29.334251542 +0000 UTC m=+142.088615135 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "marketplace-trusted-ca" (UniqueName: "kubernetes.io/configmap/7b0b6c1f-a9c1-41dc-8495-7646a81b616b-marketplace-trusted-ca") pod "marketplace-operator-79b997595-jvb2s" (UID: "7b0b6c1f-a9c1-41dc-8495-7646a81b616b") : failed to sync configmap cache: timed out waiting for the condition
Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.834497 4811 secret.go:188] Couldn't get secret openshift-marketplace/marketplace-operator-metrics: failed to sync secret cache: timed out waiting for the condition
Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.834599 4811 secret.go:188] Couldn't get secret openshift-machine-config-operator/machine-config-server-tls: failed to sync secret cache: timed out waiting for the condition
Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.834644 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d0d13dd3-5445-427c-81f2-ddf4eed82723-certs podName:d0d13dd3-5445-427c-81f2-ddf4eed82723 nodeName:}" failed. No retries permitted until 2026-01-28 15:47:29.334632282 +0000 UTC m=+142.088995875 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "certs" (UniqueName: "kubernetes.io/secret/d0d13dd3-5445-427c-81f2-ddf4eed82723-certs") pod "machine-config-server-lqmnb" (UID: "d0d13dd3-5445-427c-81f2-ddf4eed82723") : failed to sync secret cache: timed out waiting for the condition
Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.834690 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7b0b6c1f-a9c1-41dc-8495-7646a81b616b-marketplace-operator-metrics podName:7b0b6c1f-a9c1-41dc-8495-7646a81b616b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:29.334653503 +0000 UTC m=+142.089017086 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "marketplace-operator-metrics" (UniqueName: "kubernetes.io/secret/7b0b6c1f-a9c1-41dc-8495-7646a81b616b-marketplace-operator-metrics") pod "marketplace-operator-79b997595-jvb2s" (UID: "7b0b6c1f-a9c1-41dc-8495-7646a81b616b") : failed to sync secret cache: timed out waiting for the condition
Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.834705 4811 secret.go:188] Couldn't get secret openshift-machine-config-operator/mcc-proxy-tls: failed to sync secret cache: timed out waiting for the condition
Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.834743 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/104b0b18-f5c1-4257-b6e0-39ca8f30378c-proxy-tls podName:104b0b18-f5c1-4257-b6e0-39ca8f30378c nodeName:}" failed. No retries permitted until 2026-01-28 15:47:29.334733825 +0000 UTC m=+142.089097418 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "proxy-tls" (UniqueName: "kubernetes.io/secret/104b0b18-f5c1-4257-b6e0-39ca8f30378c-proxy-tls") pod "machine-config-controller-84d6567774-ttn2c" (UID: "104b0b18-f5c1-4257-b6e0-39ca8f30378c") : failed to sync secret cache: timed out waiting for the condition
Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.834768 4811 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/olm-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition
Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.834782 4811 secret.go:188] Couldn't get secret openshift-machine-config-operator/node-bootstrapper-token: failed to sync secret cache: timed out waiting for the condition
Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.834804 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/86879674-3f52-439f-ab05-e2cfc81edf75-srv-cert podName:86879674-3f52-439f-ab05-e2cfc81edf75 nodeName:}" failed. No retries permitted until 2026-01-28 15:47:29.334797247 +0000 UTC m=+142.089160840 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "srv-cert" (UniqueName: "kubernetes.io/secret/86879674-3f52-439f-ab05-e2cfc81edf75-srv-cert") pod "olm-operator-6b444d44fb-bx5jn" (UID: "86879674-3f52-439f-ab05-e2cfc81edf75") : failed to sync secret cache: timed out waiting for the condition
Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.834881 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d0d13dd3-5445-427c-81f2-ddf4eed82723-node-bootstrap-token podName:d0d13dd3-5445-427c-81f2-ddf4eed82723 nodeName:}" failed. No retries permitted until 2026-01-28 15:47:29.334860408 +0000 UTC m=+142.089224031 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "node-bootstrap-token" (UniqueName: "kubernetes.io/secret/d0d13dd3-5445-427c-81f2-ddf4eed82723-node-bootstrap-token") pod "machine-config-server-lqmnb" (UID: "d0d13dd3-5445-427c-81f2-ddf4eed82723") : failed to sync secret cache: timed out waiting for the condition
Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.834957 4811 secret.go:188] Couldn't get secret openshift-multus/multus-admission-controller-secret: failed to sync secret cache: timed out waiting for the condition
Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.835126 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/264ad201-9fa4-497f-a8c1-5f17c22ed862-webhook-certs podName:264ad201-9fa4-497f-a8c1-5f17c22ed862 nodeName:}" failed. No retries permitted until 2026-01-28 15:47:29.335023053 +0000 UTC m=+142.089386646 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/264ad201-9fa4-497f-a8c1-5f17c22ed862-webhook-certs") pod "multus-admission-controller-857f4d67dd-gf4gj" (UID: "264ad201-9fa4-497f-a8c1-5f17c22ed862") : failed to sync secret cache: timed out waiting for the condition
Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.850502 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.855882 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.856083 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:29.356062587 +0000 UTC m=+142.110426180 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.856526 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.856939 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:29.356928471 +0000 UTC m=+142.111292064 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.869892 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.890127 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.910694 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt"
Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.932474 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.948888 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert"
Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.958645 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.958920 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:29.45888474 +0000 UTC m=+142.213248353 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.959518 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:28 crc kubenswrapper[4811]: E0128 15:47:28.959948 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:29.459933029 +0000 UTC m=+142.214296652 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.969890 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx"
Jan 28 15:47:28 crc kubenswrapper[4811]: I0128 15:47:28.989252 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.010323 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.029320 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.050332 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.060722 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:29 crc kubenswrapper[4811]: E0128 15:47:29.060956 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:29.560922261 +0000 UTC m=+142.315285844 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.061191 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcssxq"
Jan 28 15:47:29 crc kubenswrapper[4811]: E0128 15:47:29.061646 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:29.561626741 +0000 UTC m=+142.315990334 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.069543 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.089725 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.118219 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.130042 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.149556 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.162661 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:29 crc kubenswrapper[4811]: E0128 15:47:29.162896 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:29.66286709 +0000 UTC m=+142.417230673 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.163288 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:29 crc kubenswrapper[4811]: E0128 15:47:29.163673 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:29.663658952 +0000 UTC m=+142.418022545 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.196039 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1167ed60-0864-42bb-aba1-48ba9ee03867-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-knslb\" (UID: \"1167ed60-0864-42bb-aba1-48ba9ee03867\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-knslb"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.209490 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jd67p\" (UniqueName: \"kubernetes.io/projected/f2fae918-6d12-482a-9cf1-9cb27efb5f8c-kube-api-access-jd67p\") pod \"route-controller-manager-6576b87f9c-dwxpb\" (UID: \"f2fae918-6d12-482a-9cf1-9cb27efb5f8c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dwxpb"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.229492 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8x42\" (UniqueName: \"kubernetes.io/projected/c2b17da8-0524-4615-a9e5-f1c59a0fde0b-kube-api-access-h8x42\") pod \"openshift-config-operator-7777fb866f-gmjnj\" (UID: \"c2b17da8-0524-4615-a9e5-f1c59a0fde0b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gmjnj"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.246897 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ec729dda-4b2d-458b-8e11-6cc8beacb717-bound-sa-token\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.264926 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:29 crc kubenswrapper[4811]: E0128 15:47:29.265093 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:29.765070406 +0000 UTC m=+142.519433989 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.265185 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4a5edfb4-e7c7-45b3-aada-9f579b2f8434-cert\") pod \"ingress-canary-4fj7f\" (UID: \"4a5edfb4-e7c7-45b3-aada-9f579b2f8434\") " pod="openshift-ingress-canary/ingress-canary-4fj7f"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.265296 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/c40e8bbb-6ea1-4838-8ea1-a6ece5ee2695-signing-key\") pod \"service-ca-9c57cc56f-nzw9g\" (UID: \"c40e8bbb-6ea1-4838-8ea1-a6ece5ee2695\") " pod="openshift-service-ca/service-ca-9c57cc56f-nzw9g"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.265376 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.265555 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e40716fd-b721-424a-84d9-935b50543fd9-serving-cert\") pod \"service-ca-operator-777779d784-5t2fc\" (UID: \"e40716fd-b721-424a-84d9-935b50543fd9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-5t2fc"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.265592 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e40716fd-b721-424a-84d9-935b50543fd9-config\") pod \"service-ca-operator-777779d784-5t2fc\" (UID: \"e40716fd-b721-424a-84d9-935b50543fd9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-5t2fc"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.265616 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/78a73da1-92b0-4724-90aa-1a8f5aa3e2ec-config-volume\") pod \"collect-profiles-29493585-8md6j\" (UID: \"78a73da1-92b0-4724-90aa-1a8f5aa3e2ec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493585-8md6j"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.266147 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dq5vq\" (UniqueName: \"kubernetes.io/projected/bfeb8a90-cb53-45c4-8650-1931e986a5f3-kube-api-access-dq5vq\") pod \"apiserver-76f77b778f-rkd9p\" (UID: \"bfeb8a90-cb53-45c4-8650-1931e986a5f3\") " pod="openshift-apiserver/apiserver-76f77b778f-rkd9p"
Jan 28 15:47:29 crc kubenswrapper[4811]: E0128 15:47:29.266176 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:29.766152456 +0000 UTC m=+142.520516049 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.266358 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e40716fd-b721-424a-84d9-935b50543fd9-config\") pod \"service-ca-operator-777779d784-5t2fc\" (UID: \"e40716fd-b721-424a-84d9-935b50543fd9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-5t2fc"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.266405 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/c40e8bbb-6ea1-4838-8ea1-a6ece5ee2695-signing-cabundle\") pod \"service-ca-9c57cc56f-nzw9g\" (UID: \"c40e8bbb-6ea1-4838-8ea1-a6ece5ee2695\") " pod="openshift-service-ca/service-ca-9c57cc56f-nzw9g"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.266809 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/c40e8bbb-6ea1-4838-8ea1-a6ece5ee2695-signing-cabundle\") pod \"service-ca-9c57cc56f-nzw9g\" (UID: \"c40e8bbb-6ea1-4838-8ea1-a6ece5ee2695\") " pod="openshift-service-ca/service-ca-9c57cc56f-nzw9g"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.267519 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/78a73da1-92b0-4724-90aa-1a8f5aa3e2ec-config-volume\") pod \"collect-profiles-29493585-8md6j\" (UID: \"78a73da1-92b0-4724-90aa-1a8f5aa3e2ec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493585-8md6j"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.269235 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e40716fd-b721-424a-84d9-935b50543fd9-serving-cert\") pod \"service-ca-operator-777779d784-5t2fc\" (UID: \"e40716fd-b721-424a-84d9-935b50543fd9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-5t2fc"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.269574 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/c40e8bbb-6ea1-4838-8ea1-a6ece5ee2695-signing-key\") pod \"service-ca-9c57cc56f-nzw9g\" (UID: \"c40e8bbb-6ea1-4838-8ea1-a6ece5ee2695\") " pod="openshift-service-ca/service-ca-9c57cc56f-nzw9g"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.270189 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.271516 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4a5edfb4-e7c7-45b3-aada-9f579b2f8434-cert\") pod \"ingress-canary-4fj7f\" (UID: \"4a5edfb4-e7c7-45b3-aada-9f579b2f8434\") " pod="openshift-ingress-canary/ingress-canary-4fj7f"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.290331 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.325963 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77wzd\" (UniqueName: \"kubernetes.io/projected/1167ed60-0864-42bb-aba1-48ba9ee03867-kube-api-access-77wzd\") pod \"cluster-image-registry-operator-dc59b4c8b-knslb\" (UID: \"1167ed60-0864-42bb-aba1-48ba9ee03867\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-knslb"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.329526 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.365624 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whdh7\" (UniqueName: \"kubernetes.io/projected/f28ff403-47a1-47de-b3f4-f6519b75064f-kube-api-access-whdh7\") pod \"oauth-openshift-558db77b4-dgnfz\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.367592 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.367754 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/264ad201-9fa4-497f-a8c1-5f17c22ed862-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-gf4gj\" (UID: \"264ad201-9fa4-497f-a8c1-5f17c22ed862\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-gf4gj"
Jan 28 15:47:29 crc kubenswrapper[4811]: E0128 15:47:29.367768 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:29.867747846 +0000 UTC m=+142.622111449 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.367892 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7b0b6c1f-a9c1-41dc-8495-7646a81b616b-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-jvb2s\" (UID: \"7b0b6c1f-a9c1-41dc-8495-7646a81b616b\") " pod="openshift-marketplace/marketplace-operator-79b997595-jvb2s"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.368070 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7b0b6c1f-a9c1-41dc-8495-7646a81b616b-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-jvb2s\" (UID: \"7b0b6c1f-a9c1-41dc-8495-7646a81b616b\") " pod="openshift-marketplace/marketplace-operator-79b997595-jvb2s"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.368228 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/d0d13dd3-5445-427c-81f2-ddf4eed82723-certs\") pod \"machine-config-server-lqmnb\" (UID: \"d0d13dd3-5445-427c-81f2-ddf4eed82723\") " pod="openshift-machine-config-operator/machine-config-server-lqmnb"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.368277 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/104b0b18-f5c1-4257-b6e0-39ca8f30378c-proxy-tls\") pod \"machine-config-controller-84d6567774-ttn2c\" (UID: \"104b0b18-f5c1-4257-b6e0-39ca8f30378c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ttn2c"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.368395 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/86879674-3f52-439f-ab05-e2cfc81edf75-srv-cert\") pod \"olm-operator-6b444d44fb-bx5jn\" (UID: \"86879674-3f52-439f-ab05-e2cfc81edf75\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bx5jn"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.368471 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/d0d13dd3-5445-427c-81f2-ddf4eed82723-node-bootstrap-token\") pod \"machine-config-server-lqmnb\" (UID: \"d0d13dd3-5445-427c-81f2-ddf4eed82723\") " pod="openshift-machine-config-operator/machine-config-server-lqmnb"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.368953 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7b0b6c1f-a9c1-41dc-8495-7646a81b616b-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-jvb2s\" (UID: \"7b0b6c1f-a9c1-41dc-8495-7646a81b616b\") " pod="openshift-marketplace/marketplace-operator-79b997595-jvb2s"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.371450 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/264ad201-9fa4-497f-a8c1-5f17c22ed862-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-gf4gj\" (UID: \"264ad201-9fa4-497f-a8c1-5f17c22ed862\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-gf4gj"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.371623 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/86879674-3f52-439f-ab05-e2cfc81edf75-srv-cert\") pod \"olm-operator-6b444d44fb-bx5jn\" (UID: \"86879674-3f52-439f-ab05-e2cfc81edf75\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bx5jn"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.372537 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7b0b6c1f-a9c1-41dc-8495-7646a81b616b-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-jvb2s\" (UID: \"7b0b6c1f-a9c1-41dc-8495-7646a81b616b\") " pod="openshift-marketplace/marketplace-operator-79b997595-jvb2s"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.404640 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwjps\" (UniqueName: \"kubernetes.io/projected/cfa466bf-9337-45cd-a739-c0d3b5521e13-kube-api-access-hwjps\") pod \"console-f9d7485db-xcx2z\" (UID: \"cfa466bf-9337-45cd-a739-c0d3b5521e13\") " pod="openshift-console/console-f9d7485db-xcx2z"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.407501 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22z5q\" (UniqueName: \"kubernetes.io/projected/797642e6-462e-434f-952f-ea096d1a4e47-kube-api-access-22z5q\") pod \"cluster-samples-operator-665b6dd947-tb2z9\" (UID: \"797642e6-462e-434f-952f-ea096d1a4e47\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tb2z9"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.437203 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wfgc\" (UniqueName: \"kubernetes.io/projected/4141dca3-d75b-4da2-ad3d-47ab847ff59b-kube-api-access-2wfgc\") pod \"console-operator-58897d9998-vffxs\" (UID: \"4141dca3-d75b-4da2-ad3d-47ab847ff59b\") " pod="openshift-console-operator/console-operator-58897d9998-vffxs"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.446474 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcwmq\" (UniqueName: \"kubernetes.io/projected/c386978b-8bd6-432e-b6df-9cacf8acd0ae-kube-api-access-wcwmq\") pod \"machine-approver-56656f9798-qrkpf\" (UID: \"c386978b-8bd6-432e-b6df-9cacf8acd0ae\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qrkpf"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.466266 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gmjnj"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.470736 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:29 crc kubenswrapper[4811]: E0128 15:47:29.472068 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:29.972042549 +0000 UTC m=+142.726406172 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.478960 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8cm9\" (UniqueName: \"kubernetes.io/projected/ec729dda-4b2d-458b-8e11-6cc8beacb717-kube-api-access-z8cm9\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.487759 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dwxpb"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.492376 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7pnz\" (UniqueName: \"kubernetes.io/projected/2fc54817-ea76-451c-8dc9-2bf6684dc2b8-kube-api-access-r7pnz\") pod \"apiserver-7bbb656c7d-b9zf8\" (UID: \"2fc54817-ea76-451c-8dc9-2bf6684dc2b8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.513571 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j76cb\" (UniqueName: \"kubernetes.io/projected/76a1cb2c-b73b-4d38-90b2-1ab8e91e0998-kube-api-access-j76cb\") pod \"openshift-apiserver-operator-796bbdcf4f-q7vr4\" (UID: \"76a1cb2c-b73b-4d38-90b2-1ab8e91e0998\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q7vr4"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.529233 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-rkd9p"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.530250 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.532964 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rzp6s\" (UniqueName: \"kubernetes.io/projected/3e446987-0b7c-46f9-8207-dfe3e84240e4-kube-api-access-rzp6s\") pod \"authentication-operator-69f744f599-bg96p\" (UID: \"3e446987-0b7c-46f9-8207-dfe3e84240e4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bg96p"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.542673 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/104b0b18-f5c1-4257-b6e0-39ca8f30378c-proxy-tls\") pod \"machine-config-controller-84d6567774-ttn2c\" (UID: \"104b0b18-f5c1-4257-b6e0-39ca8f30378c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ttn2c"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.548005 4811 request.go:700] Waited for 1.914831482s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/secrets?fieldSelector=metadata.name%3Dmachine-config-controller-dockercfg-c2lfx&limit=500&resourceVersion=0
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.550162 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.553690 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qrkpf"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.571042 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.571732 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:29 crc kubenswrapper[4811]: E0128 15:47:29.572286 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:30.07225113 +0000 UTC m=+142.826614753 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:29 crc kubenswrapper[4811]: W0128 15:47:29.577226 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc386978b_8bd6_432e_b6df_9cacf8acd0ae.slice/crio-6b64734be6934d7d661314fee7e199f8ee4d17fa273ffa3dcd60fc3f493a6df0 WatchSource:0}: Error finding container 6b64734be6934d7d661314fee7e199f8ee4d17fa273ffa3dcd60fc3f493a6df0: Status 404 returned error can't find the container with id 6b64734be6934d7d661314fee7e199f8ee4d17fa273ffa3dcd60fc3f493a6df0
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.590529 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.590988 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.604876 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tb2z9"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.606269 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/d0d13dd3-5445-427c-81f2-ddf4eed82723-node-bootstrap-token\") pod \"machine-config-server-lqmnb\" (UID: \"d0d13dd3-5445-427c-81f2-ddf4eed82723\") " pod="openshift-machine-config-operator/machine-config-server-lqmnb"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.612277 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-knslb"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.612305 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.622896 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q7vr4"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.629797 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.639666 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/d0d13dd3-5445-427c-81f2-ddf4eed82723-certs\") pod \"machine-config-server-lqmnb\" (UID: \"d0d13dd3-5445-427c-81f2-ddf4eed82723\") " pod="openshift-machine-config-operator/machine-config-server-lqmnb"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.649840 4811 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.656826 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-xcx2z"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.672688 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.674058 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:29 crc kubenswrapper[4811]: E0128 15:47:29.674626 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:30.17461321 +0000 UTC m=+142.928976783 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.701388 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-vffxs"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.710082 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.729921 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.751234 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.774998 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:29 crc kubenswrapper[4811]: E0128 15:47:29.775617 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:30.275568972 +0000 UTC m=+143.029932585 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.779405 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.794621 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7n5fd\" (UniqueName: \"kubernetes.io/projected/68df2bcf-ac5e-4868-80dc-9b568e2e3eba-kube-api-access-7n5fd\") pod \"ingress-operator-5b745b69d9-d4ckz\" (UID: \"68df2bcf-ac5e-4868-80dc-9b568e2e3eba\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4ckz"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.806833 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phr9d\" (UniqueName: \"kubernetes.io/projected/0ac199b3-b2fa-4acf-92e5-0840b7da5c41-kube-api-access-phr9d\") pod \"openshift-controller-manager-operator-756b6f6bc6-2jfj5\" (UID: \"0ac199b3-b2fa-4acf-92e5-0840b7da5c41\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jfj5"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.811891 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-bg96p"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.823923 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jfj5"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.830347 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7426\" (UniqueName: \"kubernetes.io/projected/78a73da1-92b0-4724-90aa-1a8f5aa3e2ec-kube-api-access-h7426\") pod \"collect-profiles-29493585-8md6j\" (UID: \"78a73da1-92b0-4724-90aa-1a8f5aa3e2ec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493585-8md6j"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.848679 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dkgl9\" (UniqueName: \"kubernetes.io/projected/0439276f-d9ad-40f1-aa18-d05a1ee2b768-kube-api-access-dkgl9\") pod \"machine-api-operator-5694c8668f-c4zgz\" (UID: \"0439276f-d9ad-40f1-aa18-d05a1ee2b768\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-c4zgz"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.873509 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zp2x\" (UniqueName: \"kubernetes.io/projected/02bc99e0-e3a2-4c8b-b936-b5d7535412d0-kube-api-access-6zp2x\") pod \"downloads-7954f5f757-dssbm\" (UID: \"02bc99e0-e3a2-4c8b-b936-b5d7535412d0\") " pod="openshift-console/downloads-7954f5f757-dssbm"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.877058 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:29 crc kubenswrapper[4811]: E0128 15:47:29.877792 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:30.377774998 +0000 UTC m=+143.132138581 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.889306 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l889n\" (UniqueName: \"kubernetes.io/projected/f2e6c078-567b-4c22-aece-ebc4f195e9a2-kube-api-access-l889n\") pod \"package-server-manager-789f6589d5-xg589\" (UID: \"f2e6c078-567b-4c22-aece-ebc4f195e9a2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xg589"
Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.894114 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xg589" Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.908326 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rpd5p\" (UniqueName: \"kubernetes.io/projected/f4a001af-cfdf-49ed-9d59-646a1b5683da-kube-api-access-rpd5p\") pod \"etcd-operator-b45778765-jtvpg\" (UID: \"f4a001af-cfdf-49ed-9d59-646a1b5683da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jtvpg" Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.929383 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhlsj\" (UniqueName: \"kubernetes.io/projected/c8aea032-97b6-474a-a09f-b04c638a8a54-kube-api-access-bhlsj\") pod \"catalog-operator-68c6474976-gxspm\" (UID: \"c8aea032-97b6-474a-a09f-b04c638a8a54\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gxspm" Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.932710 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gxspm" Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.946112 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0539bf34-a663-45a1-b9d2-b7a43b62b6ad-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-5qxgr\" (UID: \"0539bf34-a663-45a1-b9d2-b7a43b62b6ad\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5qxgr" Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.972969 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2c5167f6-623a-4b81-ad25-0e866528fd94-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-sfnhq\" (UID: \"2c5167f6-623a-4b81-ad25-0e866528fd94\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfnhq" Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.974244 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493585-8md6j" Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.978887 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:47:29 crc kubenswrapper[4811]: E0128 15:47:29.979112 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:30.479083649 +0000 UTC m=+143.233447392 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.979249 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:29 crc kubenswrapper[4811]: E0128 15:47:29.979814 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:30.479789069 +0000 UTC m=+143.234152652 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:29 crc kubenswrapper[4811]: I0128 15:47:29.994986 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bl6jz\" (UniqueName: \"kubernetes.io/projected/b3262acb-1ffb-43da-81c2-8271c1e8acfa-kube-api-access-bl6jz\") pod \"kube-storage-version-migrator-operator-b67b599dd-w85gc\" (UID: \"b3262acb-1ffb-43da-81c2-8271c1e8acfa\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w85gc" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.006184 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ef30f9b4-f11c-4149-a320-539203ac5666-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-t9r9j\" (UID: \"ef30f9b4-f11c-4149-a320-539203ac5666\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-t9r9j" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.035494 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qrkpf" event={"ID":"c386978b-8bd6-432e-b6df-9cacf8acd0ae","Type":"ContainerStarted","Data":"fbeec54238b2a70776bc0c4dc006f1d085b449efd92d91f94461a3928be65e30"} Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.035544 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qrkpf" event={"ID":"c386978b-8bd6-432e-b6df-9cacf8acd0ae","Type":"ContainerStarted","Data":"6b64734be6934d7d661314fee7e199f8ee4d17fa273ffa3dcd60fc3f493a6df0"} Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.037669 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nv27t\" (UniqueName: 
\"kubernetes.io/projected/9cea0670-a73c-4ef8-962e-ac12bdef4283-kube-api-access-nv27t\") pod \"dns-operator-744455d44c-lmds9\" (UID: \"9cea0670-a73c-4ef8-962e-ac12bdef4283\") " pod="openshift-dns-operator/dns-operator-744455d44c-lmds9" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.042073 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dwxpb"] Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.043160 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-gmjnj"] Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.054281 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-br7bb\" (UniqueName: \"kubernetes.io/projected/4a5edfb4-e7c7-45b3-aada-9f579b2f8434-kube-api-access-br7bb\") pod \"ingress-canary-4fj7f\" (UID: \"4a5edfb4-e7c7-45b3-aada-9f579b2f8434\") " pod="openshift-ingress-canary/ingress-canary-4fj7f" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.073987 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-262pc\" (UniqueName: \"kubernetes.io/projected/c6d392c6-2fcb-4d68-81da-6cb52a6a5e8f-kube-api-access-262pc\") pod \"control-plane-machine-set-operator-78cbb6b69f-shsbw\" (UID: \"c6d392c6-2fcb-4d68-81da-6cb52a6a5e8f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-shsbw" Jan 28 15:47:30 crc kubenswrapper[4811]: W0128 15:47:30.073982 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf2fae918_6d12_482a_9cf1_9cb27efb5f8c.slice/crio-137c6c88cb9d3c3a38787e835c1fac729f35a10601f2c89cca8267959c178335 WatchSource:0}: Error finding container 137c6c88cb9d3c3a38787e835c1fac729f35a10601f2c89cca8267959c178335: Status 404 returned error can't find the container with id 137c6c88cb9d3c3a38787e835c1fac729f35a10601f2c89cca8267959c178335 Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.080268 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:47:30 crc kubenswrapper[4811]: E0128 15:47:30.080521 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:30.580494553 +0000 UTC m=+143.334858146 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.080831 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:30 crc kubenswrapper[4811]: E0128 15:47:30.081177 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:30.581164011 +0000 UTC m=+143.335527594 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.081185 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-rkd9p"] Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.089712 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgvkd\" (UniqueName: \"kubernetes.io/projected/1cc605f1-f5a1-4f06-b05c-fb0881684445-kube-api-access-tgvkd\") pod \"router-default-5444994796-87g76\" (UID: \"1cc605f1-f5a1-4f06-b05c-fb0881684445\") " pod="openshift-ingress/router-default-5444994796-87g76" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.107480 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-c4zgz" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.116745 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-dssbm" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.118242 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q46tb\" (UniqueName: \"kubernetes.io/projected/6c6e1eda-08d6-4430-8d7c-64c859d57f60-kube-api-access-q46tb\") pod \"migrator-59844c95c7-47dsj\" (UID: \"6c6e1eda-08d6-4430-8d7c-64c859d57f60\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-47dsj" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.125366 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ktm6\" (UniqueName: \"kubernetes.io/projected/c5e6c2fc-f99a-443f-a0c6-165478a1e838-kube-api-access-9ktm6\") pod \"controller-manager-879f6c89f-27m45\" (UID: \"c5e6c2fc-f99a-443f-a0c6-165478a1e838\") " pod="openshift-controller-manager/controller-manager-879f6c89f-27m45" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.137933 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-t9r9j" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.146147 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6kk5\" (UniqueName: \"kubernetes.io/projected/e40716fd-b721-424a-84d9-935b50543fd9-kube-api-access-m6kk5\") pod \"service-ca-operator-777779d784-5t2fc\" (UID: \"e40716fd-b721-424a-84d9-935b50543fd9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-5t2fc" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.151460 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-47dsj" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.155901 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfnhq" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.174463 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-jtvpg" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.178251 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-lmds9" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.180355 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g77dg\" (UniqueName: \"kubernetes.io/projected/9763c54a-c0e1-4f5f-969e-ea775fdfa175-kube-api-access-g77dg\") pod \"machine-config-operator-74547568cd-7wsdg\" (UID: \"9763c54a-c0e1-4f5f-969e-ea775fdfa175\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7wsdg" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.181670 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:47:30 crc kubenswrapper[4811]: E0128 15:47:30.182295 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-28 15:47:30.682266566 +0000 UTC m=+143.436630149 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.184064 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-knslb"] Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.185066 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-87g76" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.189199 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxgb7\" (UniqueName: \"kubernetes.io/projected/c40e8bbb-6ea1-4838-8ea1-a6ece5ee2695-kube-api-access-xxgb7\") pod \"service-ca-9c57cc56f-nzw9g\" (UID: \"c40e8bbb-6ea1-4838-8ea1-a6ece5ee2695\") " pod="openshift-service-ca/service-ca-9c57cc56f-nzw9g" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.192846 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-dgnfz"] Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.195145 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tb2z9"] Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.202250 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w85gc" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.205932 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/68df2bcf-ac5e-4868-80dc-9b568e2e3eba-bound-sa-token\") pod \"ingress-operator-5b745b69d9-d4ckz\" (UID: \"68df2bcf-ac5e-4868-80dc-9b568e2e3eba\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4ckz" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.208857 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-shsbw" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.216316 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7wsdg" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.223277 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5qxgr" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.224361 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pswnx\" (UniqueName: \"kubernetes.io/projected/264ad201-9fa4-497f-a8c1-5f17c22ed862-kube-api-access-pswnx\") pod \"multus-admission-controller-857f4d67dd-gf4gj\" (UID: \"264ad201-9fa4-497f-a8c1-5f17c22ed862\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-gf4gj" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.244719 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-nzw9g" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.253058 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjksn\" (UniqueName: \"kubernetes.io/projected/86879674-3f52-439f-ab05-e2cfc81edf75-kube-api-access-xjksn\") pod \"olm-operator-6b444d44fb-bx5jn\" (UID: \"86879674-3f52-439f-ab05-e2cfc81edf75\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bx5jn" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.253420 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-5t2fc" Jan 28 15:47:30 crc kubenswrapper[4811]: W0128 15:47:30.257226 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1167ed60_0864_42bb_aba1_48ba9ee03867.slice/crio-719cb70d5f9cae0ea65a1c124d4aafdb42e6de469221cd563f4a1c1369750b2a WatchSource:0}: Error finding container 719cb70d5f9cae0ea65a1c124d4aafdb42e6de469221cd563f4a1c1369750b2a: Status 404 returned error can't find the container with id 719cb70d5f9cae0ea65a1c124d4aafdb42e6de469221cd563f4a1c1369750b2a Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.264232 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-4fj7f" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.265467 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rcfb\" (UniqueName: \"kubernetes.io/projected/104b0b18-f5c1-4257-b6e0-39ca8f30378c-kube-api-access-9rcfb\") pod \"machine-config-controller-84d6567774-ttn2c\" (UID: \"104b0b18-f5c1-4257-b6e0-39ca8f30378c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ttn2c" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.273882 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-vffxs"] Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.282040 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q7vr4"] Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.286474 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:30 crc kubenswrapper[4811]: E0128 15:47:30.288314 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:30.788295569 +0000 UTC m=+143.542659152 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:30 crc kubenswrapper[4811]: W0128 15:47:30.288739 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf28ff403_47a1_47de_b3f4_f6519b75064f.slice/crio-e234cced49affed23aea5123cb526fc34527ec0add90b5c6d44d5d8d7b9be057 WatchSource:0}: Error finding container e234cced49affed23aea5123cb526fc34527ec0add90b5c6d44d5d8d7b9be057: Status 404 returned error can't find the container with id e234cced49affed23aea5123cb526fc34527ec0add90b5c6d44d5d8d7b9be057 Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.295360 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-gf4gj" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.295415 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f25hd\" (UniqueName: \"kubernetes.io/projected/7b0b6c1f-a9c1-41dc-8495-7646a81b616b-kube-api-access-f25hd\") pod \"marketplace-operator-79b997595-jvb2s\" (UID: \"7b0b6c1f-a9c1-41dc-8495-7646a81b616b\") " pod="openshift-marketplace/marketplace-operator-79b997595-jvb2s" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.302730 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-xcx2z"] Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.303950 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bx5jn" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.313051 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ttn2c" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.315893 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7fcs\" (UniqueName: \"kubernetes.io/projected/d0d13dd3-5445-427c-81f2-ddf4eed82723-kube-api-access-w7fcs\") pod \"machine-config-server-lqmnb\" (UID: \"d0d13dd3-5445-427c-81f2-ddf4eed82723\") " pod="openshift-machine-config-operator/machine-config-server-lqmnb" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.322702 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-lqmnb" Jan 28 15:47:30 crc kubenswrapper[4811]: W0128 15:47:30.362723 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcfa466bf_9337_45cd_a739_c0d3b5521e13.slice/crio-69198bcacb227f8d3e6450f96cb168ed9cc090da6ba286858cb5d6dcd16a4722 WatchSource:0}: Error finding container 69198bcacb227f8d3e6450f96cb168ed9cc090da6ba286858cb5d6dcd16a4722: Status 404 returned error can't find the container with id 69198bcacb227f8d3e6450f96cb168ed9cc090da6ba286858cb5d6dcd16a4722 Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.387564 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.387836 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/144e6914-af95-49dc-a18e-197adbd1ee56-apiservice-cert\") pod \"packageserver-d55dfcdfc-tknqk\" (UID: \"144e6914-af95-49dc-a18e-197adbd1ee56\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tknqk" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.387905 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/c6911965-9557-4504-8fc4-66b5f3c45391-registration-dir\") pod \"csi-hostpathplugin-q66fr\" (UID: \"c6911965-9557-4504-8fc4-66b5f3c45391\") " pod="hostpath-provisioner/csi-hostpathplugin-q66fr" Jan 28 15:47:30 crc 
kubenswrapper[4811]: I0128 15:47:30.387960 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/c6911965-9557-4504-8fc4-66b5f3c45391-socket-dir\") pod \"csi-hostpathplugin-q66fr\" (UID: \"c6911965-9557-4504-8fc4-66b5f3c45391\") " pod="hostpath-provisioner/csi-hostpathplugin-q66fr" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.388033 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxh5p\" (UniqueName: \"kubernetes.io/projected/c6911965-9557-4504-8fc4-66b5f3c45391-kube-api-access-jxh5p\") pod \"csi-hostpathplugin-q66fr\" (UID: \"c6911965-9557-4504-8fc4-66b5f3c45391\") " pod="hostpath-provisioner/csi-hostpathplugin-q66fr" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.388322 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/144e6914-af95-49dc-a18e-197adbd1ee56-webhook-cert\") pod \"packageserver-d55dfcdfc-tknqk\" (UID: \"144e6914-af95-49dc-a18e-197adbd1ee56\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tknqk" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.388386 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4dlz\" (UniqueName: \"kubernetes.io/projected/144e6914-af95-49dc-a18e-197adbd1ee56-kube-api-access-h4dlz\") pod \"packageserver-d55dfcdfc-tknqk\" (UID: \"144e6914-af95-49dc-a18e-197adbd1ee56\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tknqk" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.388410 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/c6911965-9557-4504-8fc4-66b5f3c45391-csi-data-dir\") pod \"csi-hostpathplugin-q66fr\" (UID: \"c6911965-9557-4504-8fc4-66b5f3c45391\") " pod="hostpath-provisioner/csi-hostpathplugin-q66fr" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.388488 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/c6911965-9557-4504-8fc4-66b5f3c45391-mountpoint-dir\") pod \"csi-hostpathplugin-q66fr\" (UID: \"c6911965-9557-4504-8fc4-66b5f3c45391\") " pod="hostpath-provisioner/csi-hostpathplugin-q66fr" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.388546 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/144e6914-af95-49dc-a18e-197adbd1ee56-tmpfs\") pod \"packageserver-d55dfcdfc-tknqk\" (UID: \"144e6914-af95-49dc-a18e-197adbd1ee56\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tknqk" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.388589 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/c6911965-9557-4504-8fc4-66b5f3c45391-plugins-dir\") pod \"csi-hostpathplugin-q66fr\" (UID: \"c6911965-9557-4504-8fc4-66b5f3c45391\") " pod="hostpath-provisioner/csi-hostpathplugin-q66fr" Jan 28 15:47:30 crc kubenswrapper[4811]: E0128 15:47:30.389055 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:30.889035674 +0000 UTC m=+143.643399267 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.401740 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-27m45" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.451621 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jfj5"] Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.463189 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4ckz" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.467865 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-bg96p"] Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.473560 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8"] Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.477823 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfnhq"] Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.494228 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxh5p\" (UniqueName: \"kubernetes.io/projected/c6911965-9557-4504-8fc4-66b5f3c45391-kube-api-access-jxh5p\") pod \"csi-hostpathplugin-q66fr\" (UID: \"c6911965-9557-4504-8fc4-66b5f3c45391\") " pod="hostpath-provisioner/csi-hostpathplugin-q66fr" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.494309 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d8950881-4230-4e6a-b69d-cf9c3731c634-config-volume\") pod \"dns-default-lb8jv\" (UID: \"d8950881-4230-4e6a-b69d-cf9c3731c634\") " pod="openshift-dns/dns-default-lb8jv" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.494386 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/144e6914-af95-49dc-a18e-197adbd1ee56-webhook-cert\") pod \"packageserver-d55dfcdfc-tknqk\" (UID: \"144e6914-af95-49dc-a18e-197adbd1ee56\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tknqk" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.494484 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d8950881-4230-4e6a-b69d-cf9c3731c634-metrics-tls\") pod \"dns-default-lb8jv\" (UID: \"d8950881-4230-4e6a-b69d-cf9c3731c634\") " pod="openshift-dns/dns-default-lb8jv" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.494510 4811 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-h4dlz\" (UniqueName: \"kubernetes.io/projected/144e6914-af95-49dc-a18e-197adbd1ee56-kube-api-access-h4dlz\") pod \"packageserver-d55dfcdfc-tknqk\" (UID: \"144e6914-af95-49dc-a18e-197adbd1ee56\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tknqk" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.494529 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/c6911965-9557-4504-8fc4-66b5f3c45391-csi-data-dir\") pod \"csi-hostpathplugin-q66fr\" (UID: \"c6911965-9557-4504-8fc4-66b5f3c45391\") " pod="hostpath-provisioner/csi-hostpathplugin-q66fr" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.494704 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.494824 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/c6911965-9557-4504-8fc4-66b5f3c45391-mountpoint-dir\") pod \"csi-hostpathplugin-q66fr\" (UID: \"c6911965-9557-4504-8fc4-66b5f3c45391\") " pod="hostpath-provisioner/csi-hostpathplugin-q66fr" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.494879 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/144e6914-af95-49dc-a18e-197adbd1ee56-tmpfs\") pod \"packageserver-d55dfcdfc-tknqk\" (UID: \"144e6914-af95-49dc-a18e-197adbd1ee56\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tknqk" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.495038 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/c6911965-9557-4504-8fc4-66b5f3c45391-plugins-dir\") pod \"csi-hostpathplugin-q66fr\" (UID: \"c6911965-9557-4504-8fc4-66b5f3c45391\") " pod="hostpath-provisioner/csi-hostpathplugin-q66fr" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.495091 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmbc4\" (UniqueName: \"kubernetes.io/projected/d8950881-4230-4e6a-b69d-cf9c3731c634-kube-api-access-kmbc4\") pod \"dns-default-lb8jv\" (UID: \"d8950881-4230-4e6a-b69d-cf9c3731c634\") " pod="openshift-dns/dns-default-lb8jv" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.496033 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/144e6914-af95-49dc-a18e-197adbd1ee56-apiservice-cert\") pod \"packageserver-d55dfcdfc-tknqk\" (UID: \"144e6914-af95-49dc-a18e-197adbd1ee56\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tknqk" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.496136 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/c6911965-9557-4504-8fc4-66b5f3c45391-registration-dir\") pod \"csi-hostpathplugin-q66fr\" (UID: \"c6911965-9557-4504-8fc4-66b5f3c45391\") " 
pod="hostpath-provisioner/csi-hostpathplugin-q66fr" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.496231 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/c6911965-9557-4504-8fc4-66b5f3c45391-socket-dir\") pod \"csi-hostpathplugin-q66fr\" (UID: \"c6911965-9557-4504-8fc4-66b5f3c45391\") " pod="hostpath-provisioner/csi-hostpathplugin-q66fr" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.496583 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/c6911965-9557-4504-8fc4-66b5f3c45391-socket-dir\") pod \"csi-hostpathplugin-q66fr\" (UID: \"c6911965-9557-4504-8fc4-66b5f3c45391\") " pod="hostpath-provisioner/csi-hostpathplugin-q66fr" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.502263 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/c6911965-9557-4504-8fc4-66b5f3c45391-registration-dir\") pod \"csi-hostpathplugin-q66fr\" (UID: \"c6911965-9557-4504-8fc4-66b5f3c45391\") " pod="hostpath-provisioner/csi-hostpathplugin-q66fr" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.503789 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/c6911965-9557-4504-8fc4-66b5f3c45391-plugins-dir\") pod \"csi-hostpathplugin-q66fr\" (UID: \"c6911965-9557-4504-8fc4-66b5f3c45391\") " pod="hostpath-provisioner/csi-hostpathplugin-q66fr" Jan 28 15:47:30 crc kubenswrapper[4811]: E0128 15:47:30.504555 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:31.004540179 +0000 UTC m=+143.758903762 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.505325 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/c6911965-9557-4504-8fc4-66b5f3c45391-mountpoint-dir\") pod \"csi-hostpathplugin-q66fr\" (UID: \"c6911965-9557-4504-8fc4-66b5f3c45391\") " pod="hostpath-provisioner/csi-hostpathplugin-q66fr" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.505649 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/c6911965-9557-4504-8fc4-66b5f3c45391-csi-data-dir\") pod \"csi-hostpathplugin-q66fr\" (UID: \"c6911965-9557-4504-8fc4-66b5f3c45391\") " pod="hostpath-provisioner/csi-hostpathplugin-q66fr" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.506871 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/144e6914-af95-49dc-a18e-197adbd1ee56-tmpfs\") pod \"packageserver-d55dfcdfc-tknqk\" (UID: \"144e6914-af95-49dc-a18e-197adbd1ee56\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tknqk" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.514512 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/144e6914-af95-49dc-a18e-197adbd1ee56-webhook-cert\") pod \"packageserver-d55dfcdfc-tknqk\" (UID: \"144e6914-af95-49dc-a18e-197adbd1ee56\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tknqk" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.521386 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/144e6914-af95-49dc-a18e-197adbd1ee56-apiservice-cert\") pod \"packageserver-d55dfcdfc-tknqk\" (UID: \"144e6914-af95-49dc-a18e-197adbd1ee56\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tknqk" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.526992 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gxspm"] Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.534507 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxh5p\" (UniqueName: \"kubernetes.io/projected/c6911965-9557-4504-8fc4-66b5f3c45391-kube-api-access-jxh5p\") pod \"csi-hostpathplugin-q66fr\" (UID: \"c6911965-9557-4504-8fc4-66b5f3c45391\") " pod="hostpath-provisioner/csi-hostpathplugin-q66fr" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.535469 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493585-8md6j"] Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.547307 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4dlz\" (UniqueName: \"kubernetes.io/projected/144e6914-af95-49dc-a18e-197adbd1ee56-kube-api-access-h4dlz\") pod \"packageserver-d55dfcdfc-tknqk\" (UID: 
\"144e6914-af95-49dc-a18e-197adbd1ee56\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tknqk" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.554992 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xg589"] Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.586847 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-jvb2s" Jan 28 15:47:30 crc kubenswrapper[4811]: W0128 15:47:30.594499 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf2e6c078_567b_4c22_aece_ebc4f195e9a2.slice/crio-efb071d39a17f17e1a80ec968ac0b860d3099519f73f34e8a044860a0e03fa37 WatchSource:0}: Error finding container efb071d39a17f17e1a80ec968ac0b860d3099519f73f34e8a044860a0e03fa37: Status 404 returned error can't find the container with id efb071d39a17f17e1a80ec968ac0b860d3099519f73f34e8a044860a0e03fa37 Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.596952 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:47:30 crc kubenswrapper[4811]: E0128 15:47:30.597096 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:31.097072427 +0000 UTC m=+143.851436010 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.597757 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d8950881-4230-4e6a-b69d-cf9c3731c634-config-volume\") pod \"dns-default-lb8jv\" (UID: \"d8950881-4230-4e6a-b69d-cf9c3731c634\") " pod="openshift-dns/dns-default-lb8jv" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.597859 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d8950881-4230-4e6a-b69d-cf9c3731c634-metrics-tls\") pod \"dns-default-lb8jv\" (UID: \"d8950881-4230-4e6a-b69d-cf9c3731c634\") " pod="openshift-dns/dns-default-lb8jv" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.597924 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.597986 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmbc4\" (UniqueName: \"kubernetes.io/projected/d8950881-4230-4e6a-b69d-cf9c3731c634-kube-api-access-kmbc4\") pod \"dns-default-lb8jv\" (UID: \"d8950881-4230-4e6a-b69d-cf9c3731c634\") " pod="openshift-dns/dns-default-lb8jv" Jan 28 15:47:30 crc kubenswrapper[4811]: E0128 15:47:30.598776 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:31.098762844 +0000 UTC m=+143.853126427 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.599185 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d8950881-4230-4e6a-b69d-cf9c3731c634-config-volume\") pod \"dns-default-lb8jv\" (UID: \"d8950881-4230-4e6a-b69d-cf9c3731c634\") " pod="openshift-dns/dns-default-lb8jv"
Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.614844 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d8950881-4230-4e6a-b69d-cf9c3731c634-metrics-tls\") pod \"dns-default-lb8jv\" (UID: \"d8950881-4230-4e6a-b69d-cf9c3731c634\") " pod="openshift-dns/dns-default-lb8jv"
Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.630019 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmbc4\" (UniqueName: \"kubernetes.io/projected/d8950881-4230-4e6a-b69d-cf9c3731c634-kube-api-access-kmbc4\") pod \"dns-default-lb8jv\" (UID: \"d8950881-4230-4e6a-b69d-cf9c3731c634\") " pod="openshift-dns/dns-default-lb8jv"
Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.663741 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-dssbm"]
Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.670173 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-q66fr"
Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.675769 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-lb8jv"
Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.676095 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tknqk"
Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.679745 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-c4zgz"]
Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.698670 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:30 crc kubenswrapper[4811]: E0128 15:47:30.698992 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:31.198976964 +0000 UTC m=+143.953340547 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.753173 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-t9r9j"]
Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.799467 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-jtvpg"]
Jan 28 15:47:30 crc kubenswrapper[4811]: E0128 15:47:30.800023 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:31.300009088 +0000 UTC m=+144.054372671 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.799727 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.801668 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-lmds9"]
Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.901039 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:30 crc kubenswrapper[4811]: E0128 15:47:30.901632 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:31.401613177 +0000 UTC m=+144.155976760 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.946290 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-47dsj"]
Jan 28 15:47:30 crc kubenswrapper[4811]: I0128 15:47:30.980965 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-5t2fc"]
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.002424 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:31 crc kubenswrapper[4811]: E0128 15:47:31.002914 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:31.502898237 +0000 UTC m=+144.257261820 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:31 crc kubenswrapper[4811]: W0128 15:47:31.034538 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4a001af_cfdf_49ed_9d59_646a1b5683da.slice/crio-da32b08b43d79f3917dc42b3efae8f261fdf4a6d315ee54abdf3eedf8b4775fd WatchSource:0}: Error finding container da32b08b43d79f3917dc42b3efae8f261fdf4a6d315ee54abdf3eedf8b4775fd: Status 404 returned error can't find the container with id da32b08b43d79f3917dc42b3efae8f261fdf4a6d315ee54abdf3eedf8b4775fd
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.079530 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-shsbw"]
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.086357 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-bg96p" event={"ID":"3e446987-0b7c-46f9-8207-dfe3e84240e4","Type":"ContainerStarted","Data":"b2a66a5ddf2726ccb5f1551f44f0c2fd289db53cba4399e8713a291af9a45ba2"}
Jan 28 15:47:31 crc kubenswrapper[4811]: W0128 15:47:31.088283 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode40716fd_b721_424a_84d9_935b50543fd9.slice/crio-d27997c60c9316ed5574d16e2fde672495c24a7c3858566821ef0ae80f67d4c4 WatchSource:0}: Error finding container d27997c60c9316ed5574d16e2fde672495c24a7c3858566821ef0ae80f67d4c4: Status 404 returned error can't find the container with id d27997c60c9316ed5574d16e2fde672495c24a7c3858566821ef0ae80f67d4c4
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.103512 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:31 crc kubenswrapper[4811]: E0128 15:47:31.104316 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:31.604279271 +0000 UTC m=+144.358642854 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.104425 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:31 crc kubenswrapper[4811]: E0128 15:47:31.105103 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:31.605091663 +0000 UTC m=+144.359455246 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
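Every MountDevice and TearDownAt failure above has the same root cause: the kubelet only routes CSI calls to drivers that have registered themselves over its plugin-registration socket, and at this point kubevirt.io.hostpath-provisioner has not yet done so; the log shows the driver's own node-plugin pod (hostpath-provisioner/csi-hostpathplugin-q66fr) still waiting for a sandbox. Until registration completes, every mount or unmount for that driver fails fast and is requeued. A minimal sketch of how to inspect what a node has registered, assuming a reachable cluster, a node named "crc", and a kubeconfig at the default path (this tooling does not appear anywhere in the log itself):

// csidrivers.go - illustrative sketch only; node name "crc" and the kubeconfig
// path are assumptions, not something taken from this log.
package main

import (
	"context"
	"fmt"
	"os"
	"path/filepath"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	kubeconfig := filepath.Join(os.Getenv("HOME"), ".kube", "config")
	config, err := clientcmd.BuildConfigFromFlags("", kubeconfig)
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(config)
	if err != nil {
		panic(err)
	}
	// The CSINode object mirrors the kubelet's plugin-registration state: a
	// driver appears in .Spec.Drivers only after its node plugin has registered
	// over the kubelet's plugin-registration socket.
	csiNode, err := cs.StorageV1().CSINodes().Get(context.TODO(), "crc", metav1.GetOptions{})
	if err != nil {
		panic(err)
	}
	for _, d := range csiNode.Spec.Drivers {
		fmt.Printf("registered driver: %s (nodeID %s)\n", d.Name, d.NodeID)
	}
}

While kubevirt.io.hostpath-provisioner is absent from that list, errors like the ones above are the expected steady state; they stop once the csi-hostpathplugin pod is running and registered.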
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.114056 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-7wsdg"]
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.127857 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-jtvpg" event={"ID":"f4a001af-cfdf-49ed-9d59-646a1b5683da","Type":"ContainerStarted","Data":"da32b08b43d79f3917dc42b3efae8f261fdf4a6d315ee54abdf3eedf8b4775fd"}
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.130985 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-nzw9g"]
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.138759 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-dssbm" event={"ID":"02bc99e0-e3a2-4c8b-b936-b5d7535412d0","Type":"ContainerStarted","Data":"78b54aed83e263e042bd1850444a1ab95e1279ee5840801cb82a679ed95ddaf0"}
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.157733 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gxspm" event={"ID":"c8aea032-97b6-474a-a09f-b04c638a8a54","Type":"ContainerStarted","Data":"c83f45bfb2142eb7bf409de6fa12f65da111dea3479fdbf75c301dc651512869"}
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.165013 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q7vr4" event={"ID":"76a1cb2c-b73b-4d38-90b2-1ab8e91e0998","Type":"ContainerStarted","Data":"70f445d4815e7fc42acc614eda1a4d01603c1850daf15a94a9281851f42dfcf5"}
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.180469 4811 generic.go:334] "Generic (PLEG): container finished" podID="c2b17da8-0524-4615-a9e5-f1c59a0fde0b" containerID="0272909c475f80d2e84fc4209bf07d2f712ecb178c21ff65f74127a5c804fa69" exitCode=0
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.180548 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gmjnj" event={"ID":"c2b17da8-0524-4615-a9e5-f1c59a0fde0b","Type":"ContainerDied","Data":"0272909c475f80d2e84fc4209bf07d2f712ecb178c21ff65f74127a5c804fa69"}
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.180578 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gmjnj" event={"ID":"c2b17da8-0524-4615-a9e5-f1c59a0fde0b","Type":"ContainerStarted","Data":"f3d0a98a602726d25fb436a8b43ae0d4fe2a76cfb2732387bb5e8726da920214"}
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.183494 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qrkpf" event={"ID":"c386978b-8bd6-432e-b6df-9cacf8acd0ae","Type":"ContainerStarted","Data":"4f072cc6cc8d4e098814e5690cb0bcf72330a080d330b030699d48e34e42be72"}
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.185243 4811 generic.go:334] "Generic (PLEG): container finished" podID="bfeb8a90-cb53-45c4-8650-1931e986a5f3" containerID="b02cacb945e9ac0dd1ea079770a248a00e663b0c32ac7b4a7ee06596819b09a6" exitCode=0
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.185290 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-rkd9p" event={"ID":"bfeb8a90-cb53-45c4-8650-1931e986a5f3","Type":"ContainerDied","Data":"b02cacb945e9ac0dd1ea079770a248a00e663b0c32ac7b4a7ee06596819b09a6"}
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.185306 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-rkd9p" event={"ID":"bfeb8a90-cb53-45c4-8650-1931e986a5f3","Type":"ContainerStarted","Data":"b065923318607de5ebb14b90d59d07fc55e7a254c72b74576b26445fad0744db"}
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.188193 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dwxpb" event={"ID":"f2fae918-6d12-482a-9cf1-9cb27efb5f8c","Type":"ContainerStarted","Data":"2b84d2c595f459896dede0b5f34f9ffd4c6ab2ace170ff6016402dd78ac08c27"}
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.188227 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dwxpb" event={"ID":"f2fae918-6d12-482a-9cf1-9cb27efb5f8c","Type":"ContainerStarted","Data":"137c6c88cb9d3c3a38787e835c1fac729f35a10601f2c89cca8267959c178335"}
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.188559 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dwxpb"
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.189688 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jfj5" event={"ID":"0ac199b3-b2fa-4acf-92e5-0840b7da5c41","Type":"ContainerStarted","Data":"bd1e7d1c0d88b9773b5dbfdac9a8f083897afed25195a9abb6334524642ef91b"}
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.195580 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-xcx2z" event={"ID":"cfa466bf-9337-45cd-a739-c0d3b5521e13","Type":"ContainerStarted","Data":"69198bcacb227f8d3e6450f96cb168ed9cc090da6ba286858cb5d6dcd16a4722"}
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.200567 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-knslb" event={"ID":"1167ed60-0864-42bb-aba1-48ba9ee03867","Type":"ContainerStarted","Data":"719cb70d5f9cae0ea65a1c124d4aafdb42e6de469221cd563f4a1c1369750b2a"}
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.204085 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-lmds9" event={"ID":"9cea0670-a73c-4ef8-962e-ac12bdef4283","Type":"ContainerStarted","Data":"cf2974478625ea7909440f3fa967c6fe0db2545111ec50f5756fc1b654740976"}
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.205228 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" event={"ID":"f28ff403-47a1-47de-b3f4-f6519b75064f","Type":"ContainerStarted","Data":"e234cced49affed23aea5123cb526fc34527ec0add90b5c6d44d5d8d7b9be057"}
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.206677 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:31 crc kubenswrapper[4811]: E0128 15:47:31.206827 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:31.706804496 +0000 UTC m=+144.461168079 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.206999 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:31 crc kubenswrapper[4811]: E0128 15:47:31.207496 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:31.707487165 +0000 UTC m=+144.461850748 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.214609 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-vffxs" event={"ID":"4141dca3-d75b-4da2-ad3d-47ab847ff59b","Type":"ContainerStarted","Data":"ccb436f14d4f7071f98e46570646989a9ed6dcf9f6699a46684f348bd205c469"}
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.218133 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-87g76" event={"ID":"1cc605f1-f5a1-4f06-b05c-fb0881684445","Type":"ContainerStarted","Data":"ed8bda49a290bcdb9195bd7f0230ee93c0a9b17ae75fe180ea1011e5660839af"}
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.230566 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-lqmnb" event={"ID":"d0d13dd3-5445-427c-81f2-ddf4eed82723","Type":"ContainerStarted","Data":"e92a448edf6234e909e1b25332c81ec26e3c4413b0af4467624fb1ae68a3b9e6"}
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.232320 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493585-8md6j" event={"ID":"78a73da1-92b0-4724-90aa-1a8f5aa3e2ec","Type":"ContainerStarted","Data":"9833bb83b45754ba9dfe9acf197478654013ced57787fbdbc34949976dc50fc5"}
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.243012 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xg589" event={"ID":"f2e6c078-567b-4c22-aece-ebc4f195e9a2","Type":"ContainerStarted","Data":"efb071d39a17f17e1a80ec968ac0b860d3099519f73f34e8a044860a0e03fa37"}
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.258759 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tb2z9" event={"ID":"797642e6-462e-434f-952f-ea096d1a4e47","Type":"ContainerStarted","Data":"7cd4a25f64bf1f832d36ec184a710950f851d2814c5123759e2d4ce3de090b29"}
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.271035 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-c4zgz" event={"ID":"0439276f-d9ad-40f1-aa18-d05a1ee2b768","Type":"ContainerStarted","Data":"6b09b9ada7b423430de82a25cf2489cc4e1b65ba39afa36c8f563c93c8cc445a"}
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.274462 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8" event={"ID":"2fc54817-ea76-451c-8dc9-2bf6684dc2b8","Type":"ContainerStarted","Data":"41232b718594cfa94ab17d527aecb6a6362da65cbf34a850027745d6e7046242"}
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.276082 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-t9r9j" event={"ID":"ef30f9b4-f11c-4149-a320-539203ac5666","Type":"ContainerStarted","Data":"40204bb74c840fc2040f2a1f609605464ed67dc8146b23c927a3f533f532a9db"}
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.280396 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfnhq" event={"ID":"2c5167f6-623a-4b81-ad25-0e866528fd94","Type":"ContainerStarted","Data":"9745465a7c60356ee5c2a527558c48c8397bc03f9b6ca06b5479ecf7a33a7230"}
Jan 28 15:47:31 crc kubenswrapper[4811]: W0128 15:47:31.284267 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc6d392c6_2fcb_4d68_81da_6cb52a6a5e8f.slice/crio-c5f3e8f9f628a1707476cb43a5b0afd04aeb033b76cc261119de8a3b5c37d487 WatchSource:0}: Error finding container c5f3e8f9f628a1707476cb43a5b0afd04aeb033b76cc261119de8a3b5c37d487: Status 404 returned error can't find the container with id c5f3e8f9f628a1707476cb43a5b0afd04aeb033b76cc261119de8a3b5c37d487
Jan 28 15:47:31 crc kubenswrapper[4811]: W0128 15:47:31.296374 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc40e8bbb_6ea1_4838_8ea1_a6ece5ee2695.slice/crio-70d6e45c46fd9c7d5960322c7bcf2a62d3e3e654a522703623a9b4f17de05c65 WatchSource:0}: Error finding container 70d6e45c46fd9c7d5960322c7bcf2a62d3e3e654a522703623a9b4f17de05c65: Status 404 returned error can't find the container with id 70d6e45c46fd9c7d5960322c7bcf2a62d3e3e654a522703623a9b4f17de05c65
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.304560 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dwxpb"
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.308961 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:31 crc kubenswrapper[4811]: E0128 15:47:31.310143 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:31.810125433 +0000 UTC m=+144.564489016 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.317615 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w85gc"]
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.332602 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-gf4gj"]
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.399021 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-4fj7f"]
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.410642 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:31 crc kubenswrapper[4811]: E0128 15:47:31.410968 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:31.910953231 +0000 UTC m=+144.665316814 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.443650 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bx5jn"]
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.485235 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5qxgr"]
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.511747 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:31 crc kubenswrapper[4811]: E0128 15:47:31.512101 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:32.012086747 +0000 UTC m=+144.766450330 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:31 crc kubenswrapper[4811]: W0128 15:47:31.531962 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod86879674_3f52_439f_ab05_e2cfc81edf75.slice/crio-d3841a8bfa2c6674aed26525bd2e994906e95c045d95f74786a4c95fd5d76700 WatchSource:0}: Error finding container d3841a8bfa2c6674aed26525bd2e994906e95c045d95f74786a4c95fd5d76700: Status 404 returned error can't find the container with id d3841a8bfa2c6674aed26525bd2e994906e95c045d95f74786a4c95fd5d76700
Jan 28 15:47:31 crc kubenswrapper[4811]: W0128 15:47:31.549781 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0539bf34_a663_45a1_b9d2_b7a43b62b6ad.slice/crio-a852deb06fa848ca1b72865a92ed409afce9a2eae52c5566ff598506eee30b8f WatchSource:0}: Error finding container a852deb06fa848ca1b72865a92ed409afce9a2eae52c5566ff598506eee30b8f: Status 404 returned error can't find the container with id a852deb06fa848ca1b72865a92ed409afce9a2eae52c5566ff598506eee30b8f
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.598356 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-ttn2c"]
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.616138 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:31 crc kubenswrapper[4811]: E0128 15:47:31.616744 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:32.116729251 +0000 UTC m=+144.871092834 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.645175 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-d4ckz"]
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.664788 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tknqk"]
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.717883 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:31 crc kubenswrapper[4811]: E0128 15:47:31.718256 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:32.218230696 +0000 UTC m=+144.972594279 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.718395 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:31 crc kubenswrapper[4811]: E0128 15:47:31.719328 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:32.219319537 +0000 UTC m=+144.973683120 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.747637 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-lb8jv"]
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.755954 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-jvb2s"]
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.762568 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-27m45"]
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.773834 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-q66fr"]
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.779308 4811 csr.go:261] certificate signing request csr-4k4nf is approved, waiting to be issued
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.793313 4811 csr.go:257] certificate signing request csr-4k4nf is issued
Jan 28 15:47:31 crc kubenswrapper[4811]: W0128 15:47:31.800778 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod144e6914_af95_49dc_a18e_197adbd1ee56.slice/crio-9d88c86990403ad5c843f2a8c07a1bea2b836de81b83d67cbe50414431dc22de WatchSource:0}: Error finding container 9d88c86990403ad5c843f2a8c07a1bea2b836de81b83d67cbe50414431dc22de: Status 404 returned error can't find the container with id 9d88c86990403ad5c843f2a8c07a1bea2b836de81b83d67cbe50414431dc22de
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.819760 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:31 crc kubenswrapper[4811]: E0128 15:47:31.820020 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:32.3199968 +0000 UTC m=+145.074360423 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:31 crc kubenswrapper[4811]: I0128 15:47:31.921442 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:31 crc kubenswrapper[4811]: E0128 15:47:31.922148 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:32.422135054 +0000 UTC m=+145.176498637 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.026471 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:32 crc kubenswrapper[4811]: E0128 15:47:32.027261 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:32.527241711 +0000 UTC m=+145.281605294 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.069644 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dwxpb" podStartSLOduration=124.069624696 podStartE2EDuration="2m4.069624696s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:32.037181266 +0000 UTC m=+144.791544839" watchObservedRunningTime="2026-01-28 15:47:32.069624696 +0000 UTC m=+144.823988279"
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.111719 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qrkpf" podStartSLOduration=125.111698954 podStartE2EDuration="2m5.111698954s" podCreationTimestamp="2026-01-28 15:45:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:32.075193341 +0000 UTC m=+144.829556944" watchObservedRunningTime="2026-01-28 15:47:32.111698954 +0000 UTC m=+144.866062537"
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.137139 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:32 crc kubenswrapper[4811]: E0128 15:47:32.137562 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:32.637550392 +0000 UTC m=+145.391913975 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
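The interleaved nestedpendingoperations errors above show how the kubelet's volume manager copes while the driver is missing: each failed MountVolume/UnmountVolume operation is parked and requeued with a delay before the next attempt (durationBeforeRetry 500ms throughout this window, visible in the m=+... monotonic offsets). Below is a sketch of the same wait-with-backoff pattern using k8s.io/apimachinery's wait helpers; driverIsRegistered is a hypothetical stand-in for the registration check, and this illustrates the pattern only, not the kubelet's actual nestedpendingoperations code:

// retry.go - illustrative sketch of retry-until-registered with backoff;
// not taken from kubelet source, parameters chosen to echo the log above.
package main

import (
	"errors"
	"fmt"
	"time"

	"k8s.io/apimachinery/pkg/util/wait"
)

// driverIsRegistered is a hypothetical check, e.g. "does
// kubevirt.io.hostpath-provisioner appear in the node's CSINode drivers yet?"
func driverIsRegistered() bool { return false }

func main() {
	backoff := wait.Backoff{
		Duration: 500 * time.Millisecond, // matches durationBeforeRetry in the log
		Factor:   2.0,                    // grow the delay on repeated failures
		Steps:    5,                      // give up after a bounded number of tries
	}
	err := wait.ExponentialBackoff(backoff, func() (bool, error) {
		if driverIsRegistered() {
			return true, nil // registered: the mount can proceed
		}
		fmt.Println("driver not registered yet; retrying after backoff")
		return false, nil // not yet: requeue, like the kubelet does above
	})
	if errors.Is(err, wait.ErrWaitTimeout) {
		fmt.Println("gave up waiting for driver registration")
	}
}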
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.238331 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:32 crc kubenswrapper[4811]: E0128 15:47:32.242057 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:32.742012609 +0000 UTC m=+145.496376192 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.243825 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:32 crc kubenswrapper[4811]: E0128 15:47:32.244420 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:32.744401736 +0000 UTC m=+145.498765319 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.309795 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tb2z9" event={"ID":"797642e6-462e-434f-952f-ea096d1a4e47","Type":"ContainerStarted","Data":"53d9e31d2909788194da431c738f570b190175587f10b2bb90118bff16587f76"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.310145 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tb2z9" event={"ID":"797642e6-462e-434f-952f-ea096d1a4e47","Type":"ContainerStarted","Data":"0d6efc675042f43db72d6b8004bde3464eb4225454b4d8a726bbfd55e5c17cd0"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.316078 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfnhq" event={"ID":"2c5167f6-623a-4b81-ad25-0e866528fd94","Type":"ContainerStarted","Data":"b9680b00b381eef510dcf53ee3c254ce829f86f6a0c5cf8da6ae85b7ac573548"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.345850 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:32 crc kubenswrapper[4811]: E0128 15:47:32.346320 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:32.846297144 +0000 UTC m=+145.600660717 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.364003 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-c4zgz" event={"ID":"0439276f-d9ad-40f1-aa18-d05a1ee2b768","Type":"ContainerStarted","Data":"200fb079fd5be8f73c47e72525e982bb58572feaf87758357b435fdf12ca8703"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.387644 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-vffxs" event={"ID":"4141dca3-d75b-4da2-ad3d-47ab847ff59b","Type":"ContainerStarted","Data":"df619fdb7afb6b14f42c05efc4b5f496f410b9b150981607c535cd2d834c75d2"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.388544 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-vffxs"
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.391861 4811 patch_prober.go:28] interesting pod/console-operator-58897d9998-vffxs container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.20:8443/readyz\": dial tcp 10.217.0.20:8443: connect: connection refused" start-of-body=
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.391906 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-vffxs" podUID="4141dca3-d75b-4da2-ad3d-47ab847ff59b" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.20:8443/readyz\": dial tcp 10.217.0.20:8443: connect: connection refused"
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.396832 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w85gc" event={"ID":"b3262acb-1ffb-43da-81c2-8271c1e8acfa","Type":"ContainerStarted","Data":"53a702523d2c5657e7c1b3b6b15f30c74904303b37074f4c1644ba700a5aa741"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.396877 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w85gc" event={"ID":"b3262acb-1ffb-43da-81c2-8271c1e8acfa","Type":"ContainerStarted","Data":"2cfc1311a8c647d8f02d7d6096178f79ce66d2e20d82813f52b2f6a0c7f4d541"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.405242 4811 generic.go:334] "Generic (PLEG): container finished" podID="2fc54817-ea76-451c-8dc9-2bf6684dc2b8" containerID="e0be5569d7005a4a07cd71e5ef0a7a211700f1ff76c6abaaafb2293b1649b2d6" exitCode=0
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.405296 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8" event={"ID":"2fc54817-ea76-451c-8dc9-2bf6684dc2b8","Type":"ContainerDied","Data":"e0be5569d7005a4a07cd71e5ef0a7a211700f1ff76c6abaaafb2293b1649b2d6"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.407916 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tknqk" event={"ID":"144e6914-af95-49dc-a18e-197adbd1ee56","Type":"ContainerStarted","Data":"9d88c86990403ad5c843f2a8c07a1bea2b836de81b83d67cbe50414431dc22de"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.410490 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ttn2c" event={"ID":"104b0b18-f5c1-4257-b6e0-39ca8f30378c","Type":"ContainerStarted","Data":"2ed0d0ca178b69163b8e95dc917ebf7665e29b6797f2ea847fb4a7a85d66dae7"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.412581 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7wsdg" event={"ID":"9763c54a-c0e1-4f5f-969e-ea775fdfa175","Type":"ContainerStarted","Data":"a8d1f0cbcf95939314798c3e5ceae444ce5a658060a5cbe2b0cd5b4528dbed8b"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.412603 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7wsdg" event={"ID":"9763c54a-c0e1-4f5f-969e-ea775fdfa175","Type":"ContainerStarted","Data":"60971104b270d4f30e8f7fbd21bb34e71446b87ea29b659d0be6440936ef0f3d"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.414113 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-87g76" event={"ID":"1cc605f1-f5a1-4f06-b05c-fb0881684445","Type":"ContainerStarted","Data":"799d644e7bdfb2cdf7655f0e6e89fa24bfb4a7367e9a22c11492b89139061481"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.416830 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-t9r9j" event={"ID":"ef30f9b4-f11c-4149-a320-539203ac5666","Type":"ContainerStarted","Data":"dbefcc0f7b4836e90c31c5d99a0518cbf0a0d3b05bef23f5917c5ca539dbb844"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.423302 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-gf4gj" event={"ID":"264ad201-9fa4-497f-a8c1-5f17c22ed862","Type":"ContainerStarted","Data":"701c14d0385f0d6fa83fba53a75956194a437f2d5a2ca76e0a3ac1544c767bfc"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.424660 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4ckz" event={"ID":"68df2bcf-ac5e-4868-80dc-9b568e2e3eba","Type":"ContainerStarted","Data":"cf835944634ac619f74ce84e37f2a83080420b2c85f5d59544669f788469a0a3"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.426753 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" event={"ID":"f28ff403-47a1-47de-b3f4-f6519b75064f","Type":"ContainerStarted","Data":"84169e25180ec9a14d024ee723345e5a801b6eba011a74a1f79967734c92f32a"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.427923 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz"
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.435920 4811 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-dgnfz container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.10:6443/healthz\": dial tcp 10.217.0.10:6443: connect: connection refused" start-of-body=
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.436303 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" podUID="f28ff403-47a1-47de-b3f4-f6519b75064f" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.10:6443/healthz\": dial tcp 10.217.0.10:6443: connect: connection refused"
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.441086 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-5t2fc" event={"ID":"e40716fd-b721-424a-84d9-935b50543fd9","Type":"ContainerStarted","Data":"2412d25635a5c392cfee9afee901bb43e06829de2fc499d851da8f6bebbfacdb"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.441117 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-5t2fc" event={"ID":"e40716fd-b721-424a-84d9-935b50543fd9","Type":"ContainerStarted","Data":"d27997c60c9316ed5574d16e2fde672495c24a7c3858566821ef0ae80f67d4c4"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.443496 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gxspm" event={"ID":"c8aea032-97b6-474a-a09f-b04c638a8a54","Type":"ContainerStarted","Data":"ecdd6acf30678d149789cd2cbe9de7a74e5ad507ce316796c7652a57ff7c68c5"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.444089 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gxspm"
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.446918 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:32 crc kubenswrapper[4811]: E0128 15:47:32.447701 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:32.947683546 +0000 UTC m=+145.702047129 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.466729 4811 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-gxspm container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.23:8443/healthz\": dial tcp 10.217.0.23:8443: connect: connection refused" start-of-body=
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.466788 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gxspm" podUID="c8aea032-97b6-474a-a09f-b04c638a8a54" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.23:8443/healthz\": dial tcp 10.217.0.23:8443: connect: connection refused"
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.468468 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q7vr4" event={"ID":"76a1cb2c-b73b-4d38-90b2-1ab8e91e0998","Type":"ContainerStarted","Data":"940068dc59867d2e095ed2a74593149fb14e935e998486cf9cd6c25a0e5ee6f2"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.471137 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-shsbw" event={"ID":"c6d392c6-2fcb-4d68-81da-6cb52a6a5e8f","Type":"ContainerStarted","Data":"662c2db2282a9d08d732b8915f21cb993dfecfbb55abb1a4f9204e417f02383b"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.471292 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-shsbw" event={"ID":"c6d392c6-2fcb-4d68-81da-6cb52a6a5e8f","Type":"ContainerStarted","Data":"c5f3e8f9f628a1707476cb43a5b0afd04aeb033b76cc261119de8a3b5c37d487"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.477329 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-knslb" event={"ID":"1167ed60-0864-42bb-aba1-48ba9ee03867","Type":"ContainerStarted","Data":"df4b104570a6a9f6c9c3164841f752206c8207040d814a6aed95b0dac3b4d93a"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.479016 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-jvb2s" event={"ID":"7b0b6c1f-a9c1-41dc-8495-7646a81b616b","Type":"ContainerStarted","Data":"e6e63e170befe90a817ef759d8c0d030c60277a4bc872ffc07f2e54e5c4c6fde"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.481778 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-xcx2z" event={"ID":"cfa466bf-9337-45cd-a739-c0d3b5521e13","Type":"ContainerStarted","Data":"dbc20bcc218f6790f976ede91884bc3ef00f630b1869bc9a8479eead9a54b72f"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.489552 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-nzw9g" event={"ID":"c40e8bbb-6ea1-4838-8ea1-a6ece5ee2695","Type":"ContainerStarted","Data":"70d6e45c46fd9c7d5960322c7bcf2a62d3e3e654a522703623a9b4f17de05c65"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.491128 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-q66fr" event={"ID":"c6911965-9557-4504-8fc4-66b5f3c45391","Type":"ContainerStarted","Data":"628f4ad82f96c58b47f88ffd73432cc522de501684a920e81a9a02d914bce81d"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.514630 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jfj5" event={"ID":"0ac199b3-b2fa-4acf-92e5-0840b7da5c41","Type":"ContainerStarted","Data":"464d3e3cebe7f2b2c512daf34376f4b09f01171bef074a7682fe543e0207ed45"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.517839 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-27m45" event={"ID":"c5e6c2fc-f99a-443f-a0c6-165478a1e838","Type":"ContainerStarted","Data":"dfde520ddc1c96bd46c977cbcfee90f16853289dd792ce0b49dab50952d7fe9f"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.518786 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bx5jn" event={"ID":"86879674-3f52-439f-ab05-e2cfc81edf75","Type":"ContainerStarted","Data":"d3841a8bfa2c6674aed26525bd2e994906e95c045d95f74786a4c95fd5d76700"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.528027 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-lmds9" event={"ID":"9cea0670-a73c-4ef8-962e-ac12bdef4283","Type":"ContainerStarted","Data":"f53a3901765dc9a2feab9ceb1ca3ef235f8ae4a615bfa943ec864affaeaaa57b"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.534156 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5qxgr" event={"ID":"0539bf34-a663-45a1-b9d2-b7a43b62b6ad","Type":"ContainerStarted","Data":"a852deb06fa848ca1b72865a92ed409afce9a2eae52c5566ff598506eee30b8f"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.540500 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xg589" event={"ID":"f2e6c078-567b-4c22-aece-ebc4f195e9a2","Type":"ContainerStarted","Data":"446fa627366caca7fe6e27b901168371004d664960078cd57cc2538efb102ad8"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.543901 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-bg96p" event={"ID":"3e446987-0b7c-46f9-8207-dfe3e84240e4","Type":"ContainerStarted","Data":"bd940772d754040567cb99bca7d427ebbe6c8791dcaf6630954ca56c364bcebb"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.547607 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:32 crc kubenswrapper[4811]: E0128 15:47:32.550202 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:33.050183381 +0000 UTC m=+145.804546964 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.559356 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-4fj7f" event={"ID":"4a5edfb4-e7c7-45b3-aada-9f579b2f8434","Type":"ContainerStarted","Data":"248b78ff11a5a7027a1e4caa6d4539864ec5649b71f80cda35a4bbec47117824"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.559421 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-4fj7f" event={"ID":"4a5edfb4-e7c7-45b3-aada-9f579b2f8434","Type":"ContainerStarted","Data":"ad360f741b99c04e583ad1288288dba2f1f46a5dfbf5fe6935030240fb632f09"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.601063 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493585-8md6j" event={"ID":"78a73da1-92b0-4724-90aa-1a8f5aa3e2ec","Type":"ContainerStarted","Data":"5bbce93fed947b56c71593c12958888367245730eb8aefe087e57cda86ee80bf"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.611587 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gmjnj" event={"ID":"c2b17da8-0524-4615-a9e5-f1c59a0fde0b","Type":"ContainerStarted","Data":"ad66640899b013f2ee5e9bf7b20d3985c572998d7e854b1e5224f0fb3c1fd53e"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.611742 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gmjnj"
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.623604 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" podStartSLOduration=125.623555737 podStartE2EDuration="2m5.623555737s" podCreationTimestamp="2026-01-28 15:45:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:32.610005361 +0000 UTC m=+145.364368944" watchObservedRunningTime="2026-01-28 15:47:32.623555737 +0000 UTC m=+145.377919330"
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.632524 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-dssbm" event={"ID":"02bc99e0-e3a2-4c8b-b936-b5d7535412d0","Type":"ContainerStarted","Data":"449f7ed758309f2cb35ef21e840f45adac23900b2e42114afc76b2ee18770f4e"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.632715 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-dssbm"
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.642723 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-lqmnb" event={"ID":"d0d13dd3-5445-427c-81f2-ddf4eed82723","Type":"ContainerStarted","Data":"471b83fc8aa60d3443212cafe24f2b87b2cd08687f6354c2a86ddc2abee6afd7"}
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.643263 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfnhq" podStartSLOduration=124.643248163 podStartE2EDuration="2m4.643248163s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:32.641821304 +0000 UTC m=+145.396184887" watchObservedRunningTime="2026-01-28 15:47:32.643248163 +0000 UTC m=+145.397611746"
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.644087 4811 patch_prober.go:28] interesting pod/downloads-7954f5f757-dssbm container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.29:8080/\": dial tcp 10.217.0.29:8080: connect: connection refused" start-of-body=
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.644125 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-dssbm" podUID="02bc99e0-e3a2-4c8b-b936-b5d7535412d0" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.29:8080/\": dial tcp 10.217.0.29:8080: connect: connection refused"
Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.650207 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:32 crc kubenswrapper[4811]: E0128 15:47:32.653443 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:33.153412655 +0000 UTC m=+145.907776238 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.659582 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-lb8jv" event={"ID":"d8950881-4230-4e6a-b69d-cf9c3731c634","Type":"ContainerStarted","Data":"75504a5b0e2a23ba3e27e5906d5e6a15bb9896c438d59db0e45759fb4da98ee9"} Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.716893 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-4fj7f" podStartSLOduration=5.716880607 podStartE2EDuration="5.716880607s" podCreationTimestamp="2026-01-28 15:47:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:32.715318193 +0000 UTC m=+145.469681776" watchObservedRunningTime="2026-01-28 15:47:32.716880607 +0000 UTC m=+145.471244190" Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.717340 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-47dsj" event={"ID":"6c6e1eda-08d6-4430-8d7c-64c859d57f60","Type":"ContainerStarted","Data":"a56f4b6d37cb9cc6fa902725c7083c348a725016b3d7c08dd20c533fe05851e4"} Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.717371 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-47dsj" event={"ID":"6c6e1eda-08d6-4430-8d7c-64c859d57f60","Type":"ContainerStarted","Data":"7d4c1f583444c3d3fdc85842b9775ab76c11dd02afa487071306de3fd5d45269"} Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.752645 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:47:32 crc kubenswrapper[4811]: E0128 15:47:32.753764 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:33.253723199 +0000 UTC m=+146.008086782 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.768502 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-knslb" podStartSLOduration=124.768423206 podStartE2EDuration="2m4.768423206s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:32.765369152 +0000 UTC m=+145.519732735" watchObservedRunningTime="2026-01-28 15:47:32.768423206 +0000 UTC m=+145.522786949" Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.795100 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-01-28 15:42:31 +0000 UTC, rotation deadline is 2026-12-21 15:16:30.236827952 +0000 UTC Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.795534 4811 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 7847h28m57.441298083s for next certificate rotation Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.815815 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tb2z9" podStartSLOduration=125.815795031 podStartE2EDuration="2m5.815795031s" podCreationTimestamp="2026-01-28 15:45:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:32.813314502 +0000 UTC m=+145.567678085" watchObservedRunningTime="2026-01-28 15:47:32.815795031 +0000 UTC m=+145.570158624" Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.850872 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-bg96p" podStartSLOduration=125.850855414 podStartE2EDuration="2m5.850855414s" podCreationTimestamp="2026-01-28 15:45:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:32.849506056 +0000 UTC m=+145.603869639" watchObservedRunningTime="2026-01-28 15:47:32.850855414 +0000 UTC m=+145.605218987" Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.853903 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:32 crc kubenswrapper[4811]: E0128 15:47:32.854588 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:33.354573957 +0000 UTC m=+146.108937540 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.924867 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-vffxs" podStartSLOduration=125.924845187 podStartE2EDuration="2m5.924845187s" podCreationTimestamp="2026-01-28 15:45:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:32.886459582 +0000 UTC m=+145.640823165" watchObservedRunningTime="2026-01-28 15:47:32.924845187 +0000 UTC m=+145.679208770" Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.929204 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-87g76" podStartSLOduration=124.929188168 podStartE2EDuration="2m4.929188168s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:32.9231307 +0000 UTC m=+145.677494293" watchObservedRunningTime="2026-01-28 15:47:32.929188168 +0000 UTC m=+145.683551751" Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.956057 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:47:32 crc kubenswrapper[4811]: E0128 15:47:32.956311 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:33.45628487 +0000 UTC m=+146.210648453 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.956554 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:32 crc kubenswrapper[4811]: E0128 15:47:32.957332 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:33.457319348 +0000 UTC m=+146.211682931 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:32 crc kubenswrapper[4811]: I0128 15:47:32.982055 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w85gc" podStartSLOduration=124.982029804 podStartE2EDuration="2m4.982029804s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:32.97216874 +0000 UTC m=+145.726532333" watchObservedRunningTime="2026-01-28 15:47:32.982029804 +0000 UTC m=+145.736393377" Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.001453 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gxspm" podStartSLOduration=125.001418162 podStartE2EDuration="2m5.001418162s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:32.998783849 +0000 UTC m=+145.753147432" watchObservedRunningTime="2026-01-28 15:47:33.001418162 +0000 UTC m=+145.755781745" Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.062006 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:47:33 crc kubenswrapper[4811]: E0128 15:47:33.062584 4811 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:33.562564798 +0000 UTC m=+146.316928371 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.090558 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.090613 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.102139 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jfj5" podStartSLOduration=125.102097666 podStartE2EDuration="2m5.102097666s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:33.088293893 +0000 UTC m=+145.842657476" watchObservedRunningTime="2026-01-28 15:47:33.102097666 +0000 UTC m=+145.856461249" Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.140335 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q7vr4" podStartSLOduration=126.140316676 podStartE2EDuration="2m6.140316676s" podCreationTimestamp="2026-01-28 15:45:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:33.139806842 +0000 UTC m=+145.894170425" watchObservedRunningTime="2026-01-28 15:47:33.140316676 +0000 UTC m=+145.894680259" Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.182303 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-shsbw" podStartSLOduration=125.182289121 podStartE2EDuration="2m5.182289121s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:33.180843451 +0000 UTC m=+145.935207034" watchObservedRunningTime="2026-01-28 15:47:33.182289121 +0000 UTC m=+145.936652704" Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.186886 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-87g76" Jan 28 15:47:33 crc 
kubenswrapper[4811]: I0128 15:47:33.190288 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:33 crc kubenswrapper[4811]: E0128 15:47:33.190855 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:33.690843258 +0000 UTC m=+146.445206841 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.197814 4811 patch_prober.go:28] interesting pod/router-default-5444994796-87g76 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 28 15:47:33 crc kubenswrapper[4811]: [-]has-synced failed: reason withheld Jan 28 15:47:33 crc kubenswrapper[4811]: [+]process-running ok Jan 28 15:47:33 crc kubenswrapper[4811]: healthz check failed Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.197877 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-87g76" podUID="1cc605f1-f5a1-4f06-b05c-fb0881684445" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.294949 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:47:33 crc kubenswrapper[4811]: E0128 15:47:33.295120 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:33.795095671 +0000 UTC m=+146.549459264 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:33 crc kubenswrapper[4811]: E0128 15:47:33.298653 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:33.798634709 +0000 UTC m=+146.552998292 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.305807 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-xcx2z" podStartSLOduration=126.305787427 podStartE2EDuration="2m6.305787427s" podCreationTimestamp="2026-01-28 15:45:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:33.248742684 +0000 UTC m=+146.003106267" watchObservedRunningTime="2026-01-28 15:47:33.305787427 +0000 UTC m=+146.060151020" Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.307550 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.323887 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-t9r9j" podStartSLOduration=125.323865179 podStartE2EDuration="2m5.323865179s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:33.304101121 +0000 UTC m=+146.058464714" watchObservedRunningTime="2026-01-28 15:47:33.323865179 +0000 UTC m=+146.078228762" Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.327997 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-5t2fc" podStartSLOduration=125.327979423 podStartE2EDuration="2m5.327979423s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:33.326340648 +0000 UTC m=+146.080704231" watchObservedRunningTime="2026-01-28 15:47:33.327979423 +0000 UTC m=+146.082343006" Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 
15:47:33.352026 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-47dsj" podStartSLOduration=125.3520051 podStartE2EDuration="2m5.3520051s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:33.34661984 +0000 UTC m=+146.100983423" watchObservedRunningTime="2026-01-28 15:47:33.3520051 +0000 UTC m=+146.106368683" Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.367415 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-lqmnb" podStartSLOduration=6.367396247 podStartE2EDuration="6.367396247s" podCreationTimestamp="2026-01-28 15:47:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:33.364765374 +0000 UTC m=+146.119128967" watchObservedRunningTime="2026-01-28 15:47:33.367396247 +0000 UTC m=+146.121759830" Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.386277 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-dssbm" podStartSLOduration=126.386263331 podStartE2EDuration="2m6.386263331s" podCreationTimestamp="2026-01-28 15:45:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:33.383770581 +0000 UTC m=+146.138134164" watchObservedRunningTime="2026-01-28 15:47:33.386263331 +0000 UTC m=+146.140626914" Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.402754 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29493585-8md6j" podStartSLOduration=125.402728027 podStartE2EDuration="2m5.402728027s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:33.400393043 +0000 UTC m=+146.154756636" watchObservedRunningTime="2026-01-28 15:47:33.402728027 +0000 UTC m=+146.157091610" Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.410592 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:47:33 crc kubenswrapper[4811]: E0128 15:47:33.410747 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:33.910719989 +0000 UTC m=+146.665083572 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.411040 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:33 crc kubenswrapper[4811]: E0128 15:47:33.412170 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:33.912149709 +0000 UTC m=+146.666513292 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.513498 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:47:33 crc kubenswrapper[4811]: E0128 15:47:33.513704 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:34.013671316 +0000 UTC m=+146.768034899 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.513911 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:33 crc kubenswrapper[4811]: E0128 15:47:33.514225 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:34.014211051 +0000 UTC m=+146.768574634 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.615260 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:47:33 crc kubenswrapper[4811]: E0128 15:47:33.615809 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:34.115586933 +0000 UTC m=+146.869950526 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.616100 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:33 crc kubenswrapper[4811]: E0128 15:47:33.616623 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:34.116613072 +0000 UTC m=+146.870976655 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.716962 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:47:33 crc kubenswrapper[4811]: E0128 15:47:33.717349 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:34.217325417 +0000 UTC m=+146.971689000 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.739193 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-rkd9p" event={"ID":"bfeb8a90-cb53-45c4-8650-1931e986a5f3","Type":"ContainerStarted","Data":"b257237e7222aa1e93acbebfe5ddcdbec230b3fecf995b6281d0a2b3c3825fd8"} Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.739252 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-rkd9p" event={"ID":"bfeb8a90-cb53-45c4-8650-1931e986a5f3","Type":"ContainerStarted","Data":"2d924a590ed274e06f54420532bb965108de313cefccc608833be0d2ca4bc097"} Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.773023 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gmjnj" podStartSLOduration=126.773005982 podStartE2EDuration="2m6.773005982s" podCreationTimestamp="2026-01-28 15:45:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:33.447608362 +0000 UTC m=+146.201971945" watchObservedRunningTime="2026-01-28 15:47:33.773005982 +0000 UTC m=+146.527369575" Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.773708 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tknqk" event={"ID":"144e6914-af95-49dc-a18e-197adbd1ee56","Type":"ContainerStarted","Data":"4f02c67352a80b41f1bcc50dae589188672a20e5821bbd12eea00e6e731a8b1b"} Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.774741 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tknqk" Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.774734 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-rkd9p" podStartSLOduration=126.774724729 podStartE2EDuration="2m6.774724729s" podCreationTimestamp="2026-01-28 15:45:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:33.772655432 +0000 UTC m=+146.527019025" watchObservedRunningTime="2026-01-28 15:47:33.774724729 +0000 UTC m=+146.529088312" Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.783575 4811 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-tknqk container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.26:5443/healthz\": dial tcp 10.217.0.26:5443: connect: connection refused" start-of-body= Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.783628 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tknqk" podUID="144e6914-af95-49dc-a18e-197adbd1ee56" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.26:5443/healthz\": dial tcp 10.217.0.26:5443: 
connect: connection refused" Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.817029 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xg589" event={"ID":"f2e6c078-567b-4c22-aece-ebc4f195e9a2","Type":"ContainerStarted","Data":"051990cb5c8201199ea7f504fa987e892528bdf0d79fcb4b2348e19700fba833"} Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.818250 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xg589" Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.822351 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:33 crc kubenswrapper[4811]: E0128 15:47:33.823681 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:34.323667627 +0000 UTC m=+147.078031300 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.833743 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-c4zgz" event={"ID":"0439276f-d9ad-40f1-aa18-d05a1ee2b768","Type":"ContainerStarted","Data":"dc6d63c6799f131f066755306bd3e390dfccc6ddfd2ac7f50c76b0792106beca"} Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.848705 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bx5jn" event={"ID":"86879674-3f52-439f-ab05-e2cfc81edf75","Type":"ContainerStarted","Data":"24c9968668015f645c2872d9290f3a69cdc924bc39bc7ca6839a21463589cfba"} Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.849722 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bx5jn" Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.852558 4811 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-bx5jn container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.41:8443/healthz\": dial tcp 10.217.0.41:8443: connect: connection refused" start-of-body= Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.852604 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bx5jn" podUID="86879674-3f52-439f-ab05-e2cfc81edf75" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.41:8443/healthz\": dial tcp 10.217.0.41:8443: connect: connection refused" Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 
15:47:33.855801 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-gf4gj" event={"ID":"264ad201-9fa4-497f-a8c1-5f17c22ed862","Type":"ContainerStarted","Data":"98d0a7fbe60bf9bc00b3edebbcd0213f0d682b008247fef7869918ee5771edf8"} Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.855841 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-gf4gj" event={"ID":"264ad201-9fa4-497f-a8c1-5f17c22ed862","Type":"ContainerStarted","Data":"5142ef01ebce1f714f690294418a098ede34a82010a855561ddf512c3435f1d3"} Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.857829 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7wsdg" event={"ID":"9763c54a-c0e1-4f5f-969e-ea775fdfa175","Type":"ContainerStarted","Data":"814162ae75444e3af53932656e44b2702bca65d01421f6992c86a2ee31609052"} Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.866571 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tknqk" podStartSLOduration=125.866391923 podStartE2EDuration="2m5.866391923s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:33.806357977 +0000 UTC m=+146.560721570" watchObservedRunningTime="2026-01-28 15:47:33.866391923 +0000 UTC m=+146.620755516" Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.868122 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-47dsj" event={"ID":"6c6e1eda-08d6-4430-8d7c-64c859d57f60","Type":"ContainerStarted","Data":"91cfea7be6a1ced15633426b23523ff544b0009b91723c97492fc3f111884131"} Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.869321 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xg589" podStartSLOduration=125.869309174 podStartE2EDuration="2m5.869309174s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:33.86448307 +0000 UTC m=+146.618846663" watchObservedRunningTime="2026-01-28 15:47:33.869309174 +0000 UTC m=+146.623672767" Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.889342 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7wsdg" podStartSLOduration=125.889322669 podStartE2EDuration="2m5.889322669s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:33.88212953 +0000 UTC m=+146.636493113" watchObservedRunningTime="2026-01-28 15:47:33.889322669 +0000 UTC m=+146.643686252" Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.900126 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-nzw9g" event={"ID":"c40e8bbb-6ea1-4838-8ea1-a6ece5ee2695","Type":"ContainerStarted","Data":"812210c6d89abb40e06e0d175b85f19397a2f25b52e9054e3b0c795f55808012"} Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.910914 4811 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bx5jn" podStartSLOduration=125.910898917 podStartE2EDuration="2m5.910898917s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:33.908588154 +0000 UTC m=+146.662951737" watchObservedRunningTime="2026-01-28 15:47:33.910898917 +0000 UTC m=+146.665262500" Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.925673 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:47:33 crc kubenswrapper[4811]: E0128 15:47:33.925932 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:34.425914394 +0000 UTC m=+147.180277977 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.926094 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:33 crc kubenswrapper[4811]: E0128 15:47:33.927583 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:34.42757266 +0000 UTC m=+147.181936243 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.936028 4811 generic.go:334] "Generic (PLEG): container finished" podID="78a73da1-92b0-4724-90aa-1a8f5aa3e2ec" containerID="5bbce93fed947b56c71593c12958888367245730eb8aefe087e57cda86ee80bf" exitCode=0
Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.936148 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493585-8md6j" event={"ID":"78a73da1-92b0-4724-90aa-1a8f5aa3e2ec","Type":"ContainerDied","Data":"5bbce93fed947b56c71593c12958888367245730eb8aefe087e57cda86ee80bf"}
Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.966165 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-lb8jv" event={"ID":"d8950881-4230-4e6a-b69d-cf9c3731c634","Type":"ContainerStarted","Data":"36231a58e0f4a0a26288dbdf849dffcadc9a54f208d2ca7d32875087aef5e495"}
Jan 28 15:47:33 crc kubenswrapper[4811]: I0128 15:47:33.989821 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-gf4gj" podStartSLOduration=125.989799027 podStartE2EDuration="2m5.989799027s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:33.950056604 +0000 UTC m=+146.704420197" watchObservedRunningTime="2026-01-28 15:47:33.989799027 +0000 UTC m=+146.744162610"
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.030218 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6"
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.031155 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-jtvpg" event={"ID":"f4a001af-cfdf-49ed-9d59-646a1b5683da","Type":"ContainerStarted","Data":"4956ad3d6f7b9352da3c378a36eb3db1f036ee9ac9a3cc02c12446f15cd0ae19"}
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.031680 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:34 crc kubenswrapper[4811]: E0128 15:47:34.032848 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:34.532832961 +0000 UTC m=+147.287196544 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.033394 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-c4zgz" podStartSLOduration=126.033375066 podStartE2EDuration="2m6.033375066s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:33.990478636 +0000 UTC m=+146.744842219" watchObservedRunningTime="2026-01-28 15:47:34.033375066 +0000 UTC m=+146.787738649"
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.052003 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4ckz" event={"ID":"68df2bcf-ac5e-4868-80dc-9b568e2e3eba","Type":"ContainerStarted","Data":"d6d7cb6c2bff39a927b6a1368095bd7e0a6df0567be329e55c4e8baa41e101cb"}
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.052055 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4ckz" event={"ID":"68df2bcf-ac5e-4868-80dc-9b568e2e3eba","Type":"ContainerStarted","Data":"9a6c786bf15b040f80f8b6cda4899eedfa37caf4b399730a6bf8929a230a93ea"}
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.055789 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8" event={"ID":"2fc54817-ea76-451c-8dc9-2bf6684dc2b8","Type":"ContainerStarted","Data":"cb8acb14d51bd38ffde45bd67f3d5bf9d6e8e269197576ec3e2876c14f2ab0dd"}
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.066413 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-27m45" event={"ID":"c5e6c2fc-f99a-443f-a0c6-165478a1e838","Type":"ContainerStarted","Data":"1cb15ae328d3dbb0e4c2fb5ae2b6c3ae625ffc1f6afa3f2e37173551a93e6425"}
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.067361 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-27m45"
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.089801 4811 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-27m45 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.14:8443/healthz\": dial tcp 10.217.0.14:8443: connect: connection refused" start-of-body=
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.090118 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-27m45" podUID="c5e6c2fc-f99a-443f-a0c6-165478a1e838" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.14:8443/healthz\": dial tcp 10.217.0.14:8443: connect: connection refused"
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.095331 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ttn2c" event={"ID":"104b0b18-f5c1-4257-b6e0-39ca8f30378c","Type":"ContainerStarted","Data":"e739996fc433f88717c4a1a36e3bcd6bf1d29990f1a0325149b54a71df4a6e8f"}
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.095389 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ttn2c" event={"ID":"104b0b18-f5c1-4257-b6e0-39ca8f30378c","Type":"ContainerStarted","Data":"2490d27a08c75bec8a9d5b0a7974f646184bb54f371824c1929ac75ca8c2548c"}
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.111640 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-jvb2s" event={"ID":"7b0b6c1f-a9c1-41dc-8495-7646a81b616b","Type":"ContainerStarted","Data":"6653d9c87aa03acc2e877c7b624742f3a764f1d61816d0f5450145e5b1b9f880"}
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.112523 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-jvb2s"
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.119216 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-nzw9g" podStartSLOduration=126.119198157 podStartE2EDuration="2m6.119198157s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:34.052916289 +0000 UTC m=+146.807279872" watchObservedRunningTime="2026-01-28 15:47:34.119198157 +0000 UTC m=+146.873561750"
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.119520 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4ckz" podStartSLOduration=126.119514676 podStartE2EDuration="2m6.119514676s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:34.116838562 +0000 UTC m=+146.871202145" watchObservedRunningTime="2026-01-28 15:47:34.119514676 +0000 UTC m=+146.873878259"
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.126577 4811 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-jvb2s container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/healthz\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body=
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.126643 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-jvb2s" podUID="7b0b6c1f-a9c1-41dc-8495-7646a81b616b" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.25:8080/healthz\": dial tcp 10.217.0.25:8080: connect: connection refused"
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.136248 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:34 crc kubenswrapper[4811]: E0128 15:47:34.136782 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:34.636761985 +0000 UTC m=+147.391125568 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.138984 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-lmds9" event={"ID":"9cea0670-a73c-4ef8-962e-ac12bdef4283","Type":"ContainerStarted","Data":"198918bc003dca235eb20b6242067779b5b821612454737cf389b11573db638d"}
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.150658 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-jtvpg" podStartSLOduration=126.150645471 podStartE2EDuration="2m6.150645471s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:34.148559172 +0000 UTC m=+146.902922755" watchObservedRunningTime="2026-01-28 15:47:34.150645471 +0000 UTC m=+146.905009044"
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.161540 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5qxgr" event={"ID":"0539bf34-a663-45a1-b9d2-b7a43b62b6ad","Type":"ContainerStarted","Data":"0cfe41b6a0d818c2a9172541ec8a0732c354fa61532e87b39ee50555c3eb69fc"}
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.163114 4811 patch_prober.go:28] interesting pod/downloads-7954f5f757-dssbm container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.29:8080/\": dial tcp 10.217.0.29:8080: connect: connection refused" start-of-body=
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.163159 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-dssbm" podUID="02bc99e0-e3a2-4c8b-b936-b5d7535412d0" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.29:8080/\": dial tcp 10.217.0.29:8080: connect: connection refused"
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.192006 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz"
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.192479 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gxspm"
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.194817 4811 patch_prober.go:28] interesting pod/router-default-5444994796-87g76 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 28 15:47:34 crc kubenswrapper[4811]: [-]has-synced failed: reason withheld
Jan 28 15:47:34 crc kubenswrapper[4811]: [+]process-running ok
Jan 28 15:47:34 crc kubenswrapper[4811]: healthz check failed
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.195079 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-87g76" podUID="1cc605f1-f5a1-4f06-b05c-fb0881684445" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.237209 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:34 crc kubenswrapper[4811]: E0128 15:47:34.237976 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:34.737954433 +0000 UTC m=+147.492318016 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.238632 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:34 crc kubenswrapper[4811]: E0128 15:47:34.249811 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:34.749787861 +0000 UTC m=+147.504151444 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.270142 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8" podStartSLOduration=126.270120975 podStartE2EDuration="2m6.270120975s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:34.259224723 +0000 UTC m=+147.013588306" watchObservedRunningTime="2026-01-28 15:47:34.270120975 +0000 UTC m=+147.024484558"
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.342050 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:34 crc kubenswrapper[4811]: E0128 15:47:34.342458 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:34.842422502 +0000 UTC m=+147.596786095 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.351956 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ttn2c" podStartSLOduration=126.351938195 podStartE2EDuration="2m6.351938195s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:34.351592306 +0000 UTC m=+147.105955899" watchObservedRunningTime="2026-01-28 15:47:34.351938195 +0000 UTC m=+147.106301778"
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.353151 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-jvb2s" podStartSLOduration=126.353144509 podStartE2EDuration="2m6.353144509s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:34.32113054 +0000 UTC m=+147.075494123" watchObservedRunningTime="2026-01-28 15:47:34.353144509 +0000 UTC m=+147.107508092"
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.380309 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-27m45" podStartSLOduration=126.380290353 podStartE2EDuration="2m6.380290353s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:34.380224121 +0000 UTC m=+147.134587704" watchObservedRunningTime="2026-01-28 15:47:34.380290353 +0000 UTC m=+147.134653936"
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.453229 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:34 crc kubenswrapper[4811]: E0128 15:47:34.453940 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:34.953926815 +0000 UTC m=+147.708290388 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.481180 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5qxgr" podStartSLOduration=126.481163862 podStartE2EDuration="2m6.481163862s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:34.48077701 +0000 UTC m=+147.235140593" watchObservedRunningTime="2026-01-28 15:47:34.481163862 +0000 UTC m=+147.235527445"
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.483315 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-lmds9" podStartSLOduration=126.483307201 podStartE2EDuration="2m6.483307201s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:34.453898065 +0000 UTC m=+147.208261658" watchObservedRunningTime="2026-01-28 15:47:34.483307201 +0000 UTC m=+147.237670784"
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.534637 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-rkd9p"
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.534695 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-rkd9p"
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.538277 4811 patch_prober.go:28] interesting pod/apiserver-76f77b778f-rkd9p container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="Get \"https://10.217.0.9:8443/livez\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body=
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.538331 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-rkd9p" podUID="bfeb8a90-cb53-45c4-8650-1931e986a5f3" containerName="openshift-apiserver" probeResult="failure" output="Get \"https://10.217.0.9:8443/livez\": dial tcp 10.217.0.9:8443: connect: connection refused"
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.558540 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:34 crc kubenswrapper[4811]: E0128 15:47:34.558660 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:35.058640041 +0000 UTC m=+147.813003634 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.558884 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:34 crc kubenswrapper[4811]: E0128 15:47:34.559290 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:35.059269878 +0000 UTC m=+147.813633451 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.659793 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:34 crc kubenswrapper[4811]: E0128 15:47:34.659989 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:35.159964022 +0000 UTC m=+147.914327605 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.660152 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:34 crc kubenswrapper[4811]: E0128 15:47:34.660477 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:35.160465707 +0000 UTC m=+147.914829290 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.761311 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:34 crc kubenswrapper[4811]: E0128 15:47:34.761457 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:35.261416158 +0000 UTC m=+148.015779731 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.761561 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:34 crc kubenswrapper[4811]: E0128 15:47:34.761871 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:35.26185846 +0000 UTC m=+148.016222043 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.780470 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8"
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.780519 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8"
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.782454 4811 patch_prober.go:28] interesting pod/apiserver-7bbb656c7d-b9zf8 container/oauth-apiserver namespace/openshift-oauth-apiserver: Startup probe status=failure output="Get \"https://10.217.0.5:8443/livez\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body=
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.782532 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8" podUID="2fc54817-ea76-451c-8dc9-2bf6684dc2b8" containerName="oauth-apiserver" probeResult="failure" output="Get \"https://10.217.0.5:8443/livez\": dial tcp 10.217.0.5:8443: connect: connection refused"
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.862726 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:34 crc kubenswrapper[4811]: E0128 15:47:34.862926 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:35.362895623 +0000 UTC m=+148.117259206 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.862991 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:34 crc kubenswrapper[4811]: E0128 15:47:34.863353 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:35.363346096 +0000 UTC m=+148.117709679 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.963677 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:34 crc kubenswrapper[4811]: E0128 15:47:34.963890 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:35.463856905 +0000 UTC m=+148.218220488 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.964218 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:34 crc kubenswrapper[4811]: E0128 15:47:34.964595 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:35.464586285 +0000 UTC m=+148.218949868 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.973901 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-n2nwr"]
Jan 28 15:47:34 crc kubenswrapper[4811]: I0128 15:47:34.975003 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n2nwr"
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.004948 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.023730 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-n2nwr"]
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.065645 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:35 crc kubenswrapper[4811]: E0128 15:47:35.065811 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:35.565791013 +0000 UTC m=+148.320154596 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.065881 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcb7ac6d-c7f1-4c34-9805-e3333249b868-utilities\") pod \"community-operators-n2nwr\" (UID: \"fcb7ac6d-c7f1-4c34-9805-e3333249b868\") " pod="openshift-marketplace/community-operators-n2nwr"
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.065926 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.065972 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcb7ac6d-c7f1-4c34-9805-e3333249b868-catalog-content\") pod \"community-operators-n2nwr\" (UID: \"fcb7ac6d-c7f1-4c34-9805-e3333249b868\") " pod="openshift-marketplace/community-operators-n2nwr"
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.065995 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74wxb\" (UniqueName: \"kubernetes.io/projected/fcb7ac6d-c7f1-4c34-9805-e3333249b868-kube-api-access-74wxb\") pod \"community-operators-n2nwr\" (UID: \"fcb7ac6d-c7f1-4c34-9805-e3333249b868\") " pod="openshift-marketplace/community-operators-n2nwr"
Jan 28 15:47:35 crc kubenswrapper[4811]: E0128 15:47:35.066239 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:35.566224815 +0000 UTC m=+148.320588458 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.115421 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-znvwk"]
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.116639 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-znvwk"
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.118478 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.128058 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-znvwk"]
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.164555 4811 patch_prober.go:28] interesting pod/console-operator-58897d9998-vffxs container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.20:8443/readyz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.164629 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-vffxs" podUID="4141dca3-d75b-4da2-ad3d-47ab847ff59b" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.20:8443/readyz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.166937 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:35 crc kubenswrapper[4811]: E0128 15:47:35.167114 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:35.667074984 +0000 UTC m=+148.421438577 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.167305 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcb7ac6d-c7f1-4c34-9805-e3333249b868-utilities\") pod \"community-operators-n2nwr\" (UID: \"fcb7ac6d-c7f1-4c34-9805-e3333249b868\") " pod="openshift-marketplace/community-operators-n2nwr"
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.167386 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.167531 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcb7ac6d-c7f1-4c34-9805-e3333249b868-catalog-content\") pod \"community-operators-n2nwr\" (UID: \"fcb7ac6d-c7f1-4c34-9805-e3333249b868\") " pod="openshift-marketplace/community-operators-n2nwr"
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.167580 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74wxb\" (UniqueName: \"kubernetes.io/projected/fcb7ac6d-c7f1-4c34-9805-e3333249b868-kube-api-access-74wxb\") pod \"community-operators-n2nwr\" (UID: \"fcb7ac6d-c7f1-4c34-9805-e3333249b868\") " pod="openshift-marketplace/community-operators-n2nwr"
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.167728 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcb7ac6d-c7f1-4c34-9805-e3333249b868-utilities\") pod \"community-operators-n2nwr\" (UID: \"fcb7ac6d-c7f1-4c34-9805-e3333249b868\") " pod="openshift-marketplace/community-operators-n2nwr"
Jan 28 15:47:35 crc kubenswrapper[4811]: E0128 15:47:35.167738 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:35.667718902 +0000 UTC m=+148.422082555 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.167980 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcb7ac6d-c7f1-4c34-9805-e3333249b868-catalog-content\") pod \"community-operators-n2nwr\" (UID: \"fcb7ac6d-c7f1-4c34-9805-e3333249b868\") " pod="openshift-marketplace/community-operators-n2nwr"
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.192592 4811 patch_prober.go:28] interesting pod/router-default-5444994796-87g76 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 28 15:47:35 crc kubenswrapper[4811]: [-]has-synced failed: reason withheld
Jan 28 15:47:35 crc kubenswrapper[4811]: [+]process-running ok
Jan 28 15:47:35 crc kubenswrapper[4811]: healthz check failed
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.192661 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-87g76" podUID="1cc605f1-f5a1-4f06-b05c-fb0881684445" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.198068 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-q66fr" event={"ID":"c6911965-9557-4504-8fc4-66b5f3c45391","Type":"ContainerStarted","Data":"c2a604d0a0aaaf036b630f0f8117e02f0fc7daab848996987e9ef60e27795fd4"}
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.213035 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-lb8jv" event={"ID":"d8950881-4230-4e6a-b69d-cf9c3731c634","Type":"ContainerStarted","Data":"9a7ef9893d3af0f12abd2c264d80ec8a1db086b6b961be2d45d0b2ee2a996145"}
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.216735 4811 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-jvb2s container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/healthz\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body=
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.216785 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-jvb2s" podUID="7b0b6c1f-a9c1-41dc-8495-7646a81b616b" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.25:8080/healthz\": dial tcp 10.217.0.25:8080: connect: connection refused"
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.218618 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-lb8jv"
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.239220 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74wxb\" (UniqueName: \"kubernetes.io/projected/fcb7ac6d-c7f1-4c34-9805-e3333249b868-kube-api-access-74wxb\") pod \"community-operators-n2nwr\" (UID: \"fcb7ac6d-c7f1-4c34-9805-e3333249b868\") " pod="openshift-marketplace/community-operators-n2nwr"
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.241716 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bx5jn"
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.246608 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-27m45"
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.268951 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:35 crc kubenswrapper[4811]: E0128 15:47:35.269104 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:35.769072724 +0000 UTC m=+148.523436317 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.269213 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.269269 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87f8aef8-757b-4a73-9ee5-09751c3b7e92-utilities\") pod \"certified-operators-znvwk\" (UID: \"87f8aef8-757b-4a73-9ee5-09751c3b7e92\") " pod="openshift-marketplace/certified-operators-znvwk"
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.269295 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87f8aef8-757b-4a73-9ee5-09751c3b7e92-catalog-content\") pod \"certified-operators-znvwk\" (UID: \"87f8aef8-757b-4a73-9ee5-09751c3b7e92\") " pod="openshift-marketplace/certified-operators-znvwk"
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.269448 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lwj7\" (UniqueName: \"kubernetes.io/projected/87f8aef8-757b-4a73-9ee5-09751c3b7e92-kube-api-access-5lwj7\") pod \"certified-operators-znvwk\" (UID: \"87f8aef8-757b-4a73-9ee5-09751c3b7e92\") " pod="openshift-marketplace/certified-operators-znvwk"
Jan 28 15:47:35 crc kubenswrapper[4811]: E0128 15:47:35.269840 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:35.769830765 +0000 UTC m=+148.524194348 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.278632 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-lb8jv" podStartSLOduration=8.278614599 podStartE2EDuration="8.278614599s" podCreationTimestamp="2026-01-28 15:47:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:35.246239541 +0000 UTC m=+148.000603124" watchObservedRunningTime="2026-01-28 15:47:35.278614599 +0000 UTC m=+148.032978182"
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.293504 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n2nwr"
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.350494 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zkcxm"]
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.375345 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.375831 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5lwj7\" (UniqueName: \"kubernetes.io/projected/87f8aef8-757b-4a73-9ee5-09751c3b7e92-kube-api-access-5lwj7\") pod \"certified-operators-znvwk\" (UID: \"87f8aef8-757b-4a73-9ee5-09751c3b7e92\") " pod="openshift-marketplace/certified-operators-znvwk"
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.376380 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87f8aef8-757b-4a73-9ee5-09751c3b7e92-utilities\") pod \"certified-operators-znvwk\" (UID: \"87f8aef8-757b-4a73-9ee5-09751c3b7e92\") " pod="openshift-marketplace/certified-operators-znvwk"
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.376404 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87f8aef8-757b-4a73-9ee5-09751c3b7e92-catalog-content\") pod \"certified-operators-znvwk\" (UID: \"87f8aef8-757b-4a73-9ee5-09751c3b7e92\") " pod="openshift-marketplace/certified-operators-znvwk"
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.376406 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zkcxm"]
Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.376580 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zkcxm"
Need to start a new one" pod="openshift-marketplace/community-operators-zkcxm" Jan 28 15:47:35 crc kubenswrapper[4811]: E0128 15:47:35.378337 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:35.878315005 +0000 UTC m=+148.632678588 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.392824 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87f8aef8-757b-4a73-9ee5-09751c3b7e92-utilities\") pod \"certified-operators-znvwk\" (UID: \"87f8aef8-757b-4a73-9ee5-09751c3b7e92\") " pod="openshift-marketplace/certified-operators-znvwk" Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.396403 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87f8aef8-757b-4a73-9ee5-09751c3b7e92-catalog-content\") pod \"certified-operators-znvwk\" (UID: \"87f8aef8-757b-4a73-9ee5-09751c3b7e92\") " pod="openshift-marketplace/certified-operators-znvwk" Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.460298 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5lwj7\" (UniqueName: \"kubernetes.io/projected/87f8aef8-757b-4a73-9ee5-09751c3b7e92-kube-api-access-5lwj7\") pod \"certified-operators-znvwk\" (UID: \"87f8aef8-757b-4a73-9ee5-09751c3b7e92\") " pod="openshift-marketplace/certified-operators-znvwk" Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.520193 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d23adda9-57ba-4cbf-be0a-6cec03fbdc1d-catalog-content\") pod \"community-operators-zkcxm\" (UID: \"d23adda9-57ba-4cbf-be0a-6cec03fbdc1d\") " pod="openshift-marketplace/community-operators-zkcxm" Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.520848 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d23adda9-57ba-4cbf-be0a-6cec03fbdc1d-utilities\") pod \"community-operators-zkcxm\" (UID: \"d23adda9-57ba-4cbf-be0a-6cec03fbdc1d\") " pod="openshift-marketplace/community-operators-zkcxm" Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.520994 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.521108 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjwfm\" 
(UniqueName: \"kubernetes.io/projected/d23adda9-57ba-4cbf-be0a-6cec03fbdc1d-kube-api-access-sjwfm\") pod \"community-operators-zkcxm\" (UID: \"d23adda9-57ba-4cbf-be0a-6cec03fbdc1d\") " pod="openshift-marketplace/community-operators-zkcxm" Jan 28 15:47:35 crc kubenswrapper[4811]: E0128 15:47:35.521653 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:36.021637883 +0000 UTC m=+148.776001466 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.524873 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mqvcx"] Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.526102 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mqvcx" Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.541830 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gmjnj" Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.548573 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mqvcx"] Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.608777 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-vffxs" Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.622925 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.623101 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d23adda9-57ba-4cbf-be0a-6cec03fbdc1d-catalog-content\") pod \"community-operators-zkcxm\" (UID: \"d23adda9-57ba-4cbf-be0a-6cec03fbdc1d\") " pod="openshift-marketplace/community-operators-zkcxm" Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.623233 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d23adda9-57ba-4cbf-be0a-6cec03fbdc1d-utilities\") pod \"community-operators-zkcxm\" (UID: \"d23adda9-57ba-4cbf-be0a-6cec03fbdc1d\") " pod="openshift-marketplace/community-operators-zkcxm" Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.623273 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lgts\" (UniqueName: \"kubernetes.io/projected/5f806cee-7e54-4979-a1a2-d42a4c892013-kube-api-access-7lgts\") pod \"certified-operators-mqvcx\" (UID: 
\"5f806cee-7e54-4979-a1a2-d42a4c892013\") " pod="openshift-marketplace/certified-operators-mqvcx" Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.623293 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjwfm\" (UniqueName: \"kubernetes.io/projected/d23adda9-57ba-4cbf-be0a-6cec03fbdc1d-kube-api-access-sjwfm\") pod \"community-operators-zkcxm\" (UID: \"d23adda9-57ba-4cbf-be0a-6cec03fbdc1d\") " pod="openshift-marketplace/community-operators-zkcxm" Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.623322 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f806cee-7e54-4979-a1a2-d42a4c892013-catalog-content\") pod \"certified-operators-mqvcx\" (UID: \"5f806cee-7e54-4979-a1a2-d42a4c892013\") " pod="openshift-marketplace/certified-operators-mqvcx" Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.623338 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f806cee-7e54-4979-a1a2-d42a4c892013-utilities\") pod \"certified-operators-mqvcx\" (UID: \"5f806cee-7e54-4979-a1a2-d42a4c892013\") " pod="openshift-marketplace/certified-operators-mqvcx" Jan 28 15:47:35 crc kubenswrapper[4811]: E0128 15:47:35.623456 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:36.123424287 +0000 UTC m=+148.877787870 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.624083 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d23adda9-57ba-4cbf-be0a-6cec03fbdc1d-catalog-content\") pod \"community-operators-zkcxm\" (UID: \"d23adda9-57ba-4cbf-be0a-6cec03fbdc1d\") " pod="openshift-marketplace/community-operators-zkcxm" Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.625068 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d23adda9-57ba-4cbf-be0a-6cec03fbdc1d-utilities\") pod \"community-operators-zkcxm\" (UID: \"d23adda9-57ba-4cbf-be0a-6cec03fbdc1d\") " pod="openshift-marketplace/community-operators-zkcxm" Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.659741 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjwfm\" (UniqueName: \"kubernetes.io/projected/d23adda9-57ba-4cbf-be0a-6cec03fbdc1d-kube-api-access-sjwfm\") pod \"community-operators-zkcxm\" (UID: \"d23adda9-57ba-4cbf-be0a-6cec03fbdc1d\") " pod="openshift-marketplace/community-operators-zkcxm" Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.726227 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.726521 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lgts\" (UniqueName: \"kubernetes.io/projected/5f806cee-7e54-4979-a1a2-d42a4c892013-kube-api-access-7lgts\") pod \"certified-operators-mqvcx\" (UID: \"5f806cee-7e54-4979-a1a2-d42a4c892013\") " pod="openshift-marketplace/certified-operators-mqvcx" Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.726557 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f806cee-7e54-4979-a1a2-d42a4c892013-catalog-content\") pod \"certified-operators-mqvcx\" (UID: \"5f806cee-7e54-4979-a1a2-d42a4c892013\") " pod="openshift-marketplace/certified-operators-mqvcx" Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.726576 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f806cee-7e54-4979-a1a2-d42a4c892013-utilities\") pod \"certified-operators-mqvcx\" (UID: \"5f806cee-7e54-4979-a1a2-d42a4c892013\") " pod="openshift-marketplace/certified-operators-mqvcx" Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.727000 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f806cee-7e54-4979-a1a2-d42a4c892013-utilities\") pod \"certified-operators-mqvcx\" (UID: \"5f806cee-7e54-4979-a1a2-d42a4c892013\") " pod="openshift-marketplace/certified-operators-mqvcx" Jan 28 15:47:35 crc kubenswrapper[4811]: E0128 15:47:35.727276 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:36.227265319 +0000 UTC m=+148.981628902 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.727822 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f806cee-7e54-4979-a1a2-d42a4c892013-catalog-content\") pod \"certified-operators-mqvcx\" (UID: \"5f806cee-7e54-4979-a1a2-d42a4c892013\") " pod="openshift-marketplace/certified-operators-mqvcx" Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.731759 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-znvwk" Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.755811 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lgts\" (UniqueName: \"kubernetes.io/projected/5f806cee-7e54-4979-a1a2-d42a4c892013-kube-api-access-7lgts\") pod \"certified-operators-mqvcx\" (UID: \"5f806cee-7e54-4979-a1a2-d42a4c892013\") " pod="openshift-marketplace/certified-operators-mqvcx" Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.792733 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zkcxm" Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.831131 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:47:35 crc kubenswrapper[4811]: E0128 15:47:35.831677 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:36.331658515 +0000 UTC m=+149.086022098 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.864215 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mqvcx" Jan 28 15:47:35 crc kubenswrapper[4811]: I0128 15:47:35.933483 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:35 crc kubenswrapper[4811]: E0128 15:47:35.934055 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:36.434037706 +0000 UTC m=+149.188401289 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.035903 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:47:36 crc kubenswrapper[4811]: E0128 15:47:36.036283 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:36.536267792 +0000 UTC m=+149.290631375 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.068528 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-n2nwr"] Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.090641 4811 util.go:48] "No ready sandbox for pod can be found. 
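The "No retries permitted until ... (durationBeforeRetry 500ms)" lines show the per-volume retry gate: each failed mount or unmount records an earliest-retry time, and any attempt before that time is rejected without touching the driver. A sketch of the pattern, under the assumption of a doubling, capped backoff (the log only exhibits the initial 500ms step):

    // Sketch of a retry gate like the one behind nestedpendingoperations'
    // "No retries permitted until ..." messages. Policy details are assumed.
    package main

    import (
    	"errors"
    	"fmt"
    	"time"
    )

    type retryGate struct {
    	delay     time.Duration // durationBeforeRetry
    	notBefore time.Time     // earliest permitted retry
    }

    func (g *retryGate) markFailed(now time.Time) {
    	if g.delay == 0 {
    		g.delay = 500 * time.Millisecond // initial backoff seen in the log
    	} else if g.delay < 2*time.Minute {
    		g.delay *= 2 // assumed exponential growth, capped
    	}
    	g.notBefore = now.Add(g.delay)
    }

    func (g *retryGate) tryOperation(now time.Time, op func() error) error {
    	if now.Before(g.notBefore) {
    		return fmt.Errorf("no retries permitted until %s (durationBeforeRetry %s)",
    			g.notBefore.UTC(), g.delay)
    	}
    	if err := op(); err != nil {
    		g.markFailed(now)
    		return err
    	}
    	return nil
    }

    func main() {
    	g := &retryGate{}
    	now := time.Now()
    	_ = g.tryOperation(now, func() error { return errors.New("driver not registered yet") })
    	fmt.Println(g.tryOperation(now.Add(100*time.Millisecond), func() error { return nil })) // still gated
    	fmt.Println(g.tryOperation(now.Add(600*time.Millisecond), func() error { return nil })) // allowed
    }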
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493585-8md6j" Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.139662 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/78a73da1-92b0-4724-90aa-1a8f5aa3e2ec-config-volume\") pod \"78a73da1-92b0-4724-90aa-1a8f5aa3e2ec\" (UID: \"78a73da1-92b0-4724-90aa-1a8f5aa3e2ec\") " Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.139715 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/78a73da1-92b0-4724-90aa-1a8f5aa3e2ec-secret-volume\") pod \"78a73da1-92b0-4724-90aa-1a8f5aa3e2ec\" (UID: \"78a73da1-92b0-4724-90aa-1a8f5aa3e2ec\") " Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.139871 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7426\" (UniqueName: \"kubernetes.io/projected/78a73da1-92b0-4724-90aa-1a8f5aa3e2ec-kube-api-access-h7426\") pod \"78a73da1-92b0-4724-90aa-1a8f5aa3e2ec\" (UID: \"78a73da1-92b0-4724-90aa-1a8f5aa3e2ec\") " Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.140104 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:36 crc kubenswrapper[4811]: E0128 15:47:36.140401 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:36.640390072 +0000 UTC m=+149.394753655 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.141177 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78a73da1-92b0-4724-90aa-1a8f5aa3e2ec-config-volume" (OuterVolumeSpecName: "config-volume") pod "78a73da1-92b0-4724-90aa-1a8f5aa3e2ec" (UID: "78a73da1-92b0-4724-90aa-1a8f5aa3e2ec"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.149791 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78a73da1-92b0-4724-90aa-1a8f5aa3e2ec-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "78a73da1-92b0-4724-90aa-1a8f5aa3e2ec" (UID: "78a73da1-92b0-4724-90aa-1a8f5aa3e2ec"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.150556 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78a73da1-92b0-4724-90aa-1a8f5aa3e2ec-kube-api-access-h7426" (OuterVolumeSpecName: "kube-api-access-h7426") pod "78a73da1-92b0-4724-90aa-1a8f5aa3e2ec" (UID: "78a73da1-92b0-4724-90aa-1a8f5aa3e2ec"). InnerVolumeSpecName "kube-api-access-h7426". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.192564 4811 patch_prober.go:28] interesting pod/router-default-5444994796-87g76 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 28 15:47:36 crc kubenswrapper[4811]: [-]has-synced failed: reason withheld Jan 28 15:47:36 crc kubenswrapper[4811]: [+]process-running ok Jan 28 15:47:36 crc kubenswrapper[4811]: healthz check failed Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.192615 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-87g76" podUID="1cc605f1-f5a1-4f06-b05c-fb0881684445" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.222624 4811 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-tknqk container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.26:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.222697 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tknqk" podUID="144e6914-af95-49dc-a18e-197adbd1ee56" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.26:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.249861 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.250008 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.250134 4811 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/78a73da1-92b0-4724-90aa-1a8f5aa3e2ec-config-volume\") on node \"crc\" DevicePath \"\"" Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.250147 4811 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/78a73da1-92b0-4724-90aa-1a8f5aa3e2ec-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 
Jan 28 15:47:36 crc kubenswrapper[4811]: E0128 15:47:36.254164 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:36.754140628 +0000 UTC m=+149.508504211 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.254542 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.299804 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493585-8md6j" event={"ID":"78a73da1-92b0-4724-90aa-1a8f5aa3e2ec","Type":"ContainerDied","Data":"9833bb83b45754ba9dfe9acf197478654013ced57787fbdbc34949976dc50fc5"}
Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.300107 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9833bb83b45754ba9dfe9acf197478654013ced57787fbdbc34949976dc50fc5"
Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.299839 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493585-8md6j"
Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.307871 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n2nwr" event={"ID":"fcb7ac6d-c7f1-4c34-9805-e3333249b868","Type":"ContainerStarted","Data":"77676292929d2e8e75157235a6425b9fd1b69a7252298d785863cb1ec9b5e0b0"}
Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.351686 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.351725 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.351755 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.351779 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:36 crc kubenswrapper[4811]: E0128 15:47:36.352052 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:36.852041295 +0000 UTC m=+149.606404878 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.364016 4811 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-jvb2s container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/healthz\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body=
Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.364070 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-jvb2s" podUID="7b0b6c1f-a9c1-41dc-8495-7646a81b616b" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.25:8080/healthz\": dial tcp 10.217.0.25:8080: connect: connection refused"
Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.371163 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.371195 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.371769 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.391814 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-q66fr" event={"ID":"c6911965-9557-4504-8fc4-66b5f3c45391","Type":"ContainerStarted","Data":"e3c5218287e20659ad968395944a000fd085f235465dbb4c5131a69e07a939bd"}
Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.454778 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:36 crc kubenswrapper[4811]: E0128 15:47:36.456010 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:36.955995139 +0000 UTC m=+149.710358722 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.561728 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:36 crc kubenswrapper[4811]: E0128 15:47:36.562153 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:37.062139695 +0000 UTC m=+149.816503278 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.662944 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.663729 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:36 crc kubenswrapper[4811]: E0128 15:47:36.664099 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:37.164078713 +0000 UTC m=+149.918442296 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.665426 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.672948 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.694979 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-znvwk"] Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.770277 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:36 crc kubenswrapper[4811]: E0128 15:47:36.770757 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:37.270736993 +0000 UTC m=+150.025100576 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:36 crc kubenswrapper[4811]: W0128 15:47:36.815769 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod87f8aef8_757b_4a73_9ee5_09751c3b7e92.slice/crio-5c6fb20fd4486237ddee39f536f173a7671451e7d5bdf115f23441a80ae06c45 WatchSource:0}: Error finding container 5c6fb20fd4486237ddee39f536f173a7671451e7d5bdf115f23441a80ae06c45: Status 404 returned error can't find the container with id 5c6fb20fd4486237ddee39f536f173a7671451e7d5bdf115f23441a80ae06c45 Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.863186 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mqvcx"] Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.877024 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:47:36 crc kubenswrapper[4811]: E0128 15:47:36.877380 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:37.377361601 +0000 UTC m=+150.131725184 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.950654 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zkcxm"] Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.978095 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:36 crc kubenswrapper[4811]: E0128 15:47:36.978473 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:37.478460356 +0000 UTC m=+150.232823939 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 28 15:47:36 crc kubenswrapper[4811]: I0128 15:47:36.983906 4811 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.081003 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 28 15:47:37 crc kubenswrapper[4811]: E0128 15:47:37.081356 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:37.581319311 +0000 UTC m=+150.335682894 (durationBeforeRetry 500ms). 
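The plugin_watcher line above is the turning point of the whole sequence: the driver's registration socket finally appears under /var/lib/kubelet/plugins_registry, and the kubelet notices it. A minimal sketch of that discovery step, assuming an fsnotify-style filesystem watcher is how socket creation gets picked up (error handling trimmed; the directory path is taken from the log):

    // Watch a plugin registration directory for new *.sock files.
    package main

    import (
    	"fmt"
    	"log"
    	"path/filepath"

    	"github.com/fsnotify/fsnotify"
    )

    func main() {
    	w, err := fsnotify.NewWatcher()
    	if err != nil {
    		log.Fatal(err)
    	}
    	defer w.Close()
    	if err := w.Add("/var/lib/kubelet/plugins_registry"); err != nil {
    		log.Fatal(err)
    	}
    	for ev := range w.Events {
    		// A newly created *.sock file is a plugin asking to register.
    		if ev.Op&fsnotify.Create != 0 && filepath.Ext(ev.Name) == ".sock" {
    			fmt.Println("adding socket path to desired state cache:", ev.Name)
    			// Next step (not shown): dial the socket and run the
    			// registration handshake, as the RegisterPlugin and
    			// csi_plugin lines later in the log record.
    		}
    	}
    }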
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.129415 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mql9l"]
Jan 28 15:47:37 crc kubenswrapper[4811]: E0128 15:47:37.130768 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78a73da1-92b0-4724-90aa-1a8f5aa3e2ec" containerName="collect-profiles"
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.130816 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="78a73da1-92b0-4724-90aa-1a8f5aa3e2ec" containerName="collect-profiles"
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.130950 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="78a73da1-92b0-4724-90aa-1a8f5aa3e2ec" containerName="collect-profiles"
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.131795 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mql9l"
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.135615 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.144233 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mql9l"]
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.188956 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bc51efa-36a8-4548-8497-5bc394e0de82-utilities\") pod \"redhat-marketplace-mql9l\" (UID: \"4bc51efa-36a8-4548-8497-5bc394e0de82\") " pod="openshift-marketplace/redhat-marketplace-mql9l"
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.189001 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bc51efa-36a8-4548-8497-5bc394e0de82-catalog-content\") pod \"redhat-marketplace-mql9l\" (UID: \"4bc51efa-36a8-4548-8497-5bc394e0de82\") " pod="openshift-marketplace/redhat-marketplace-mql9l"
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.189029 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfd4c\" (UniqueName: \"kubernetes.io/projected/4bc51efa-36a8-4548-8497-5bc394e0de82-kube-api-access-dfd4c\") pod \"redhat-marketplace-mql9l\" (UID: \"4bc51efa-36a8-4548-8497-5bc394e0de82\") " pod="openshift-marketplace/redhat-marketplace-mql9l"
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.189057 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:37 crc kubenswrapper[4811]: E0128 15:47:37.189419 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:37.689387089 +0000 UTC m=+150.443750672 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.232718 4811 patch_prober.go:28] interesting pod/router-default-5444994796-87g76 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 28 15:47:37 crc kubenswrapper[4811]: [-]has-synced failed: reason withheld
Jan 28 15:47:37 crc kubenswrapper[4811]: [+]process-running ok
Jan 28 15:47:37 crc kubenswrapper[4811]: healthz check failed
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.232769 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-87g76" podUID="1cc605f1-f5a1-4f06-b05c-fb0881684445" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.292988 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.293122 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfd4c\" (UniqueName: \"kubernetes.io/projected/4bc51efa-36a8-4548-8497-5bc394e0de82-kube-api-access-dfd4c\") pod \"redhat-marketplace-mql9l\" (UID: \"4bc51efa-36a8-4548-8497-5bc394e0de82\") " pod="openshift-marketplace/redhat-marketplace-mql9l"
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.293200 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bc51efa-36a8-4548-8497-5bc394e0de82-utilities\") pod \"redhat-marketplace-mql9l\" (UID: \"4bc51efa-36a8-4548-8497-5bc394e0de82\") " pod="openshift-marketplace/redhat-marketplace-mql9l"
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.293227 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bc51efa-36a8-4548-8497-5bc394e0de82-catalog-content\") pod \"redhat-marketplace-mql9l\" (UID: \"4bc51efa-36a8-4548-8497-5bc394e0de82\") " pod="openshift-marketplace/redhat-marketplace-mql9l"
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.293721 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bc51efa-36a8-4548-8497-5bc394e0de82-catalog-content\") pod \"redhat-marketplace-mql9l\" (UID: \"4bc51efa-36a8-4548-8497-5bc394e0de82\") " pod="openshift-marketplace/redhat-marketplace-mql9l"
Jan 28 15:47:37 crc kubenswrapper[4811]: E0128 15:47:37.294093 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-28 15:47:37.794077885 +0000 UTC m=+150.548441468 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.294558 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bc51efa-36a8-4548-8497-5bc394e0de82-utilities\") pod \"redhat-marketplace-mql9l\" (UID: \"4bc51efa-36a8-4548-8497-5bc394e0de82\") " pod="openshift-marketplace/redhat-marketplace-mql9l"
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.337386 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfd4c\" (UniqueName: \"kubernetes.io/projected/4bc51efa-36a8-4548-8497-5bc394e0de82-kube-api-access-dfd4c\") pod \"redhat-marketplace-mql9l\" (UID: \"4bc51efa-36a8-4548-8497-5bc394e0de82\") " pod="openshift-marketplace/redhat-marketplace-mql9l"
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.344054 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tknqk"
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.366924 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-znvwk" event={"ID":"87f8aef8-757b-4a73-9ee5-09751c3b7e92","Type":"ContainerStarted","Data":"5c6fb20fd4486237ddee39f536f173a7671451e7d5bdf115f23441a80ae06c45"}
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.405810 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:37 crc kubenswrapper[4811]: E0128 15:47:37.406178 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-28 15:47:37.906166334 +0000 UTC m=+150.660529917 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hcsxq" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.430734 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zkcxm" event={"ID":"d23adda9-57ba-4cbf-be0a-6cec03fbdc1d","Type":"ContainerStarted","Data":"bb0210b6977cb36609b1de56dc26110ab8090418dfaa2ad986d51806aa2ad916"}
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.438986 4811 generic.go:334] "Generic (PLEG): container finished" podID="fcb7ac6d-c7f1-4c34-9805-e3333249b868" containerID="bcbac9d59106e06fdfb0529cf9fad9752eac012c0ddd4b0bd12ea4324c9d2641" exitCode=0
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.439044 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n2nwr" event={"ID":"fcb7ac6d-c7f1-4c34-9805-e3333249b868","Type":"ContainerDied","Data":"bcbac9d59106e06fdfb0529cf9fad9752eac012c0ddd4b0bd12ea4324c9d2641"}
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.457010 4811 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.465712 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mqvcx" event={"ID":"5f806cee-7e54-4979-a1a2-d42a4c892013","Type":"ContainerStarted","Data":"e08273033fe47c59c1a4a1de52a985f86333cfd04593f223a3b0b3c2fc48a72f"}
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.493188 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-q66fr" event={"ID":"c6911965-9557-4504-8fc4-66b5f3c45391","Type":"ContainerStarted","Data":"ef7dfa606d7c3708c24a820c75377557277b4005b255884aa9bacc756a47e9c6"}
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.493277 4811 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-01-28T15:47:36.983924018Z","Handler":null,"Name":""}
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.506725 4811 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.506855 4811 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.507414 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.508045 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"e75db49782f991e2bf624a082773eb4ea9d569cd7079939b9ab37499f4eedcbd"}
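Between "RegisterPlugin started" and "Register new plugin" above, the kubelet validates what the new socket reports: a driver name and a version both sides can speak (here 1.0.0). A compressed sketch of that validation step; the exact-prefix version check below is a deliberate simplification for illustration, not the real semver negotiation:

    // Validate a driver's self-reported registration info before adding it
    // to the registered-driver list.
    package main

    import (
    	"fmt"
    	"strings"
    )

    func validateDriver(name, endpoint string, versions []string) error {
    	if name == "" || endpoint == "" {
    		return fmt.Errorf("driver name and endpoint are required")
    	}
    	for _, v := range versions {
    		if strings.HasPrefix(v, "1.") { // accept any CSI 1.x version (simplified)
    			return nil
    		}
    	}
    	return fmt.Errorf("no supported CSI version among %v", versions)
    }

    func main() {
    	err := validateDriver(
    		"kubevirt.io.hostpath-provisioner",
    		"/var/lib/kubelet/plugins/csi-hostpath/csi.sock",
    		[]string{"1.0.0"}, // as reported in the log
    	)
    	fmt.Println("validation error:", err) // nil -> the driver gets registered
    }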
event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"e75db49782f991e2bf624a082773eb4ea9d569cd7079939b9ab37499f4eedcbd"} Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.523030 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.533955 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5gjk2"] Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.534915 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5gjk2" Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.583616 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5gjk2"] Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.600903 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mql9l" Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.612517 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvkm4\" (UniqueName: \"kubernetes.io/projected/ad2a3f3f-a2fa-4868-a770-c114e995ddbc-kube-api-access-pvkm4\") pod \"redhat-marketplace-5gjk2\" (UID: \"ad2a3f3f-a2fa-4868-a770-c114e995ddbc\") " pod="openshift-marketplace/redhat-marketplace-5gjk2" Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.618061 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad2a3f3f-a2fa-4868-a770-c114e995ddbc-utilities\") pod \"redhat-marketplace-5gjk2\" (UID: \"ad2a3f3f-a2fa-4868-a770-c114e995ddbc\") " pod="openshift-marketplace/redhat-marketplace-5gjk2" Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.619492 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.619530 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad2a3f3f-a2fa-4868-a770-c114e995ddbc-catalog-content\") pod \"redhat-marketplace-5gjk2\" (UID: \"ad2a3f3f-a2fa-4868-a770-c114e995ddbc\") " pod="openshift-marketplace/redhat-marketplace-5gjk2" Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.666529 4811 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.666593 4811 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.722844 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvkm4\" (UniqueName: \"kubernetes.io/projected/ad2a3f3f-a2fa-4868-a770-c114e995ddbc-kube-api-access-pvkm4\") pod \"redhat-marketplace-5gjk2\" (UID: \"ad2a3f3f-a2fa-4868-a770-c114e995ddbc\") " pod="openshift-marketplace/redhat-marketplace-5gjk2"
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.723238 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad2a3f3f-a2fa-4868-a770-c114e995ddbc-utilities\") pod \"redhat-marketplace-5gjk2\" (UID: \"ad2a3f3f-a2fa-4868-a770-c114e995ddbc\") " pod="openshift-marketplace/redhat-marketplace-5gjk2"
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.723284 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad2a3f3f-a2fa-4868-a770-c114e995ddbc-catalog-content\") pod \"redhat-marketplace-5gjk2\" (UID: \"ad2a3f3f-a2fa-4868-a770-c114e995ddbc\") " pod="openshift-marketplace/redhat-marketplace-5gjk2"
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.723785 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad2a3f3f-a2fa-4868-a770-c114e995ddbc-catalog-content\") pod \"redhat-marketplace-5gjk2\" (UID: \"ad2a3f3f-a2fa-4868-a770-c114e995ddbc\") " pod="openshift-marketplace/redhat-marketplace-5gjk2"
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.724283 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad2a3f3f-a2fa-4868-a770-c114e995ddbc-utilities\") pod \"redhat-marketplace-5gjk2\" (UID: \"ad2a3f3f-a2fa-4868-a770-c114e995ddbc\") " pod="openshift-marketplace/redhat-marketplace-5gjk2"
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.745547 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvkm4\" (UniqueName: \"kubernetes.io/projected/ad2a3f3f-a2fa-4868-a770-c114e995ddbc-kube-api-access-pvkm4\") pod \"redhat-marketplace-5gjk2\" (UID: \"ad2a3f3f-a2fa-4868-a770-c114e995ddbc\") " pod="openshift-marketplace/redhat-marketplace-5gjk2"
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.814002 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hcsxq\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.886809 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq"
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.932251 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5gjk2"
Jan 28 15:47:37 crc kubenswrapper[4811]: I0128 15:47:37.987288 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mql9l"]
Jan 28 15:47:38 crc kubenswrapper[4811]: W0128 15:47:38.067007 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4bc51efa_36a8_4548_8497_5bc394e0de82.slice/crio-e6e8b43d233cf622c3c44f305c6cab9f322f2f10e4951c2c7247179eef5140e1 WatchSource:0}: Error finding container e6e8b43d233cf622c3c44f305c6cab9f322f2f10e4951c2c7247179eef5140e1: Status 404 returned error can't find the container with id e6e8b43d233cf622c3c44f305c6cab9f322f2f10e4951c2c7247179eef5140e1
Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.127614 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-v759z"]
Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.139806 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-v759z"]
Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.139924 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v759z"
Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.142153 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.192882 4811 patch_prober.go:28] interesting pod/router-default-5444994796-87g76 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 28 15:47:38 crc kubenswrapper[4811]: [-]has-synced failed: reason withheld
Jan 28 15:47:38 crc kubenswrapper[4811]: [+]process-running ok
Jan 28 15:47:38 crc kubenswrapper[4811]: healthz check failed
Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.192935 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-87g76" podUID="1cc605f1-f5a1-4f06-b05c-fb0881684445" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.239124 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8nl5\" (UniqueName: \"kubernetes.io/projected/2195bb89-ac75-41a6-8e39-77506c50c101-kube-api-access-g8nl5\") pod \"redhat-operators-v759z\" (UID: \"2195bb89-ac75-41a6-8e39-77506c50c101\") " pod="openshift-marketplace/redhat-operators-v759z"
Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.239537 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2195bb89-ac75-41a6-8e39-77506c50c101-utilities\") pod \"redhat-operators-v759z\" (UID: \"2195bb89-ac75-41a6-8e39-77506c50c101\") " pod="openshift-marketplace/redhat-operators-v759z"
Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.239585 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2195bb89-ac75-41a6-8e39-77506c50c101-catalog-content\") pod \"redhat-operators-v759z\" (UID: \"2195bb89-ac75-41a6-8e39-77506c50c101\") " pod="openshift-marketplace/redhat-operators-v759z"
\"kubernetes.io/empty-dir/2195bb89-ac75-41a6-8e39-77506c50c101-catalog-content\") pod \"redhat-operators-v759z\" (UID: \"2195bb89-ac75-41a6-8e39-77506c50c101\") " pod="openshift-marketplace/redhat-operators-v759z" Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.281396 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-hcsxq"] Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.340622 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8nl5\" (UniqueName: \"kubernetes.io/projected/2195bb89-ac75-41a6-8e39-77506c50c101-kube-api-access-g8nl5\") pod \"redhat-operators-v759z\" (UID: \"2195bb89-ac75-41a6-8e39-77506c50c101\") " pod="openshift-marketplace/redhat-operators-v759z" Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.340709 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2195bb89-ac75-41a6-8e39-77506c50c101-utilities\") pod \"redhat-operators-v759z\" (UID: \"2195bb89-ac75-41a6-8e39-77506c50c101\") " pod="openshift-marketplace/redhat-operators-v759z" Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.340747 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2195bb89-ac75-41a6-8e39-77506c50c101-catalog-content\") pod \"redhat-operators-v759z\" (UID: \"2195bb89-ac75-41a6-8e39-77506c50c101\") " pod="openshift-marketplace/redhat-operators-v759z" Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.341229 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2195bb89-ac75-41a6-8e39-77506c50c101-catalog-content\") pod \"redhat-operators-v759z\" (UID: \"2195bb89-ac75-41a6-8e39-77506c50c101\") " pod="openshift-marketplace/redhat-operators-v759z" Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.341814 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2195bb89-ac75-41a6-8e39-77506c50c101-utilities\") pod \"redhat-operators-v759z\" (UID: \"2195bb89-ac75-41a6-8e39-77506c50c101\") " pod="openshift-marketplace/redhat-operators-v759z" Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.383059 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.396700 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8nl5\" (UniqueName: \"kubernetes.io/projected/2195bb89-ac75-41a6-8e39-77506c50c101-kube-api-access-g8nl5\") pod \"redhat-operators-v759z\" (UID: \"2195bb89-ac75-41a6-8e39-77506c50c101\") " pod="openshift-marketplace/redhat-operators-v759z" Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.459911 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-v759z" Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.481557 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5gjk2"] Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.540545 4811 generic.go:334] "Generic (PLEG): container finished" podID="5f806cee-7e54-4979-a1a2-d42a4c892013" containerID="64acaf8bed1f63aa816dfb7143ffa1aa0443b236548a9aa7c1a65e6ab188de80" exitCode=0 Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.540894 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mqvcx" event={"ID":"5f806cee-7e54-4979-a1a2-d42a4c892013","Type":"ContainerDied","Data":"64acaf8bed1f63aa816dfb7143ffa1aa0443b236548a9aa7c1a65e6ab188de80"} Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.548461 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9qvvv"] Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.550485 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9qvvv" Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.576685 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-q66fr" event={"ID":"c6911965-9557-4504-8fc4-66b5f3c45391","Type":"ContainerStarted","Data":"e392a75946a2386929db2a7bcc2957b43e67cd819cc16351dcbda61ddfcd1d37"} Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.603853 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"ed4139eeac0d31b9ad3953f5208315b1aaf4d2811a38db4153bdef6acc8b4db3"} Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.604195 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.608989 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"b1b5b405d355313ab21418c3ded2706baabf5694fafda2792e96d8a73f47bfef"} Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.609024 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"9a3b003bea9f5bb36379a4093e9a517e3b698b2eb493c7d6f7941ec731882439"} Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.620440 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9qvvv"] Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.624768 4811 generic.go:334] "Generic (PLEG): container finished" podID="4bc51efa-36a8-4548-8497-5bc394e0de82" containerID="31a8e614278a68693577f9947ed387f5461fc2275375a8e9c9f56799ca085a51" exitCode=0 Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.624847 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mql9l" event={"ID":"4bc51efa-36a8-4548-8497-5bc394e0de82","Type":"ContainerDied","Data":"31a8e614278a68693577f9947ed387f5461fc2275375a8e9c9f56799ca085a51"} Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.624907 4811 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mql9l" event={"ID":"4bc51efa-36a8-4548-8497-5bc394e0de82","Type":"ContainerStarted","Data":"e6e8b43d233cf622c3c44f305c6cab9f322f2f10e4951c2c7247179eef5140e1"} Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.647339 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6-catalog-content\") pod \"redhat-operators-9qvvv\" (UID: \"234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6\") " pod="openshift-marketplace/redhat-operators-9qvvv" Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.647388 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6-utilities\") pod \"redhat-operators-9qvvv\" (UID: \"234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6\") " pod="openshift-marketplace/redhat-operators-9qvvv" Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.647422 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbzq7\" (UniqueName: \"kubernetes.io/projected/234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6-kube-api-access-cbzq7\") pod \"redhat-operators-9qvvv\" (UID: \"234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6\") " pod="openshift-marketplace/redhat-operators-9qvvv" Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.655421 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5gjk2" event={"ID":"ad2a3f3f-a2fa-4868-a770-c114e995ddbc","Type":"ContainerStarted","Data":"3c40a6448bd749faf5f3ebf52b85d5a951b7dec7e5867d07c8d8c14141294733"} Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.661661 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"8d72b876a873cb7ddce87784451348877f016bc78256f757bc654336ccea4061"} Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.661718 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"3cfb34a2542f1c683063bed26894ff959164927ae18b683e6a7f5f1659784795"} Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.673955 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-q66fr" podStartSLOduration=11.673930952 podStartE2EDuration="11.673930952s" podCreationTimestamp="2026-01-28 15:47:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:38.666969489 +0000 UTC m=+151.421333082" watchObservedRunningTime="2026-01-28 15:47:38.673930952 +0000 UTC m=+151.428294535" Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.689902 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" event={"ID":"ec729dda-4b2d-458b-8e11-6cc8beacb717","Type":"ContainerStarted","Data":"eb62d835d4313f690614c429c59b7c8d5820ee8272260a0500e58c5effc81126"} Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.690053 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.713201 4811 generic.go:334] "Generic (PLEG): container finished" podID="87f8aef8-757b-4a73-9ee5-09751c3b7e92" containerID="a8aa3039e298be459caf9356028ad13f97f7f1d5712eec3b61c2cec06187e3c7" exitCode=0 Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.713265 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-znvwk" event={"ID":"87f8aef8-757b-4a73-9ee5-09751c3b7e92","Type":"ContainerDied","Data":"a8aa3039e298be459caf9356028ad13f97f7f1d5712eec3b61c2cec06187e3c7"} Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.720241 4811 generic.go:334] "Generic (PLEG): container finished" podID="d23adda9-57ba-4cbf-be0a-6cec03fbdc1d" containerID="8cce1ee932522c57a8e0aae95d41753191330ecc12e4ede5f97125a3217972f4" exitCode=0 Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.720279 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zkcxm" event={"ID":"d23adda9-57ba-4cbf-be0a-6cec03fbdc1d","Type":"ContainerDied","Data":"8cce1ee932522c57a8e0aae95d41753191330ecc12e4ede5f97125a3217972f4"} Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.748741 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6-catalog-content\") pod \"redhat-operators-9qvvv\" (UID: \"234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6\") " pod="openshift-marketplace/redhat-operators-9qvvv" Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.748784 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6-utilities\") pod \"redhat-operators-9qvvv\" (UID: \"234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6\") " pod="openshift-marketplace/redhat-operators-9qvvv" Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.748811 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbzq7\" (UniqueName: \"kubernetes.io/projected/234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6-kube-api-access-cbzq7\") pod \"redhat-operators-9qvvv\" (UID: \"234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6\") " pod="openshift-marketplace/redhat-operators-9qvvv" Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.749663 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6-utilities\") pod \"redhat-operators-9qvvv\" (UID: \"234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6\") " pod="openshift-marketplace/redhat-operators-9qvvv" Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.749822 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6-catalog-content\") pod \"redhat-operators-9qvvv\" (UID: \"234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6\") " pod="openshift-marketplace/redhat-operators-9qvvv" Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.777493 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbzq7\" (UniqueName: \"kubernetes.io/projected/234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6-kube-api-access-cbzq7\") pod \"redhat-operators-9qvvv\" (UID: \"234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6\") " pod="openshift-marketplace/redhat-operators-9qvvv" Jan 28 15:47:38 
crc kubenswrapper[4811]: I0128 15:47:38.887358 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" podStartSLOduration=130.887335075 podStartE2EDuration="2m10.887335075s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:38.860940402 +0000 UTC m=+151.615303985" watchObservedRunningTime="2026-01-28 15:47:38.887335075 +0000 UTC m=+151.641698658" Jan 28 15:47:38 crc kubenswrapper[4811]: I0128 15:47:38.991659 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9qvvv" Jan 28 15:47:39 crc kubenswrapper[4811]: I0128 15:47:39.007074 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-v759z"] Jan 28 15:47:39 crc kubenswrapper[4811]: W0128 15:47:39.027217 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2195bb89_ac75_41a6_8e39_77506c50c101.slice/crio-3fddb4cb5d7dcfdf2bf89ff773a45ed240a826a4bdae9b0783eab49a47c7c6cf WatchSource:0}: Error finding container 3fddb4cb5d7dcfdf2bf89ff773a45ed240a826a4bdae9b0783eab49a47c7c6cf: Status 404 returned error can't find the container with id 3fddb4cb5d7dcfdf2bf89ff773a45ed240a826a4bdae9b0783eab49a47c7c6cf Jan 28 15:47:39 crc kubenswrapper[4811]: I0128 15:47:39.191908 4811 patch_prober.go:28] interesting pod/router-default-5444994796-87g76 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 28 15:47:39 crc kubenswrapper[4811]: [-]has-synced failed: reason withheld Jan 28 15:47:39 crc kubenswrapper[4811]: [+]process-running ok Jan 28 15:47:39 crc kubenswrapper[4811]: healthz check failed Jan 28 15:47:39 crc kubenswrapper[4811]: I0128 15:47:39.192418 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-87g76" podUID="1cc605f1-f5a1-4f06-b05c-fb0881684445" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 28 15:47:39 crc kubenswrapper[4811]: I0128 15:47:39.252880 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9qvvv"] Jan 28 15:47:39 crc kubenswrapper[4811]: W0128 15:47:39.264291 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod234bbc6b_c1e7_4dd5_8a6c_cb3c8782f6c6.slice/crio-7cde01621c6236e04201c18df537fab656f73fbb1602aeb17731ec1a4de01723 WatchSource:0}: Error finding container 7cde01621c6236e04201c18df537fab656f73fbb1602aeb17731ec1a4de01723: Status 404 returned error can't find the container with id 7cde01621c6236e04201c18df537fab656f73fbb1602aeb17731ec1a4de01723 Jan 28 15:47:39 crc kubenswrapper[4811]: I0128 15:47:39.535774 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-rkd9p" Jan 28 15:47:39 crc kubenswrapper[4811]: I0128 15:47:39.540365 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-rkd9p" Jan 28 15:47:39 crc kubenswrapper[4811]: I0128 15:47:39.660753 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-console/console-f9d7485db-xcx2z" Jan 28 15:47:39 crc kubenswrapper[4811]: I0128 15:47:39.660816 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-xcx2z" Jan 28 15:47:39 crc kubenswrapper[4811]: I0128 15:47:39.662032 4811 patch_prober.go:28] interesting pod/console-f9d7485db-xcx2z container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.7:8443/health\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Jan 28 15:47:39 crc kubenswrapper[4811]: I0128 15:47:39.662084 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-xcx2z" podUID="cfa466bf-9337-45cd-a739-c0d3b5521e13" containerName="console" probeResult="failure" output="Get \"https://10.217.0.7:8443/health\": dial tcp 10.217.0.7:8443: connect: connection refused" Jan 28 15:47:39 crc kubenswrapper[4811]: I0128 15:47:39.773360 4811 generic.go:334] "Generic (PLEG): container finished" podID="ad2a3f3f-a2fa-4868-a770-c114e995ddbc" containerID="ac2c6c37b0f2fb9630e6864a283edc8f7782413efd477b7b8cc0f28453012a8d" exitCode=0 Jan 28 15:47:39 crc kubenswrapper[4811]: I0128 15:47:39.773517 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5gjk2" event={"ID":"ad2a3f3f-a2fa-4868-a770-c114e995ddbc","Type":"ContainerDied","Data":"ac2c6c37b0f2fb9630e6864a283edc8f7782413efd477b7b8cc0f28453012a8d"} Jan 28 15:47:39 crc kubenswrapper[4811]: I0128 15:47:39.779830 4811 generic.go:334] "Generic (PLEG): container finished" podID="234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6" containerID="95b17f0aa2e93ea4bb9c4c1409fef09d4bcd76e8997c1677f496e2411afb9dbd" exitCode=0 Jan 28 15:47:39 crc kubenswrapper[4811]: I0128 15:47:39.779921 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9qvvv" event={"ID":"234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6","Type":"ContainerDied","Data":"95b17f0aa2e93ea4bb9c4c1409fef09d4bcd76e8997c1677f496e2411afb9dbd"} Jan 28 15:47:39 crc kubenswrapper[4811]: I0128 15:47:39.779949 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9qvvv" event={"ID":"234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6","Type":"ContainerStarted","Data":"7cde01621c6236e04201c18df537fab656f73fbb1602aeb17731ec1a4de01723"} Jan 28 15:47:39 crc kubenswrapper[4811]: I0128 15:47:39.782287 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" event={"ID":"ec729dda-4b2d-458b-8e11-6cc8beacb717","Type":"ContainerStarted","Data":"ae24e0e45acf85226f0dae1480c86d203f8fb183bcc1dce0dadcbde39d97d452"} Jan 28 15:47:39 crc kubenswrapper[4811]: I0128 15:47:39.786173 4811 generic.go:334] "Generic (PLEG): container finished" podID="2195bb89-ac75-41a6-8e39-77506c50c101" containerID="3ec267fb51f00ed9e056053ed2175509de5db7e2491aa0fb80809e5c848a9baa" exitCode=0 Jan 28 15:47:39 crc kubenswrapper[4811]: I0128 15:47:39.786308 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v759z" event={"ID":"2195bb89-ac75-41a6-8e39-77506c50c101","Type":"ContainerDied","Data":"3ec267fb51f00ed9e056053ed2175509de5db7e2491aa0fb80809e5c848a9baa"} Jan 28 15:47:39 crc kubenswrapper[4811]: I0128 15:47:39.786330 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v759z" 
event={"ID":"2195bb89-ac75-41a6-8e39-77506c50c101","Type":"ContainerStarted","Data":"3fddb4cb5d7dcfdf2bf89ff773a45ed240a826a4bdae9b0783eab49a47c7c6cf"} Jan 28 15:47:39 crc kubenswrapper[4811]: I0128 15:47:39.787348 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8" Jan 28 15:47:39 crc kubenswrapper[4811]: I0128 15:47:39.798661 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b9zf8" Jan 28 15:47:40 crc kubenswrapper[4811]: I0128 15:47:40.088427 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 28 15:47:40 crc kubenswrapper[4811]: I0128 15:47:40.089185 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 28 15:47:40 crc kubenswrapper[4811]: I0128 15:47:40.091939 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Jan 28 15:47:40 crc kubenswrapper[4811]: I0128 15:47:40.092214 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Jan 28 15:47:40 crc kubenswrapper[4811]: I0128 15:47:40.100990 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 28 15:47:40 crc kubenswrapper[4811]: I0128 15:47:40.119548 4811 patch_prober.go:28] interesting pod/downloads-7954f5f757-dssbm container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.29:8080/\": dial tcp 10.217.0.29:8080: connect: connection refused" start-of-body= Jan 28 15:47:40 crc kubenswrapper[4811]: I0128 15:47:40.119611 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-dssbm" podUID="02bc99e0-e3a2-4c8b-b936-b5d7535412d0" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.29:8080/\": dial tcp 10.217.0.29:8080: connect: connection refused" Jan 28 15:47:40 crc kubenswrapper[4811]: I0128 15:47:40.119629 4811 patch_prober.go:28] interesting pod/downloads-7954f5f757-dssbm container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.29:8080/\": dial tcp 10.217.0.29:8080: connect: connection refused" start-of-body= Jan 28 15:47:40 crc kubenswrapper[4811]: I0128 15:47:40.119689 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-dssbm" podUID="02bc99e0-e3a2-4c8b-b936-b5d7535412d0" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.29:8080/\": dial tcp 10.217.0.29:8080: connect: connection refused" Jan 28 15:47:40 crc kubenswrapper[4811]: I0128 15:47:40.179202 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/29e1c319-f5ba-4c0f-9b57-0efd49fafd5e-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"29e1c319-f5ba-4c0f-9b57-0efd49fafd5e\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 28 15:47:40 crc kubenswrapper[4811]: I0128 15:47:40.179308 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: 
\"kubernetes.io/host-path/29e1c319-f5ba-4c0f-9b57-0efd49fafd5e-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"29e1c319-f5ba-4c0f-9b57-0efd49fafd5e\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 28 15:47:40 crc kubenswrapper[4811]: I0128 15:47:40.186678 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-87g76" Jan 28 15:47:40 crc kubenswrapper[4811]: I0128 15:47:40.192540 4811 patch_prober.go:28] interesting pod/router-default-5444994796-87g76 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 28 15:47:40 crc kubenswrapper[4811]: [-]has-synced failed: reason withheld Jan 28 15:47:40 crc kubenswrapper[4811]: [+]process-running ok Jan 28 15:47:40 crc kubenswrapper[4811]: healthz check failed Jan 28 15:47:40 crc kubenswrapper[4811]: I0128 15:47:40.192605 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-87g76" podUID="1cc605f1-f5a1-4f06-b05c-fb0881684445" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 28 15:47:40 crc kubenswrapper[4811]: I0128 15:47:40.280585 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/29e1c319-f5ba-4c0f-9b57-0efd49fafd5e-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"29e1c319-f5ba-4c0f-9b57-0efd49fafd5e\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 28 15:47:40 crc kubenswrapper[4811]: I0128 15:47:40.280688 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/29e1c319-f5ba-4c0f-9b57-0efd49fafd5e-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"29e1c319-f5ba-4c0f-9b57-0efd49fafd5e\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 28 15:47:40 crc kubenswrapper[4811]: I0128 15:47:40.281889 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/29e1c319-f5ba-4c0f-9b57-0efd49fafd5e-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"29e1c319-f5ba-4c0f-9b57-0efd49fafd5e\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 28 15:47:40 crc kubenswrapper[4811]: I0128 15:47:40.301594 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/29e1c319-f5ba-4c0f-9b57-0efd49fafd5e-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"29e1c319-f5ba-4c0f-9b57-0efd49fafd5e\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 28 15:47:40 crc kubenswrapper[4811]: I0128 15:47:40.412697 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 28 15:47:40 crc kubenswrapper[4811]: I0128 15:47:40.590124 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-jvb2s" Jan 28 15:47:40 crc kubenswrapper[4811]: I0128 15:47:40.735310 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 28 15:47:40 crc kubenswrapper[4811]: I0128 15:47:40.773461 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 28 15:47:40 crc kubenswrapper[4811]: I0128 15:47:40.774242 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 28 15:47:40 crc kubenswrapper[4811]: I0128 15:47:40.780518 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 28 15:47:40 crc kubenswrapper[4811]: I0128 15:47:40.780753 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 28 15:47:40 crc kubenswrapper[4811]: I0128 15:47:40.784649 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 28 15:47:40 crc kubenswrapper[4811]: W0128 15:47:40.828441 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod29e1c319_f5ba_4c0f_9b57_0efd49fafd5e.slice/crio-645975422f7b902c6e977295b120333af0656ac551163f642d2711a3dc7d79d3 WatchSource:0}: Error finding container 645975422f7b902c6e977295b120333af0656ac551163f642d2711a3dc7d79d3: Status 404 returned error can't find the container with id 645975422f7b902c6e977295b120333af0656ac551163f642d2711a3dc7d79d3 Jan 28 15:47:40 crc kubenswrapper[4811]: I0128 15:47:40.889185 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/524a7152-93ea-4930-af9c-37621b569988-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"524a7152-93ea-4930-af9c-37621b569988\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 28 15:47:40 crc kubenswrapper[4811]: I0128 15:47:40.889339 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/524a7152-93ea-4930-af9c-37621b569988-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"524a7152-93ea-4930-af9c-37621b569988\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 28 15:47:41 crc kubenswrapper[4811]: I0128 15:47:40.992066 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/524a7152-93ea-4930-af9c-37621b569988-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"524a7152-93ea-4930-af9c-37621b569988\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 28 15:47:41 crc kubenswrapper[4811]: I0128 15:47:40.992169 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/524a7152-93ea-4930-af9c-37621b569988-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"524a7152-93ea-4930-af9c-37621b569988\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 28 15:47:41 crc kubenswrapper[4811]: I0128 15:47:40.994042 4811 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/524a7152-93ea-4930-af9c-37621b569988-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"524a7152-93ea-4930-af9c-37621b569988\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 28 15:47:41 crc kubenswrapper[4811]: I0128 15:47:41.024778 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/524a7152-93ea-4930-af9c-37621b569988-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"524a7152-93ea-4930-af9c-37621b569988\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 28 15:47:41 crc kubenswrapper[4811]: I0128 15:47:41.136727 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 28 15:47:41 crc kubenswrapper[4811]: I0128 15:47:41.191974 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-87g76" Jan 28 15:47:41 crc kubenswrapper[4811]: I0128 15:47:41.194510 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-87g76" Jan 28 15:47:41 crc kubenswrapper[4811]: I0128 15:47:41.680833 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 28 15:47:41 crc kubenswrapper[4811]: I0128 15:47:41.827903 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"29e1c319-f5ba-4c0f-9b57-0efd49fafd5e","Type":"ContainerStarted","Data":"69d1f564d3866a7c6d8a890579b0d53071374c322cb4141b801ce950635c9949"} Jan 28 15:47:41 crc kubenswrapper[4811]: I0128 15:47:41.827954 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"29e1c319-f5ba-4c0f-9b57-0efd49fafd5e","Type":"ContainerStarted","Data":"645975422f7b902c6e977295b120333af0656ac551163f642d2711a3dc7d79d3"} Jan 28 15:47:41 crc kubenswrapper[4811]: I0128 15:47:41.847276 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"524a7152-93ea-4930-af9c-37621b569988","Type":"ContainerStarted","Data":"513c1d2e3ef7e0527b19409e9bb8118430c248b2cbf543526a5322aa29b74856"} Jan 28 15:47:41 crc kubenswrapper[4811]: I0128 15:47:41.857663 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=1.857644125 podStartE2EDuration="1.857644125s" podCreationTimestamp="2026-01-28 15:47:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:41.854884918 +0000 UTC m=+154.609248501" watchObservedRunningTime="2026-01-28 15:47:41.857644125 +0000 UTC m=+154.612007708" Jan 28 15:47:42 crc kubenswrapper[4811]: I0128 15:47:42.871373 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"524a7152-93ea-4930-af9c-37621b569988","Type":"ContainerStarted","Data":"17654e88b39faede49c2018c6935ade38f3dcc6979a900a3e2d7086b82d64175"} Jan 28 15:47:42 crc kubenswrapper[4811]: I0128 15:47:42.883774 4811 generic.go:334] "Generic (PLEG): container finished" podID="29e1c319-f5ba-4c0f-9b57-0efd49fafd5e" 
containerID="69d1f564d3866a7c6d8a890579b0d53071374c322cb4141b801ce950635c9949" exitCode=0 Jan 28 15:47:42 crc kubenswrapper[4811]: I0128 15:47:42.883838 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"29e1c319-f5ba-4c0f-9b57-0efd49fafd5e","Type":"ContainerDied","Data":"69d1f564d3866a7c6d8a890579b0d53071374c322cb4141b801ce950635c9949"} Jan 28 15:47:42 crc kubenswrapper[4811]: I0128 15:47:42.887951 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=2.887939104 podStartE2EDuration="2.887939104s" podCreationTimestamp="2026-01-28 15:47:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:47:42.886328599 +0000 UTC m=+155.640692182" watchObservedRunningTime="2026-01-28 15:47:42.887939104 +0000 UTC m=+155.642302687" Jan 28 15:47:43 crc kubenswrapper[4811]: I0128 15:47:43.898638 4811 generic.go:334] "Generic (PLEG): container finished" podID="524a7152-93ea-4930-af9c-37621b569988" containerID="17654e88b39faede49c2018c6935ade38f3dcc6979a900a3e2d7086b82d64175" exitCode=0 Jan 28 15:47:43 crc kubenswrapper[4811]: I0128 15:47:43.899249 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"524a7152-93ea-4930-af9c-37621b569988","Type":"ContainerDied","Data":"17654e88b39faede49c2018c6935ade38f3dcc6979a900a3e2d7086b82d64175"} Jan 28 15:47:44 crc kubenswrapper[4811]: I0128 15:47:44.313605 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 28 15:47:44 crc kubenswrapper[4811]: I0128 15:47:44.359229 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/29e1c319-f5ba-4c0f-9b57-0efd49fafd5e-kube-api-access\") pod \"29e1c319-f5ba-4c0f-9b57-0efd49fafd5e\" (UID: \"29e1c319-f5ba-4c0f-9b57-0efd49fafd5e\") " Jan 28 15:47:44 crc kubenswrapper[4811]: I0128 15:47:44.359355 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/29e1c319-f5ba-4c0f-9b57-0efd49fafd5e-kubelet-dir\") pod \"29e1c319-f5ba-4c0f-9b57-0efd49fafd5e\" (UID: \"29e1c319-f5ba-4c0f-9b57-0efd49fafd5e\") " Jan 28 15:47:44 crc kubenswrapper[4811]: I0128 15:47:44.359882 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/29e1c319-f5ba-4c0f-9b57-0efd49fafd5e-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "29e1c319-f5ba-4c0f-9b57-0efd49fafd5e" (UID: "29e1c319-f5ba-4c0f-9b57-0efd49fafd5e"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 15:47:44 crc kubenswrapper[4811]: I0128 15:47:44.389712 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29e1c319-f5ba-4c0f-9b57-0efd49fafd5e-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "29e1c319-f5ba-4c0f-9b57-0efd49fafd5e" (UID: "29e1c319-f5ba-4c0f-9b57-0efd49fafd5e"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:47:44 crc kubenswrapper[4811]: I0128 15:47:44.461921 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/29e1c319-f5ba-4c0f-9b57-0efd49fafd5e-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 28 15:47:44 crc kubenswrapper[4811]: I0128 15:47:44.461983 4811 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/29e1c319-f5ba-4c0f-9b57-0efd49fafd5e-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 28 15:47:44 crc kubenswrapper[4811]: I0128 15:47:44.925692 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 28 15:47:44 crc kubenswrapper[4811]: I0128 15:47:44.925745 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"29e1c319-f5ba-4c0f-9b57-0efd49fafd5e","Type":"ContainerDied","Data":"645975422f7b902c6e977295b120333af0656ac551163f642d2711a3dc7d79d3"} Jan 28 15:47:44 crc kubenswrapper[4811]: I0128 15:47:44.925782 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="645975422f7b902c6e977295b120333af0656ac551163f642d2711a3dc7d79d3" Jan 28 15:47:45 crc kubenswrapper[4811]: I0128 15:47:45.409618 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 28 15:47:45 crc kubenswrapper[4811]: I0128 15:47:45.481807 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/524a7152-93ea-4930-af9c-37621b569988-kubelet-dir\") pod \"524a7152-93ea-4930-af9c-37621b569988\" (UID: \"524a7152-93ea-4930-af9c-37621b569988\") " Jan 28 15:47:45 crc kubenswrapper[4811]: I0128 15:47:45.481922 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/524a7152-93ea-4930-af9c-37621b569988-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "524a7152-93ea-4930-af9c-37621b569988" (UID: "524a7152-93ea-4930-af9c-37621b569988"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 15:47:45 crc kubenswrapper[4811]: I0128 15:47:45.481933 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/524a7152-93ea-4930-af9c-37621b569988-kube-api-access\") pod \"524a7152-93ea-4930-af9c-37621b569988\" (UID: \"524a7152-93ea-4930-af9c-37621b569988\") " Jan 28 15:47:45 crc kubenswrapper[4811]: I0128 15:47:45.482184 4811 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/524a7152-93ea-4930-af9c-37621b569988-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 28 15:47:45 crc kubenswrapper[4811]: I0128 15:47:45.496648 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/524a7152-93ea-4930-af9c-37621b569988-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "524a7152-93ea-4930-af9c-37621b569988" (UID: "524a7152-93ea-4930-af9c-37621b569988"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:47:45 crc kubenswrapper[4811]: I0128 15:47:45.583860 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/524a7152-93ea-4930-af9c-37621b569988-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 28 15:47:45 crc kubenswrapper[4811]: I0128 15:47:45.679506 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-lb8jv" Jan 28 15:47:45 crc kubenswrapper[4811]: I0128 15:47:45.936125 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"524a7152-93ea-4930-af9c-37621b569988","Type":"ContainerDied","Data":"513c1d2e3ef7e0527b19409e9bb8118430c248b2cbf543526a5322aa29b74856"} Jan 28 15:47:45 crc kubenswrapper[4811]: I0128 15:47:45.936162 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="513c1d2e3ef7e0527b19409e9bb8118430c248b2cbf543526a5322aa29b74856" Jan 28 15:47:45 crc kubenswrapper[4811]: I0128 15:47:45.936214 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 28 15:47:49 crc kubenswrapper[4811]: I0128 15:47:49.662856 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-xcx2z" Jan 28 15:47:49 crc kubenswrapper[4811]: I0128 15:47:49.667217 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-xcx2z" Jan 28 15:47:49 crc kubenswrapper[4811]: I0128 15:47:49.957151 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bb3aac9c-e96e-4a5e-beb5-aeff56394467-metrics-certs\") pod \"network-metrics-daemon-t5mlc\" (UID: \"bb3aac9c-e96e-4a5e-beb5-aeff56394467\") " pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:47:49 crc kubenswrapper[4811]: I0128 15:47:49.969049 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bb3aac9c-e96e-4a5e-beb5-aeff56394467-metrics-certs\") pod \"network-metrics-daemon-t5mlc\" (UID: \"bb3aac9c-e96e-4a5e-beb5-aeff56394467\") " pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:47:50 crc kubenswrapper[4811]: I0128 15:47:50.118021 4811 patch_prober.go:28] interesting pod/downloads-7954f5f757-dssbm container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.29:8080/\": dial tcp 10.217.0.29:8080: connect: connection refused" start-of-body= Jan 28 15:47:50 crc kubenswrapper[4811]: I0128 15:47:50.118058 4811 patch_prober.go:28] interesting pod/downloads-7954f5f757-dssbm container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.29:8080/\": dial tcp 10.217.0.29:8080: connect: connection refused" start-of-body= Jan 28 15:47:50 crc kubenswrapper[4811]: I0128 15:47:50.118115 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-dssbm" podUID="02bc99e0-e3a2-4c8b-b936-b5d7535412d0" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.29:8080/\": dial tcp 10.217.0.29:8080: connect: connection refused" Jan 28 15:47:50 crc kubenswrapper[4811]: I0128 15:47:50.118085 4811 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-console/downloads-7954f5f757-dssbm" podUID="02bc99e0-e3a2-4c8b-b936-b5d7535412d0" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.29:8080/\": dial tcp 10.217.0.29:8080: connect: connection refused" Jan 28 15:47:50 crc kubenswrapper[4811]: I0128 15:47:50.255286 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5mlc" Jan 28 15:47:54 crc kubenswrapper[4811]: I0128 15:47:54.109390 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-27m45"] Jan 28 15:47:54 crc kubenswrapper[4811]: I0128 15:47:54.109620 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-27m45" podUID="c5e6c2fc-f99a-443f-a0c6-165478a1e838" containerName="controller-manager" containerID="cri-o://1cb15ae328d3dbb0e4c2fb5ae2b6c3ae625ffc1f6afa3f2e37173551a93e6425" gracePeriod=30 Jan 28 15:47:54 crc kubenswrapper[4811]: I0128 15:47:54.149963 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dwxpb"] Jan 28 15:47:54 crc kubenswrapper[4811]: I0128 15:47:54.150384 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dwxpb" podUID="f2fae918-6d12-482a-9cf1-9cb27efb5f8c" containerName="route-controller-manager" containerID="cri-o://2b84d2c595f459896dede0b5f34f9ffd4c6ab2ace170ff6016402dd78ac08c27" gracePeriod=30 Jan 28 15:47:56 crc kubenswrapper[4811]: I0128 15:47:56.013985 4811 generic.go:334] "Generic (PLEG): container finished" podID="f2fae918-6d12-482a-9cf1-9cb27efb5f8c" containerID="2b84d2c595f459896dede0b5f34f9ffd4c6ab2ace170ff6016402dd78ac08c27" exitCode=0 Jan 28 15:47:56 crc kubenswrapper[4811]: I0128 15:47:56.014065 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dwxpb" event={"ID":"f2fae918-6d12-482a-9cf1-9cb27efb5f8c","Type":"ContainerDied","Data":"2b84d2c595f459896dede0b5f34f9ffd4c6ab2ace170ff6016402dd78ac08c27"} Jan 28 15:47:56 crc kubenswrapper[4811]: I0128 15:47:56.016395 4811 generic.go:334] "Generic (PLEG): container finished" podID="c5e6c2fc-f99a-443f-a0c6-165478a1e838" containerID="1cb15ae328d3dbb0e4c2fb5ae2b6c3ae625ffc1f6afa3f2e37173551a93e6425" exitCode=0 Jan 28 15:47:56 crc kubenswrapper[4811]: I0128 15:47:56.016497 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-27m45" event={"ID":"c5e6c2fc-f99a-443f-a0c6-165478a1e838","Type":"ContainerDied","Data":"1cb15ae328d3dbb0e4c2fb5ae2b6c3ae625ffc1f6afa3f2e37173551a93e6425"} Jan 28 15:47:57 crc kubenswrapper[4811]: I0128 15:47:57.894188 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:48:00 crc kubenswrapper[4811]: I0128 15:48:00.123579 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-dssbm" Jan 28 15:48:00 crc kubenswrapper[4811]: I0128 15:48:00.403801 4811 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-27m45 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.14:8443/healthz\": dial tcp 10.217.0.14:8443: connect: connection 
refused" start-of-body= Jan 28 15:48:00 crc kubenswrapper[4811]: I0128 15:48:00.403870 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-27m45" podUID="c5e6c2fc-f99a-443f-a0c6-165478a1e838" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.14:8443/healthz\": dial tcp 10.217.0.14:8443: connect: connection refused" Jan 28 15:48:00 crc kubenswrapper[4811]: I0128 15:48:00.489660 4811 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-dwxpb container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 28 15:48:00 crc kubenswrapper[4811]: I0128 15:48:00.489746 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dwxpb" podUID="f2fae918-6d12-482a-9cf1-9cb27efb5f8c" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 28 15:48:00 crc kubenswrapper[4811]: I0128 15:48:00.965297 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dwxpb" Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.001835 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-75cb47dc7f-v6qgg"] Jan 28 15:48:01 crc kubenswrapper[4811]: E0128 15:48:01.003175 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2fae918-6d12-482a-9cf1-9cb27efb5f8c" containerName="route-controller-manager" Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.003230 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2fae918-6d12-482a-9cf1-9cb27efb5f8c" containerName="route-controller-manager" Jan 28 15:48:01 crc kubenswrapper[4811]: E0128 15:48:01.003255 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="524a7152-93ea-4930-af9c-37621b569988" containerName="pruner" Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.003264 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="524a7152-93ea-4930-af9c-37621b569988" containerName="pruner" Jan 28 15:48:01 crc kubenswrapper[4811]: E0128 15:48:01.003291 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29e1c319-f5ba-4c0f-9b57-0efd49fafd5e" containerName="pruner" Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.003298 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="29e1c319-f5ba-4c0f-9b57-0efd49fafd5e" containerName="pruner" Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.003746 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="29e1c319-f5ba-4c0f-9b57-0efd49fafd5e" containerName="pruner" Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.003768 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="524a7152-93ea-4930-af9c-37621b569988" containerName="pruner" Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.003776 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2fae918-6d12-482a-9cf1-9cb27efb5f8c" containerName="route-controller-manager" Jan 28 15:48:01 crc 
kubenswrapper[4811]: I0128 15:48:01.004584 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-75cb47dc7f-v6qgg" Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.026387 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-75cb47dc7f-v6qgg"] Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.051106 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dwxpb" event={"ID":"f2fae918-6d12-482a-9cf1-9cb27efb5f8c","Type":"ContainerDied","Data":"137c6c88cb9d3c3a38787e835c1fac729f35a10601f2c89cca8267959c178335"} Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.051192 4811 scope.go:117] "RemoveContainer" containerID="2b84d2c595f459896dede0b5f34f9ffd4c6ab2ace170ff6016402dd78ac08c27" Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.051220 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dwxpb" Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.162212 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f2fae918-6d12-482a-9cf1-9cb27efb5f8c-client-ca\") pod \"f2fae918-6d12-482a-9cf1-9cb27efb5f8c\" (UID: \"f2fae918-6d12-482a-9cf1-9cb27efb5f8c\") " Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.162303 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f2fae918-6d12-482a-9cf1-9cb27efb5f8c-serving-cert\") pod \"f2fae918-6d12-482a-9cf1-9cb27efb5f8c\" (UID: \"f2fae918-6d12-482a-9cf1-9cb27efb5f8c\") " Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.162343 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2fae918-6d12-482a-9cf1-9cb27efb5f8c-config\") pod \"f2fae918-6d12-482a-9cf1-9cb27efb5f8c\" (UID: \"f2fae918-6d12-482a-9cf1-9cb27efb5f8c\") " Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.162509 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jd67p\" (UniqueName: \"kubernetes.io/projected/f2fae918-6d12-482a-9cf1-9cb27efb5f8c-kube-api-access-jd67p\") pod \"f2fae918-6d12-482a-9cf1-9cb27efb5f8c\" (UID: \"f2fae918-6d12-482a-9cf1-9cb27efb5f8c\") " Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.163839 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2fae918-6d12-482a-9cf1-9cb27efb5f8c-config" (OuterVolumeSpecName: "config") pod "f2fae918-6d12-482a-9cf1-9cb27efb5f8c" (UID: "f2fae918-6d12-482a-9cf1-9cb27efb5f8c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.164146 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c5c0faf4-e93d-491d-9262-4fb8b542d77c-client-ca\") pod \"route-controller-manager-75cb47dc7f-v6qgg\" (UID: \"c5c0faf4-e93d-491d-9262-4fb8b542d77c\") " pod="openshift-route-controller-manager/route-controller-manager-75cb47dc7f-v6qgg" Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.164235 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twlwj\" (UniqueName: \"kubernetes.io/projected/c5c0faf4-e93d-491d-9262-4fb8b542d77c-kube-api-access-twlwj\") pod \"route-controller-manager-75cb47dc7f-v6qgg\" (UID: \"c5c0faf4-e93d-491d-9262-4fb8b542d77c\") " pod="openshift-route-controller-manager/route-controller-manager-75cb47dc7f-v6qgg" Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.164304 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5c0faf4-e93d-491d-9262-4fb8b542d77c-config\") pod \"route-controller-manager-75cb47dc7f-v6qgg\" (UID: \"c5c0faf4-e93d-491d-9262-4fb8b542d77c\") " pod="openshift-route-controller-manager/route-controller-manager-75cb47dc7f-v6qgg" Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.164369 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c5c0faf4-e93d-491d-9262-4fb8b542d77c-serving-cert\") pod \"route-controller-manager-75cb47dc7f-v6qgg\" (UID: \"c5c0faf4-e93d-491d-9262-4fb8b542d77c\") " pod="openshift-route-controller-manager/route-controller-manager-75cb47dc7f-v6qgg" Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.164423 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2fae918-6d12-482a-9cf1-9cb27efb5f8c-config\") on node \"crc\" DevicePath \"\"" Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.165066 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2fae918-6d12-482a-9cf1-9cb27efb5f8c-client-ca" (OuterVolumeSpecName: "client-ca") pod "f2fae918-6d12-482a-9cf1-9cb27efb5f8c" (UID: "f2fae918-6d12-482a-9cf1-9cb27efb5f8c"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.170323 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2fae918-6d12-482a-9cf1-9cb27efb5f8c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "f2fae918-6d12-482a-9cf1-9cb27efb5f8c" (UID: "f2fae918-6d12-482a-9cf1-9cb27efb5f8c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.172615 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2fae918-6d12-482a-9cf1-9cb27efb5f8c-kube-api-access-jd67p" (OuterVolumeSpecName: "kube-api-access-jd67p") pod "f2fae918-6d12-482a-9cf1-9cb27efb5f8c" (UID: "f2fae918-6d12-482a-9cf1-9cb27efb5f8c"). InnerVolumeSpecName "kube-api-access-jd67p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.265771 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c5c0faf4-e93d-491d-9262-4fb8b542d77c-client-ca\") pod \"route-controller-manager-75cb47dc7f-v6qgg\" (UID: \"c5c0faf4-e93d-491d-9262-4fb8b542d77c\") " pod="openshift-route-controller-manager/route-controller-manager-75cb47dc7f-v6qgg" Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.266148 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twlwj\" (UniqueName: \"kubernetes.io/projected/c5c0faf4-e93d-491d-9262-4fb8b542d77c-kube-api-access-twlwj\") pod \"route-controller-manager-75cb47dc7f-v6qgg\" (UID: \"c5c0faf4-e93d-491d-9262-4fb8b542d77c\") " pod="openshift-route-controller-manager/route-controller-manager-75cb47dc7f-v6qgg" Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.266279 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5c0faf4-e93d-491d-9262-4fb8b542d77c-config\") pod \"route-controller-manager-75cb47dc7f-v6qgg\" (UID: \"c5c0faf4-e93d-491d-9262-4fb8b542d77c\") " pod="openshift-route-controller-manager/route-controller-manager-75cb47dc7f-v6qgg" Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.266389 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c5c0faf4-e93d-491d-9262-4fb8b542d77c-serving-cert\") pod \"route-controller-manager-75cb47dc7f-v6qgg\" (UID: \"c5c0faf4-e93d-491d-9262-4fb8b542d77c\") " pod="openshift-route-controller-manager/route-controller-manager-75cb47dc7f-v6qgg" Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.266541 4811 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f2fae918-6d12-482a-9cf1-9cb27efb5f8c-client-ca\") on node \"crc\" DevicePath \"\"" Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.266654 4811 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f2fae918-6d12-482a-9cf1-9cb27efb5f8c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.266744 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jd67p\" (UniqueName: \"kubernetes.io/projected/f2fae918-6d12-482a-9cf1-9cb27efb5f8c-kube-api-access-jd67p\") on node \"crc\" DevicePath \"\"" Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.267839 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c5c0faf4-e93d-491d-9262-4fb8b542d77c-client-ca\") pod \"route-controller-manager-75cb47dc7f-v6qgg\" (UID: \"c5c0faf4-e93d-491d-9262-4fb8b542d77c\") " pod="openshift-route-controller-manager/route-controller-manager-75cb47dc7f-v6qgg" Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.269519 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5c0faf4-e93d-491d-9262-4fb8b542d77c-config\") pod \"route-controller-manager-75cb47dc7f-v6qgg\" (UID: \"c5c0faf4-e93d-491d-9262-4fb8b542d77c\") " pod="openshift-route-controller-manager/route-controller-manager-75cb47dc7f-v6qgg" Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.271928 4811 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c5c0faf4-e93d-491d-9262-4fb8b542d77c-serving-cert\") pod \"route-controller-manager-75cb47dc7f-v6qgg\" (UID: \"c5c0faf4-e93d-491d-9262-4fb8b542d77c\") " pod="openshift-route-controller-manager/route-controller-manager-75cb47dc7f-v6qgg" Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.286262 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twlwj\" (UniqueName: \"kubernetes.io/projected/c5c0faf4-e93d-491d-9262-4fb8b542d77c-kube-api-access-twlwj\") pod \"route-controller-manager-75cb47dc7f-v6qgg\" (UID: \"c5c0faf4-e93d-491d-9262-4fb8b542d77c\") " pod="openshift-route-controller-manager/route-controller-manager-75cb47dc7f-v6qgg" Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.350920 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-75cb47dc7f-v6qgg" Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.384625 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dwxpb"] Jan 28 15:48:01 crc kubenswrapper[4811]: I0128 15:48:01.389300 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dwxpb"] Jan 28 15:48:02 crc kubenswrapper[4811]: I0128 15:48:02.348169 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2fae918-6d12-482a-9cf1-9cb27efb5f8c" path="/var/lib/kubelet/pods/f2fae918-6d12-482a-9cf1-9cb27efb5f8c/volumes" Jan 28 15:48:03 crc kubenswrapper[4811]: I0128 15:48:03.087038 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 15:48:03 crc kubenswrapper[4811]: I0128 15:48:03.087104 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 15:48:09 crc kubenswrapper[4811]: I0128 15:48:09.901138 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xg589" Jan 28 15:48:11 crc kubenswrapper[4811]: I0128 15:48:11.404316 4811 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-27m45 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.14:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 28 15:48:11 crc kubenswrapper[4811]: I0128 15:48:11.404777 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-27m45" podUID="c5e6c2fc-f99a-443f-a0c6-165478a1e838" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.14:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 28 15:48:14 crc kubenswrapper[4811]: I0128 15:48:14.164333 4811 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-75cb47dc7f-v6qgg"] Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.457390 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-27m45" Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.481473 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6cc89988fc-zxgtl"] Jan 28 15:48:16 crc kubenswrapper[4811]: E0128 15:48:16.481751 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5e6c2fc-f99a-443f-a0c6-165478a1e838" containerName="controller-manager" Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.481775 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5e6c2fc-f99a-443f-a0c6-165478a1e838" containerName="controller-manager" Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.481896 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5e6c2fc-f99a-443f-a0c6-165478a1e838" containerName="controller-manager" Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.482349 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6cc89988fc-zxgtl" Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.490199 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6cc89988fc-zxgtl"] Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.588048 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c5e6c2fc-f99a-443f-a0c6-165478a1e838-serving-cert\") pod \"c5e6c2fc-f99a-443f-a0c6-165478a1e838\" (UID: \"c5e6c2fc-f99a-443f-a0c6-165478a1e838\") " Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.588127 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9ktm6\" (UniqueName: \"kubernetes.io/projected/c5e6c2fc-f99a-443f-a0c6-165478a1e838-kube-api-access-9ktm6\") pod \"c5e6c2fc-f99a-443f-a0c6-165478a1e838\" (UID: \"c5e6c2fc-f99a-443f-a0c6-165478a1e838\") " Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.588263 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5e6c2fc-f99a-443f-a0c6-165478a1e838-config\") pod \"c5e6c2fc-f99a-443f-a0c6-165478a1e838\" (UID: \"c5e6c2fc-f99a-443f-a0c6-165478a1e838\") " Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.588304 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c5e6c2fc-f99a-443f-a0c6-165478a1e838-client-ca\") pod \"c5e6c2fc-f99a-443f-a0c6-165478a1e838\" (UID: \"c5e6c2fc-f99a-443f-a0c6-165478a1e838\") " Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.588342 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c5e6c2fc-f99a-443f-a0c6-165478a1e838-proxy-ca-bundles\") pod \"c5e6c2fc-f99a-443f-a0c6-165478a1e838\" (UID: \"c5e6c2fc-f99a-443f-a0c6-165478a1e838\") " Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.588818 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d-serving-cert\") pod 
\"controller-manager-6cc89988fc-zxgtl\" (UID: \"fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d\") " pod="openshift-controller-manager/controller-manager-6cc89988fc-zxgtl" Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.588866 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d-client-ca\") pod \"controller-manager-6cc89988fc-zxgtl\" (UID: \"fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d\") " pod="openshift-controller-manager/controller-manager-6cc89988fc-zxgtl" Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.588946 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d-config\") pod \"controller-manager-6cc89988fc-zxgtl\" (UID: \"fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d\") " pod="openshift-controller-manager/controller-manager-6cc89988fc-zxgtl" Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.588996 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d-proxy-ca-bundles\") pod \"controller-manager-6cc89988fc-zxgtl\" (UID: \"fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d\") " pod="openshift-controller-manager/controller-manager-6cc89988fc-zxgtl" Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.589032 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrr52\" (UniqueName: \"kubernetes.io/projected/fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d-kube-api-access-zrr52\") pod \"controller-manager-6cc89988fc-zxgtl\" (UID: \"fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d\") " pod="openshift-controller-manager/controller-manager-6cc89988fc-zxgtl" Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.589457 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5e6c2fc-f99a-443f-a0c6-165478a1e838-client-ca" (OuterVolumeSpecName: "client-ca") pod "c5e6c2fc-f99a-443f-a0c6-165478a1e838" (UID: "c5e6c2fc-f99a-443f-a0c6-165478a1e838"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.589581 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5e6c2fc-f99a-443f-a0c6-165478a1e838-config" (OuterVolumeSpecName: "config") pod "c5e6c2fc-f99a-443f-a0c6-165478a1e838" (UID: "c5e6c2fc-f99a-443f-a0c6-165478a1e838"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.595698 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5e6c2fc-f99a-443f-a0c6-165478a1e838-kube-api-access-9ktm6" (OuterVolumeSpecName: "kube-api-access-9ktm6") pod "c5e6c2fc-f99a-443f-a0c6-165478a1e838" (UID: "c5e6c2fc-f99a-443f-a0c6-165478a1e838"). InnerVolumeSpecName "kube-api-access-9ktm6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.690010 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d-serving-cert\") pod \"controller-manager-6cc89988fc-zxgtl\" (UID: \"fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d\") " pod="openshift-controller-manager/controller-manager-6cc89988fc-zxgtl" Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.690056 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d-client-ca\") pod \"controller-manager-6cc89988fc-zxgtl\" (UID: \"fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d\") " pod="openshift-controller-manager/controller-manager-6cc89988fc-zxgtl" Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.690101 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d-config\") pod \"controller-manager-6cc89988fc-zxgtl\" (UID: \"fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d\") " pod="openshift-controller-manager/controller-manager-6cc89988fc-zxgtl" Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.690116 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d-proxy-ca-bundles\") pod \"controller-manager-6cc89988fc-zxgtl\" (UID: \"fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d\") " pod="openshift-controller-manager/controller-manager-6cc89988fc-zxgtl" Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.690136 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrr52\" (UniqueName: \"kubernetes.io/projected/fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d-kube-api-access-zrr52\") pod \"controller-manager-6cc89988fc-zxgtl\" (UID: \"fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d\") " pod="openshift-controller-manager/controller-manager-6cc89988fc-zxgtl" Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.690211 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5e6c2fc-f99a-443f-a0c6-165478a1e838-config\") on node \"crc\" DevicePath \"\"" Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.690222 4811 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c5e6c2fc-f99a-443f-a0c6-165478a1e838-client-ca\") on node \"crc\" DevicePath \"\"" Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.690231 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9ktm6\" (UniqueName: \"kubernetes.io/projected/c5e6c2fc-f99a-443f-a0c6-165478a1e838-kube-api-access-9ktm6\") on node \"crc\" DevicePath \"\"" Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.691904 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d-client-ca\") pod \"controller-manager-6cc89988fc-zxgtl\" (UID: \"fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d\") " pod="openshift-controller-manager/controller-manager-6cc89988fc-zxgtl" Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.692081 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d-proxy-ca-bundles\") pod \"controller-manager-6cc89988fc-zxgtl\" (UID: \"fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d\") " pod="openshift-controller-manager/controller-manager-6cc89988fc-zxgtl" Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.692280 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d-config\") pod \"controller-manager-6cc89988fc-zxgtl\" (UID: \"fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d\") " pod="openshift-controller-manager/controller-manager-6cc89988fc-zxgtl" Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.695459 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d-serving-cert\") pod \"controller-manager-6cc89988fc-zxgtl\" (UID: \"fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d\") " pod="openshift-controller-manager/controller-manager-6cc89988fc-zxgtl" Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.717976 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrr52\" (UniqueName: \"kubernetes.io/projected/fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d-kube-api-access-zrr52\") pod \"controller-manager-6cc89988fc-zxgtl\" (UID: \"fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d\") " pod="openshift-controller-manager/controller-manager-6cc89988fc-zxgtl" Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.803282 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6cc89988fc-zxgtl" Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.824192 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5e6c2fc-f99a-443f-a0c6-165478a1e838-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "c5e6c2fc-f99a-443f-a0c6-165478a1e838" (UID: "c5e6c2fc-f99a-443f-a0c6-165478a1e838"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.824316 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5e6c2fc-f99a-443f-a0c6-165478a1e838-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "c5e6c2fc-f99a-443f-a0c6-165478a1e838" (UID: "c5e6c2fc-f99a-443f-a0c6-165478a1e838"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.827939 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.893114 4811 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c5e6c2fc-f99a-443f-a0c6-165478a1e838-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 28 15:48:16 crc kubenswrapper[4811]: I0128 15:48:16.893149 4811 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c5e6c2fc-f99a-443f-a0c6-165478a1e838-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:48:17 crc kubenswrapper[4811]: I0128 15:48:17.151396 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-27m45" event={"ID":"c5e6c2fc-f99a-443f-a0c6-165478a1e838","Type":"ContainerDied","Data":"dfde520ddc1c96bd46c977cbcfee90f16853289dd792ce0b49dab50952d7fe9f"} Jan 28 15:48:17 crc kubenswrapper[4811]: I0128 15:48:17.151454 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-27m45" Jan 28 15:48:17 crc kubenswrapper[4811]: I0128 15:48:17.180213 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-27m45"] Jan 28 15:48:17 crc kubenswrapper[4811]: I0128 15:48:17.183055 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-27m45"] Jan 28 15:48:18 crc kubenswrapper[4811]: I0128 15:48:18.348618 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5e6c2fc-f99a-443f-a0c6-165478a1e838" path="/var/lib/kubelet/pods/c5e6c2fc-f99a-443f-a0c6-165478a1e838/volumes" Jan 28 15:48:19 crc kubenswrapper[4811]: I0128 15:48:19.768347 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 28 15:48:19 crc kubenswrapper[4811]: I0128 15:48:19.769726 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 28 15:48:19 crc kubenswrapper[4811]: I0128 15:48:19.773024 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 28 15:48:19 crc kubenswrapper[4811]: I0128 15:48:19.773224 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 28 15:48:19 crc kubenswrapper[4811]: I0128 15:48:19.781561 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 28 15:48:19 crc kubenswrapper[4811]: I0128 15:48:19.870548 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/91f87058-06bb-4c9d-bff6-5f817c426b8b-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"91f87058-06bb-4c9d-bff6-5f817c426b8b\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 28 15:48:19 crc kubenswrapper[4811]: I0128 15:48:19.870748 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/91f87058-06bb-4c9d-bff6-5f817c426b8b-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"91f87058-06bb-4c9d-bff6-5f817c426b8b\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 28 15:48:19 crc kubenswrapper[4811]: I0128 15:48:19.971968 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/91f87058-06bb-4c9d-bff6-5f817c426b8b-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"91f87058-06bb-4c9d-bff6-5f817c426b8b\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 28 15:48:19 crc kubenswrapper[4811]: I0128 15:48:19.972066 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/91f87058-06bb-4c9d-bff6-5f817c426b8b-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"91f87058-06bb-4c9d-bff6-5f817c426b8b\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 28 15:48:19 crc kubenswrapper[4811]: I0128 15:48:19.972138 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/91f87058-06bb-4c9d-bff6-5f817c426b8b-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"91f87058-06bb-4c9d-bff6-5f817c426b8b\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 28 15:48:19 crc kubenswrapper[4811]: I0128 15:48:19.993566 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/91f87058-06bb-4c9d-bff6-5f817c426b8b-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"91f87058-06bb-4c9d-bff6-5f817c426b8b\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 28 15:48:20 crc kubenswrapper[4811]: I0128 15:48:20.108020 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 28 15:48:24 crc kubenswrapper[4811]: I0128 15:48:24.767620 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 28 15:48:24 crc kubenswrapper[4811]: I0128 15:48:24.774965 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 28 15:48:24 crc kubenswrapper[4811]: I0128 15:48:24.782607 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 28 15:48:24 crc kubenswrapper[4811]: I0128 15:48:24.942406 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7-kubelet-dir\") pod \"installer-9-crc\" (UID: \"89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 28 15:48:24 crc kubenswrapper[4811]: I0128 15:48:24.942627 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7-var-lock\") pod \"installer-9-crc\" (UID: \"89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 28 15:48:24 crc kubenswrapper[4811]: I0128 15:48:24.942720 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7-kube-api-access\") pod \"installer-9-crc\" (UID: \"89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 28 15:48:25 crc kubenswrapper[4811]: E0128 15:48:25.016874 4811 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 28 15:48:25 crc kubenswrapper[4811]: E0128 15:48:25.017071 4811 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dfd4c,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-mql9l_openshift-marketplace(4bc51efa-36a8-4548-8497-5bc394e0de82): ErrImagePull: rpc error: code = Canceled desc = copying system image 
from manifest list: copying config: context canceled" logger="UnhandledError"
Jan 28 15:48:25 crc kubenswrapper[4811]: E0128 15:48:25.018267 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-mql9l" podUID="4bc51efa-36a8-4548-8497-5bc394e0de82"
Jan 28 15:48:25 crc kubenswrapper[4811]: I0128 15:48:25.044425 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7-var-lock\") pod \"installer-9-crc\" (UID: \"89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7\") " pod="openshift-kube-apiserver/installer-9-crc"
Jan 28 15:48:25 crc kubenswrapper[4811]: I0128 15:48:25.044566 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7-var-lock\") pod \"installer-9-crc\" (UID: \"89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7\") " pod="openshift-kube-apiserver/installer-9-crc"
Jan 28 15:48:25 crc kubenswrapper[4811]: I0128 15:48:25.045001 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7-kube-api-access\") pod \"installer-9-crc\" (UID: \"89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7\") " pod="openshift-kube-apiserver/installer-9-crc"
Jan 28 15:48:25 crc kubenswrapper[4811]: I0128 15:48:25.045065 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7-kubelet-dir\") pod \"installer-9-crc\" (UID: \"89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7\") " pod="openshift-kube-apiserver/installer-9-crc"
Jan 28 15:48:25 crc kubenswrapper[4811]: I0128 15:48:25.045126 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7-kubelet-dir\") pod \"installer-9-crc\" (UID: \"89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7\") " pod="openshift-kube-apiserver/installer-9-crc"
Jan 28 15:48:25 crc kubenswrapper[4811]: I0128 15:48:25.063079 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7-kube-api-access\") pod \"installer-9-crc\" (UID: \"89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7\") " pod="openshift-kube-apiserver/installer-9-crc"
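The ErrImagePull entries here and the ImagePullBackOff entries that follow are two phases of one retry loop: the CRI pull fails (log.go / kuberuntime_manager.go), then the pod worker backs off before retrying (pod_workers.go). A minimal client-go sketch, not part of the log, that lists pods stuck in either phase; the namespace choice and kubeconfig handling are assumptions for illustration:

package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	pods, err := cs.CoreV1().Pods("openshift-marketplace").List(context.TODO(), metav1.ListOptions{})
	if err != nil {
		panic(err)
	}
	for _, p := range pods.Items {
		for _, s := range p.Status.InitContainerStatuses {
			// The waiting reason cycles ErrImagePull -> ImagePullBackOff, as in the log.
			if w := s.State.Waiting; w != nil && (w.Reason == "ErrImagePull" || w.Reason == "ImagePullBackOff") {
				fmt.Printf("%s/%s %s: %s\n", p.Namespace, p.Name, s.Name, w.Reason)
			}
		}
	}
}

Run in this window, it would be expected to report the extract-content init containers of the redhat-marketplace, community-operators, certified-operators, and redhat-operators catalog pods, which never leave the waiting state.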
Jan 28 15:48:25 crc kubenswrapper[4811]: I0128 15:48:25.108506 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Jan 28 15:48:33 crc kubenswrapper[4811]: I0128 15:48:33.087327 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 15:48:33 crc kubenswrapper[4811]: I0128 15:48:33.087988 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 15:48:33 crc kubenswrapper[4811]: I0128 15:48:33.088055 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6"
Jan 28 15:48:33 crc kubenswrapper[4811]: I0128 15:48:33.088730 4811 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 28 15:48:33 crc kubenswrapper[4811]: I0128 15:48:33.088852 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7" gracePeriod=600
Jan 28 15:48:33 crc kubenswrapper[4811]: E0128 15:48:33.107075 4811 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Jan 28 15:48:33 crc kubenswrapper[4811]: E0128 15:48:33.107234 4811 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pvkm4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-5gjk2_openshift-marketplace(ad2a3f3f-a2fa-4868-a770-c114e995ddbc): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 28 15:48:33 crc kubenswrapper[4811]: E0128 15:48:33.108468 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-5gjk2" podUID="ad2a3f3f-a2fa-4868-a770-c114e995ddbc" Jan 28 15:48:33 crc kubenswrapper[4811]: E0128 15:48:33.939557 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-mql9l" podUID="4bc51efa-36a8-4548-8497-5bc394e0de82" Jan 28 15:48:33 crc kubenswrapper[4811]: E0128 15:48:33.939669 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-5gjk2" podUID="ad2a3f3f-a2fa-4868-a770-c114e995ddbc" Jan 28 15:48:37 crc kubenswrapper[4811]: E0128 15:48:37.037307 4811 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 28 15:48:37 crc kubenswrapper[4811]: E0128 15:48:37.038172 4811 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-74wxb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-n2nwr_openshift-marketplace(fcb7ac6d-c7f1-4c34-9805-e3333249b868): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 28 15:48:37 crc kubenswrapper[4811]: E0128 15:48:37.039522 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-n2nwr" podUID="fcb7ac6d-c7f1-4c34-9805-e3333249b868" Jan 28 15:48:45 crc kubenswrapper[4811]: I0128 15:48:45.290958 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7" exitCode=0 Jan 28 15:48:45 crc kubenswrapper[4811]: I0128 15:48:45.291212 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7"} Jan 28 15:48:50 crc kubenswrapper[4811]: E0128 15:48:50.039255 4811 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 28 15:48:50 crc kubenswrapper[4811]: E0128 15:48:50.039764 4811 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cbzq7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-9qvvv_openshift-marketplace(234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 28 15:48:50 crc kubenswrapper[4811]: E0128 15:48:50.041173 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-9qvvv" podUID="234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6" Jan 28 15:48:58 crc kubenswrapper[4811]: E0128 15:48:58.616959 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-9qvvv" podUID="234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6" Jan 28 15:48:58 crc kubenswrapper[4811]: I0128 15:48:58.630059 4811 scope.go:117] "RemoveContainer" containerID="1cb15ae328d3dbb0e4c2fb5ae2b6c3ae625ffc1f6afa3f2e37173551a93e6425" Jan 28 15:48:58 crc kubenswrapper[4811]: I0128 15:48:58.806847 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-t5mlc"] Jan 28 15:48:59 crc kubenswrapper[4811]: W0128 15:48:59.071741 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbb3aac9c_e96e_4a5e_beb5_aeff56394467.slice/crio-b538dfeaf26621a453dae1ec89ccdb48a8a52eebb686fab1f3d706c1eb706213 WatchSource:0}: Error finding container b538dfeaf26621a453dae1ec89ccdb48a8a52eebb686fab1f3d706c1eb706213: Status 404 returned error can't find the container with id b538dfeaf26621a453dae1ec89ccdb48a8a52eebb686fab1f3d706c1eb706213 Jan 28 15:48:59 crc kubenswrapper[4811]: I0128 15:48:59.311539 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 28 15:48:59 crc kubenswrapper[4811]: I0128 15:48:59.396852 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"91f87058-06bb-4c9d-bff6-5f817c426b8b","Type":"ContainerStarted","Data":"fe9400446a7bc905c7f107ddb8a07cee2a5a57b5ad6c90af44795312417c9332"} Jan 28 15:48:59 crc kubenswrapper[4811]: I0128 15:48:59.403822 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-t5mlc" event={"ID":"bb3aac9c-e96e-4a5e-beb5-aeff56394467","Type":"ContainerStarted","Data":"b538dfeaf26621a453dae1ec89ccdb48a8a52eebb686fab1f3d706c1eb706213"} Jan 28 15:48:59 crc kubenswrapper[4811]: E0128 15:48:59.529597 4811 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 28 15:48:59 crc kubenswrapper[4811]: E0128 15:48:59.529821 4811 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5lwj7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-znvwk_openshift-marketplace(87f8aef8-757b-4a73-9ee5-09751c3b7e92): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 28 15:48:59 crc kubenswrapper[4811]: E0128 15:48:59.531860 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-znvwk" podUID="87f8aef8-757b-4a73-9ee5-09751c3b7e92" Jan 28 15:48:59 crc kubenswrapper[4811]: I0128 15:48:59.572169 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-75cb47dc7f-v6qgg"] Jan 28 15:48:59 crc kubenswrapper[4811]: I0128 15:48:59.584955 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 28 15:48:59 crc 
kubenswrapper[4811]: W0128 15:48:59.589916 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod89a4207c_2c5f_428a_ae4f_ae0adbc4a6e7.slice/crio-a4400e59068fd205c64b233d89ff91af490a68c9715a92a33a7467b3cc423846 WatchSource:0}: Error finding container a4400e59068fd205c64b233d89ff91af490a68c9715a92a33a7467b3cc423846: Status 404 returned error can't find the container with id a4400e59068fd205c64b233d89ff91af490a68c9715a92a33a7467b3cc423846 Jan 28 15:48:59 crc kubenswrapper[4811]: I0128 15:48:59.646683 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6cc89988fc-zxgtl"] Jan 28 15:48:59 crc kubenswrapper[4811]: W0128 15:48:59.671775 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfb3babf0_f0e2_4f8a_a85f_f11ef65fe72d.slice/crio-27c8bf3104d72e3891550cb9f354aac67bc779aecfd21fae6ecac389a4a62c0c WatchSource:0}: Error finding container 27c8bf3104d72e3891550cb9f354aac67bc779aecfd21fae6ecac389a4a62c0c: Status 404 returned error can't find the container with id 27c8bf3104d72e3891550cb9f354aac67bc779aecfd21fae6ecac389a4a62c0c Jan 28 15:49:00 crc kubenswrapper[4811]: I0128 15:49:00.411057 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-t5mlc" event={"ID":"bb3aac9c-e96e-4a5e-beb5-aeff56394467","Type":"ContainerStarted","Data":"eecb3fe8b63ee536ad1e4628222f6d584141218f3c26d13f8a9354577ef01b30"} Jan 28 15:49:00 crc kubenswrapper[4811]: I0128 15:49:00.413539 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7","Type":"ContainerStarted","Data":"a4400e59068fd205c64b233d89ff91af490a68c9715a92a33a7467b3cc423846"} Jan 28 15:49:00 crc kubenswrapper[4811]: I0128 15:49:00.414548 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6cc89988fc-zxgtl" event={"ID":"fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d","Type":"ContainerStarted","Data":"27c8bf3104d72e3891550cb9f354aac67bc779aecfd21fae6ecac389a4a62c0c"} Jan 28 15:49:00 crc kubenswrapper[4811]: I0128 15:49:00.415829 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-75cb47dc7f-v6qgg" event={"ID":"c5c0faf4-e93d-491d-9262-4fb8b542d77c","Type":"ContainerStarted","Data":"f6a7a9e7d34fd4bf30870de2eb798fcb94703611f615b34d7a4654f4054969aa"} Jan 28 15:49:00 crc kubenswrapper[4811]: E0128 15:49:00.417733 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-znvwk" podUID="87f8aef8-757b-4a73-9ee5-09751c3b7e92" Jan 28 15:49:01 crc kubenswrapper[4811]: I0128 15:49:01.436471 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"91f87058-06bb-4c9d-bff6-5f817c426b8b","Type":"ContainerStarted","Data":"2a1820170a498eed9286e1b6ee0085f106c55c7874ddb8333cd74179acd21e34"} Jan 28 15:49:02 crc kubenswrapper[4811]: I0128 15:49:02.447047 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6cc89988fc-zxgtl" 
event={"ID":"fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d","Type":"ContainerStarted","Data":"e5dbcf65738297ab27e42ab1bab6424f9bdba109e93420a770d84cd92f0be606"} Jan 28 15:49:02 crc kubenswrapper[4811]: I0128 15:49:02.450867 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"4eb1075bef300aa6f4b9a078a39752fc2bb693e12743f902b96be95bfa041b2e"} Jan 28 15:49:02 crc kubenswrapper[4811]: I0128 15:49:02.468809 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=43.468791045 podStartE2EDuration="43.468791045s" podCreationTimestamp="2026-01-28 15:48:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:49:02.465509156 +0000 UTC m=+235.219872759" watchObservedRunningTime="2026-01-28 15:49:02.468791045 +0000 UTC m=+235.223154648" Jan 28 15:49:03 crc kubenswrapper[4811]: I0128 15:49:03.460503 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-75cb47dc7f-v6qgg" event={"ID":"c5c0faf4-e93d-491d-9262-4fb8b542d77c","Type":"ContainerStarted","Data":"b1f71740aafc4959f72e533c1d6ba99f1d6c60ea327ef66e4380bcaefa931b54"} Jan 28 15:49:03 crc kubenswrapper[4811]: E0128 15:49:03.711085 4811 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 28 15:49:03 crc kubenswrapper[4811]: E0128 15:49:03.711481 4811 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7lgts,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-mqvcx_openshift-marketplace(5f806cee-7e54-4979-a1a2-d42a4c892013): ErrImagePull: rpc error: code = Canceled desc = copying system 
image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 28 15:49:03 crc kubenswrapper[4811]: E0128 15:49:03.713304 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-mqvcx" podUID="5f806cee-7e54-4979-a1a2-d42a4c892013" Jan 28 15:49:03 crc kubenswrapper[4811]: E0128 15:49:03.714205 4811 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 28 15:49:03 crc kubenswrapper[4811]: E0128 15:49:03.714327 4811 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-g8nl5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-v759z_openshift-marketplace(2195bb89-ac75-41a6-8e39-77506c50c101): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 28 15:49:03 crc kubenswrapper[4811]: E0128 15:49:03.715574 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-v759z" podUID="2195bb89-ac75-41a6-8e39-77506c50c101" Jan 28 15:49:03 crc kubenswrapper[4811]: E0128 15:49:03.726445 4811 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 28 15:49:03 crc kubenswrapper[4811]: E0128 15:49:03.726562 4811 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-sjwfm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-zkcxm_openshift-marketplace(d23adda9-57ba-4cbf-be0a-6cec03fbdc1d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 28 15:49:03 crc kubenswrapper[4811]: E0128 15:49:03.727773 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-zkcxm" podUID="d23adda9-57ba-4cbf-be0a-6cec03fbdc1d" Jan 28 15:49:04 crc kubenswrapper[4811]: I0128 15:49:04.469745 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7","Type":"ContainerStarted","Data":"30b2c13b95f21b3ff9d5983f2abf53c8ba2cbb4207abe4b91ecfb8965622f26d"} Jan 28 15:49:04 crc kubenswrapper[4811]: I0128 15:49:04.471200 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6cc89988fc-zxgtl" Jan 28 15:49:04 crc kubenswrapper[4811]: I0128 15:49:04.474473 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6cc89988fc-zxgtl" Jan 28 15:49:04 crc kubenswrapper[4811]: I0128 15:49:04.519721 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6cc89988fc-zxgtl" podStartSLOduration=50.519700529 podStartE2EDuration="50.519700529s" podCreationTimestamp="2026-01-28 15:48:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:49:04.516072791 +0000 UTC m=+237.270436384" watchObservedRunningTime="2026-01-28 15:49:04.519700529 +0000 UTC m=+237.274064122" Jan 28 15:49:05 crc 
kubenswrapper[4811]: I0128 15:49:05.475793 4811 generic.go:334] "Generic (PLEG): container finished" podID="91f87058-06bb-4c9d-bff6-5f817c426b8b" containerID="2a1820170a498eed9286e1b6ee0085f106c55c7874ddb8333cd74179acd21e34" exitCode=0 Jan 28 15:49:05 crc kubenswrapper[4811]: I0128 15:49:05.476504 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"91f87058-06bb-4c9d-bff6-5f817c426b8b","Type":"ContainerDied","Data":"2a1820170a498eed9286e1b6ee0085f106c55c7874ddb8333cd74179acd21e34"} Jan 28 15:49:05 crc kubenswrapper[4811]: I0128 15:49:05.477221 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-75cb47dc7f-v6qgg" podUID="c5c0faf4-e93d-491d-9262-4fb8b542d77c" containerName="route-controller-manager" containerID="cri-o://b1f71740aafc4959f72e533c1d6ba99f1d6c60ea327ef66e4380bcaefa931b54" gracePeriod=30 Jan 28 15:49:05 crc kubenswrapper[4811]: I0128 15:49:05.477738 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-75cb47dc7f-v6qgg" Jan 28 15:49:05 crc kubenswrapper[4811]: I0128 15:49:05.485395 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-75cb47dc7f-v6qgg" Jan 28 15:49:05 crc kubenswrapper[4811]: I0128 15:49:05.514086 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-75cb47dc7f-v6qgg" podStartSLOduration=71.514062657 podStartE2EDuration="1m11.514062657s" podCreationTimestamp="2026-01-28 15:47:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:49:05.511160428 +0000 UTC m=+238.265524011" watchObservedRunningTime="2026-01-28 15:49:05.514062657 +0000 UTC m=+238.268426270" Jan 28 15:49:05 crc kubenswrapper[4811]: I0128 15:49:05.538687 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=41.538667759 podStartE2EDuration="41.538667759s" podCreationTimestamp="2026-01-28 15:48:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:49:05.537449826 +0000 UTC m=+238.291813429" watchObservedRunningTime="2026-01-28 15:49:05.538667759 +0000 UTC m=+238.293031342" Jan 28 15:49:10 crc kubenswrapper[4811]: I0128 15:49:10.514285 4811 generic.go:334] "Generic (PLEG): container finished" podID="c5c0faf4-e93d-491d-9262-4fb8b542d77c" containerID="b1f71740aafc4959f72e533c1d6ba99f1d6c60ea327ef66e4380bcaefa931b54" exitCode=0 Jan 28 15:49:10 crc kubenswrapper[4811]: I0128 15:49:10.514523 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-75cb47dc7f-v6qgg" event={"ID":"c5c0faf4-e93d-491d-9262-4fb8b542d77c","Type":"ContainerDied","Data":"b1f71740aafc4959f72e533c1d6ba99f1d6c60ea327ef66e4380bcaefa931b54"} Jan 28 15:49:11 crc kubenswrapper[4811]: I0128 15:49:11.351995 4811 patch_prober.go:28] interesting pod/route-controller-manager-75cb47dc7f-v6qgg container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.54:8443/healthz\": dial tcp 10.217.0.54:8443: connect: connection refused" 
start-of-body= Jan 28 15:49:11 crc kubenswrapper[4811]: I0128 15:49:11.352056 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-75cb47dc7f-v6qgg" podUID="c5c0faf4-e93d-491d-9262-4fb8b542d77c" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.54:8443/healthz\": dial tcp 10.217.0.54:8443: connect: connection refused" Jan 28 15:49:11 crc kubenswrapper[4811]: E0128 15:49:11.999289 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-mqvcx" podUID="5f806cee-7e54-4979-a1a2-d42a4c892013" Jan 28 15:49:12 crc kubenswrapper[4811]: E0128 15:49:11.999487 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-v759z" podUID="2195bb89-ac75-41a6-8e39-77506c50c101" Jan 28 15:49:12 crc kubenswrapper[4811]: E0128 15:49:11.999560 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-zkcxm" podUID="d23adda9-57ba-4cbf-be0a-6cec03fbdc1d" Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.059713 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.241089 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/91f87058-06bb-4c9d-bff6-5f817c426b8b-kubelet-dir\") pod \"91f87058-06bb-4c9d-bff6-5f817c426b8b\" (UID: \"91f87058-06bb-4c9d-bff6-5f817c426b8b\") " Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.241150 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/91f87058-06bb-4c9d-bff6-5f817c426b8b-kube-api-access\") pod \"91f87058-06bb-4c9d-bff6-5f817c426b8b\" (UID: \"91f87058-06bb-4c9d-bff6-5f817c426b8b\") " Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.241193 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/91f87058-06bb-4c9d-bff6-5f817c426b8b-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "91f87058-06bb-4c9d-bff6-5f817c426b8b" (UID: "91f87058-06bb-4c9d-bff6-5f817c426b8b"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.241576 4811 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/91f87058-06bb-4c9d-bff6-5f817c426b8b-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.264445 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91f87058-06bb-4c9d-bff6-5f817c426b8b-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "91f87058-06bb-4c9d-bff6-5f817c426b8b" (UID: "91f87058-06bb-4c9d-bff6-5f817c426b8b"). 
InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.343085 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/91f87058-06bb-4c9d-bff6-5f817c426b8b-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.404023 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-75cb47dc7f-v6qgg" Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.529493 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n2nwr" event={"ID":"fcb7ac6d-c7f1-4c34-9805-e3333249b868","Type":"ContainerStarted","Data":"87bb222eb4a487252106f56f9f33659aa1666ca5be60e4d76b17f36363c9b9e5"} Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.534665 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"91f87058-06bb-4c9d-bff6-5f817c426b8b","Type":"ContainerDied","Data":"fe9400446a7bc905c7f107ddb8a07cee2a5a57b5ad6c90af44795312417c9332"} Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.534713 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.534740 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fe9400446a7bc905c7f107ddb8a07cee2a5a57b5ad6c90af44795312417c9332" Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.537819 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mql9l" event={"ID":"4bc51efa-36a8-4548-8497-5bc394e0de82","Type":"ContainerStarted","Data":"e2729603d1592c3ad45d98c17e97e316b669e821e1bd642303f23239c14837ef"} Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.540419 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5gjk2" event={"ID":"ad2a3f3f-a2fa-4868-a770-c114e995ddbc","Type":"ContainerStarted","Data":"93069a42ef0a0370dbefe14ce1d790439dd5e35e977e89a07a89a62ce73e057c"} Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.545511 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c5c0faf4-e93d-491d-9262-4fb8b542d77c-client-ca\") pod \"c5c0faf4-e93d-491d-9262-4fb8b542d77c\" (UID: \"c5c0faf4-e93d-491d-9262-4fb8b542d77c\") " Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.545625 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5c0faf4-e93d-491d-9262-4fb8b542d77c-config\") pod \"c5c0faf4-e93d-491d-9262-4fb8b542d77c\" (UID: \"c5c0faf4-e93d-491d-9262-4fb8b542d77c\") " Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.545684 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c5c0faf4-e93d-491d-9262-4fb8b542d77c-serving-cert\") pod \"c5c0faf4-e93d-491d-9262-4fb8b542d77c\" (UID: \"c5c0faf4-e93d-491d-9262-4fb8b542d77c\") " Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.545719 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-twlwj\" (UniqueName: 
\"kubernetes.io/projected/c5c0faf4-e93d-491d-9262-4fb8b542d77c-kube-api-access-twlwj\") pod \"c5c0faf4-e93d-491d-9262-4fb8b542d77c\" (UID: \"c5c0faf4-e93d-491d-9262-4fb8b542d77c\") " Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.546173 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-75cb47dc7f-v6qgg" event={"ID":"c5c0faf4-e93d-491d-9262-4fb8b542d77c","Type":"ContainerDied","Data":"f6a7a9e7d34fd4bf30870de2eb798fcb94703611f615b34d7a4654f4054969aa"} Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.546247 4811 scope.go:117] "RemoveContainer" containerID="b1f71740aafc4959f72e533c1d6ba99f1d6c60ea327ef66e4380bcaefa931b54" Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.546493 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-75cb47dc7f-v6qgg" Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.546770 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5c0faf4-e93d-491d-9262-4fb8b542d77c-config" (OuterVolumeSpecName: "config") pod "c5c0faf4-e93d-491d-9262-4fb8b542d77c" (UID: "c5c0faf4-e93d-491d-9262-4fb8b542d77c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.547584 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5c0faf4-e93d-491d-9262-4fb8b542d77c-client-ca" (OuterVolumeSpecName: "client-ca") pod "c5c0faf4-e93d-491d-9262-4fb8b542d77c" (UID: "c5c0faf4-e93d-491d-9262-4fb8b542d77c"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.549353 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-t5mlc" event={"ID":"bb3aac9c-e96e-4a5e-beb5-aeff56394467","Type":"ContainerStarted","Data":"9facd4897940c74cbe8cef44e51d1954ded0cdfe6ee41e4b2ed36ef4a4de012d"} Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.555134 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5c0faf4-e93d-491d-9262-4fb8b542d77c-kube-api-access-twlwj" (OuterVolumeSpecName: "kube-api-access-twlwj") pod "c5c0faf4-e93d-491d-9262-4fb8b542d77c" (UID: "c5c0faf4-e93d-491d-9262-4fb8b542d77c"). InnerVolumeSpecName "kube-api-access-twlwj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.560694 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5c0faf4-e93d-491d-9262-4fb8b542d77c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "c5c0faf4-e93d-491d-9262-4fb8b542d77c" (UID: "c5c0faf4-e93d-491d-9262-4fb8b542d77c"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.606816 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-t5mlc" podStartSLOduration=224.606798363 podStartE2EDuration="3m44.606798363s" podCreationTimestamp="2026-01-28 15:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:49:12.605768225 +0000 UTC m=+245.360131808" watchObservedRunningTime="2026-01-28 15:49:12.606798363 +0000 UTC m=+245.361161946" Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.647683 4811 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c5c0faf4-e93d-491d-9262-4fb8b542d77c-client-ca\") on node \"crc\" DevicePath \"\"" Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.647721 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5c0faf4-e93d-491d-9262-4fb8b542d77c-config\") on node \"crc\" DevicePath \"\"" Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.647730 4811 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c5c0faf4-e93d-491d-9262-4fb8b542d77c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.647740 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-twlwj\" (UniqueName: \"kubernetes.io/projected/c5c0faf4-e93d-491d-9262-4fb8b542d77c-kube-api-access-twlwj\") on node \"crc\" DevicePath \"\"" Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.891185 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-75cb47dc7f-v6qgg"] Jan 28 15:49:12 crc kubenswrapper[4811]: I0128 15:49:12.894778 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-75cb47dc7f-v6qgg"] Jan 28 15:49:13 crc kubenswrapper[4811]: I0128 15:49:13.557719 4811 generic.go:334] "Generic (PLEG): container finished" podID="fcb7ac6d-c7f1-4c34-9805-e3333249b868" containerID="87bb222eb4a487252106f56f9f33659aa1666ca5be60e4d76b17f36363c9b9e5" exitCode=0 Jan 28 15:49:13 crc kubenswrapper[4811]: I0128 15:49:13.557800 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n2nwr" event={"ID":"fcb7ac6d-c7f1-4c34-9805-e3333249b868","Type":"ContainerDied","Data":"87bb222eb4a487252106f56f9f33659aa1666ca5be60e4d76b17f36363c9b9e5"} Jan 28 15:49:13 crc kubenswrapper[4811]: I0128 15:49:13.560559 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mql9l" event={"ID":"4bc51efa-36a8-4548-8497-5bc394e0de82","Type":"ContainerDied","Data":"e2729603d1592c3ad45d98c17e97e316b669e821e1bd642303f23239c14837ef"} Jan 28 15:49:13 crc kubenswrapper[4811]: I0128 15:49:13.560362 4811 generic.go:334] "Generic (PLEG): container finished" podID="4bc51efa-36a8-4548-8497-5bc394e0de82" containerID="e2729603d1592c3ad45d98c17e97e316b669e821e1bd642303f23239c14837ef" exitCode=0 Jan 28 15:49:13 crc kubenswrapper[4811]: I0128 15:49:13.562794 4811 generic.go:334] "Generic (PLEG): container finished" podID="ad2a3f3f-a2fa-4868-a770-c114e995ddbc" containerID="93069a42ef0a0370dbefe14ce1d790439dd5e35e977e89a07a89a62ce73e057c" exitCode=0 Jan 28 15:49:13 crc kubenswrapper[4811]: I0128 
15:49:13.562874 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5gjk2" event={"ID":"ad2a3f3f-a2fa-4868-a770-c114e995ddbc","Type":"ContainerDied","Data":"93069a42ef0a0370dbefe14ce1d790439dd5e35e977e89a07a89a62ce73e057c"} Jan 28 15:49:14 crc kubenswrapper[4811]: I0128 15:49:14.346542 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5c0faf4-e93d-491d-9262-4fb8b542d77c" path="/var/lib/kubelet/pods/c5c0faf4-e93d-491d-9262-4fb8b542d77c/volumes" Jan 28 15:49:14 crc kubenswrapper[4811]: I0128 15:49:14.572616 4811 generic.go:334] "Generic (PLEG): container finished" podID="87f8aef8-757b-4a73-9ee5-09751c3b7e92" containerID="19b9e50cff77e69ab40bb3e8e544d607c3b8ea26583c95ba6a1856d1b5c100c1" exitCode=0 Jan 28 15:49:14 crc kubenswrapper[4811]: I0128 15:49:14.572650 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-znvwk" event={"ID":"87f8aef8-757b-4a73-9ee5-09751c3b7e92","Type":"ContainerDied","Data":"19b9e50cff77e69ab40bb3e8e544d607c3b8ea26583c95ba6a1856d1b5c100c1"} Jan 28 15:49:16 crc kubenswrapper[4811]: I0128 15:49:16.588241 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mql9l" event={"ID":"4bc51efa-36a8-4548-8497-5bc394e0de82","Type":"ContainerStarted","Data":"0dcdd77dc06325cbcc7ef92bc452d42e9889122acd892228819e1f17ac59d812"} Jan 28 15:49:17 crc kubenswrapper[4811]: I0128 15:49:17.078644 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5c8f657465-w529j"] Jan 28 15:49:17 crc kubenswrapper[4811]: E0128 15:49:17.079114 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91f87058-06bb-4c9d-bff6-5f817c426b8b" containerName="pruner" Jan 28 15:49:17 crc kubenswrapper[4811]: I0128 15:49:17.079145 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="91f87058-06bb-4c9d-bff6-5f817c426b8b" containerName="pruner" Jan 28 15:49:17 crc kubenswrapper[4811]: E0128 15:49:17.079186 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5c0faf4-e93d-491d-9262-4fb8b542d77c" containerName="route-controller-manager" Jan 28 15:49:17 crc kubenswrapper[4811]: I0128 15:49:17.079200 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5c0faf4-e93d-491d-9262-4fb8b542d77c" containerName="route-controller-manager" Jan 28 15:49:17 crc kubenswrapper[4811]: I0128 15:49:17.079586 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="91f87058-06bb-4c9d-bff6-5f817c426b8b" containerName="pruner" Jan 28 15:49:17 crc kubenswrapper[4811]: I0128 15:49:17.079676 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5c0faf4-e93d-491d-9262-4fb8b542d77c" containerName="route-controller-manager" Jan 28 15:49:17 crc kubenswrapper[4811]: I0128 15:49:17.080642 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5c8f657465-w529j" Jan 28 15:49:17 crc kubenswrapper[4811]: I0128 15:49:17.087031 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 28 15:49:17 crc kubenswrapper[4811]: I0128 15:49:17.087662 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 28 15:49:17 crc kubenswrapper[4811]: I0128 15:49:17.088732 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 28 15:49:17 crc kubenswrapper[4811]: I0128 15:49:17.089205 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 28 15:49:17 crc kubenswrapper[4811]: I0128 15:49:17.089571 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 28 15:49:17 crc kubenswrapper[4811]: I0128 15:49:17.090709 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5c8f657465-w529j"] Jan 28 15:49:17 crc kubenswrapper[4811]: I0128 15:49:17.093388 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 28 15:49:17 crc kubenswrapper[4811]: I0128 15:49:17.214074 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f5018e7e-df1e-47a6-8170-7e380c68596e-client-ca\") pod \"route-controller-manager-5c8f657465-w529j\" (UID: \"f5018e7e-df1e-47a6-8170-7e380c68596e\") " pod="openshift-route-controller-manager/route-controller-manager-5c8f657465-w529j" Jan 28 15:49:17 crc kubenswrapper[4811]: I0128 15:49:17.214142 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4c5rc\" (UniqueName: \"kubernetes.io/projected/f5018e7e-df1e-47a6-8170-7e380c68596e-kube-api-access-4c5rc\") pod \"route-controller-manager-5c8f657465-w529j\" (UID: \"f5018e7e-df1e-47a6-8170-7e380c68596e\") " pod="openshift-route-controller-manager/route-controller-manager-5c8f657465-w529j" Jan 28 15:49:17 crc kubenswrapper[4811]: I0128 15:49:17.214297 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5018e7e-df1e-47a6-8170-7e380c68596e-config\") pod \"route-controller-manager-5c8f657465-w529j\" (UID: \"f5018e7e-df1e-47a6-8170-7e380c68596e\") " pod="openshift-route-controller-manager/route-controller-manager-5c8f657465-w529j" Jan 28 15:49:17 crc kubenswrapper[4811]: I0128 15:49:17.214494 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f5018e7e-df1e-47a6-8170-7e380c68596e-serving-cert\") pod \"route-controller-manager-5c8f657465-w529j\" (UID: \"f5018e7e-df1e-47a6-8170-7e380c68596e\") " pod="openshift-route-controller-manager/route-controller-manager-5c8f657465-w529j" Jan 28 15:49:17 crc kubenswrapper[4811]: I0128 15:49:17.315756 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f5018e7e-df1e-47a6-8170-7e380c68596e-client-ca\") pod 
\"route-controller-manager-5c8f657465-w529j\" (UID: \"f5018e7e-df1e-47a6-8170-7e380c68596e\") " pod="openshift-route-controller-manager/route-controller-manager-5c8f657465-w529j" Jan 28 15:49:17 crc kubenswrapper[4811]: I0128 15:49:17.315827 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4c5rc\" (UniqueName: \"kubernetes.io/projected/f5018e7e-df1e-47a6-8170-7e380c68596e-kube-api-access-4c5rc\") pod \"route-controller-manager-5c8f657465-w529j\" (UID: \"f5018e7e-df1e-47a6-8170-7e380c68596e\") " pod="openshift-route-controller-manager/route-controller-manager-5c8f657465-w529j" Jan 28 15:49:17 crc kubenswrapper[4811]: I0128 15:49:17.315860 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5018e7e-df1e-47a6-8170-7e380c68596e-config\") pod \"route-controller-manager-5c8f657465-w529j\" (UID: \"f5018e7e-df1e-47a6-8170-7e380c68596e\") " pod="openshift-route-controller-manager/route-controller-manager-5c8f657465-w529j" Jan 28 15:49:17 crc kubenswrapper[4811]: I0128 15:49:17.315907 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f5018e7e-df1e-47a6-8170-7e380c68596e-serving-cert\") pod \"route-controller-manager-5c8f657465-w529j\" (UID: \"f5018e7e-df1e-47a6-8170-7e380c68596e\") " pod="openshift-route-controller-manager/route-controller-manager-5c8f657465-w529j" Jan 28 15:49:17 crc kubenswrapper[4811]: I0128 15:49:17.316650 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f5018e7e-df1e-47a6-8170-7e380c68596e-client-ca\") pod \"route-controller-manager-5c8f657465-w529j\" (UID: \"f5018e7e-df1e-47a6-8170-7e380c68596e\") " pod="openshift-route-controller-manager/route-controller-manager-5c8f657465-w529j" Jan 28 15:49:17 crc kubenswrapper[4811]: I0128 15:49:17.317473 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5018e7e-df1e-47a6-8170-7e380c68596e-config\") pod \"route-controller-manager-5c8f657465-w529j\" (UID: \"f5018e7e-df1e-47a6-8170-7e380c68596e\") " pod="openshift-route-controller-manager/route-controller-manager-5c8f657465-w529j" Jan 28 15:49:17 crc kubenswrapper[4811]: I0128 15:49:17.322160 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f5018e7e-df1e-47a6-8170-7e380c68596e-serving-cert\") pod \"route-controller-manager-5c8f657465-w529j\" (UID: \"f5018e7e-df1e-47a6-8170-7e380c68596e\") " pod="openshift-route-controller-manager/route-controller-manager-5c8f657465-w529j" Jan 28 15:49:17 crc kubenswrapper[4811]: I0128 15:49:17.337833 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4c5rc\" (UniqueName: \"kubernetes.io/projected/f5018e7e-df1e-47a6-8170-7e380c68596e-kube-api-access-4c5rc\") pod \"route-controller-manager-5c8f657465-w529j\" (UID: \"f5018e7e-df1e-47a6-8170-7e380c68596e\") " pod="openshift-route-controller-manager/route-controller-manager-5c8f657465-w529j" Jan 28 15:49:17 crc kubenswrapper[4811]: I0128 15:49:17.408205 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5c8f657465-w529j" Jan 28 15:49:18 crc kubenswrapper[4811]: I0128 15:49:18.627920 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mql9l" podStartSLOduration=4.675342004 podStartE2EDuration="1m41.627890181s" podCreationTimestamp="2026-01-28 15:47:37 +0000 UTC" firstStartedPulling="2026-01-28 15:47:38.645995727 +0000 UTC m=+151.400359310" lastFinishedPulling="2026-01-28 15:49:15.598543904 +0000 UTC m=+248.352907487" observedRunningTime="2026-01-28 15:49:18.625757123 +0000 UTC m=+251.380120706" watchObservedRunningTime="2026-01-28 15:49:18.627890181 +0000 UTC m=+251.382253764" Jan 28 15:49:19 crc kubenswrapper[4811]: I0128 15:49:19.234312 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5c8f657465-w529j"] Jan 28 15:49:19 crc kubenswrapper[4811]: I0128 15:49:19.615001 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9qvvv" event={"ID":"234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6","Type":"ContainerStarted","Data":"713cf51a8a5728c12cfed8a8b4a84872e42df79e62ee93160c7ad84eeed69216"} Jan 28 15:49:19 crc kubenswrapper[4811]: I0128 15:49:19.620758 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-znvwk" event={"ID":"87f8aef8-757b-4a73-9ee5-09751c3b7e92","Type":"ContainerStarted","Data":"04980a755a0f820730eae8de3394b50e556c335c612950a84e022c533f17667a"} Jan 28 15:49:19 crc kubenswrapper[4811]: I0128 15:49:19.624915 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5c8f657465-w529j" event={"ID":"f5018e7e-df1e-47a6-8170-7e380c68596e","Type":"ContainerStarted","Data":"479c6f9da8bf07368e4ae35bc945711a563359e9195e3fc96c176771fdaabf89"} Jan 28 15:49:19 crc kubenswrapper[4811]: I0128 15:49:19.624985 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5c8f657465-w529j" event={"ID":"f5018e7e-df1e-47a6-8170-7e380c68596e","Type":"ContainerStarted","Data":"a3947c8114509d67e707b5a3eb03ddc124c395b5441c52a964808af318b9d767"} Jan 28 15:49:19 crc kubenswrapper[4811]: I0128 15:49:19.625592 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5c8f657465-w529j" Jan 28 15:49:19 crc kubenswrapper[4811]: I0128 15:49:19.627493 4811 patch_prober.go:28] interesting pod/route-controller-manager-5c8f657465-w529j container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.58:8443/healthz\": dial tcp 10.217.0.58:8443: connect: connection refused" start-of-body= Jan 28 15:49:19 crc kubenswrapper[4811]: I0128 15:49:19.627577 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-5c8f657465-w529j" podUID="f5018e7e-df1e-47a6-8170-7e380c68596e" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.58:8443/healthz\": dial tcp 10.217.0.58:8443: connect: connection refused" Jan 28 15:49:19 crc kubenswrapper[4811]: I0128 15:49:19.628034 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n2nwr" 
event={"ID":"fcb7ac6d-c7f1-4c34-9805-e3333249b868","Type":"ContainerStarted","Data":"a71c600f431b6d81cc6dae14e1588c1b9991c8fc77eaf5c09738ae201faaf088"} Jan 28 15:49:19 crc kubenswrapper[4811]: I0128 15:49:19.634226 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5gjk2" event={"ID":"ad2a3f3f-a2fa-4868-a770-c114e995ddbc","Type":"ContainerStarted","Data":"20b22582bcab0e8c3f887620a734574aeb04e6b359187bb1cdc9a2bc0b75eead"} Jan 28 15:49:19 crc kubenswrapper[4811]: I0128 15:49:19.666365 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5c8f657465-w529j" podStartSLOduration=65.666324872 podStartE2EDuration="1m5.666324872s" podCreationTimestamp="2026-01-28 15:48:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:49:19.66517565 +0000 UTC m=+252.419539233" watchObservedRunningTime="2026-01-28 15:49:19.666324872 +0000 UTC m=+252.420688455" Jan 28 15:49:19 crc kubenswrapper[4811]: I0128 15:49:19.709460 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5gjk2" podStartSLOduration=3.161209256 podStartE2EDuration="1m42.709420228s" podCreationTimestamp="2026-01-28 15:47:37 +0000 UTC" firstStartedPulling="2026-01-28 15:47:39.775602562 +0000 UTC m=+152.529966145" lastFinishedPulling="2026-01-28 15:49:19.323813534 +0000 UTC m=+252.078177117" observedRunningTime="2026-01-28 15:49:19.705159032 +0000 UTC m=+252.459522615" watchObservedRunningTime="2026-01-28 15:49:19.709420228 +0000 UTC m=+252.463783801" Jan 28 15:49:19 crc kubenswrapper[4811]: I0128 15:49:19.735589 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-n2nwr" podStartSLOduration=4.390572185 podStartE2EDuration="1m45.735554762s" podCreationTimestamp="2026-01-28 15:47:34 +0000 UTC" firstStartedPulling="2026-01-28 15:47:37.456619035 +0000 UTC m=+150.210982628" lastFinishedPulling="2026-01-28 15:49:18.801601622 +0000 UTC m=+251.555965205" observedRunningTime="2026-01-28 15:49:19.730995207 +0000 UTC m=+252.485358800" watchObservedRunningTime="2026-01-28 15:49:19.735554762 +0000 UTC m=+252.489918345" Jan 28 15:49:19 crc kubenswrapper[4811]: I0128 15:49:19.756748 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-znvwk" podStartSLOduration=4.294053976 podStartE2EDuration="1m44.756719519s" podCreationTimestamp="2026-01-28 15:47:35 +0000 UTC" firstStartedPulling="2026-01-28 15:47:38.71527322 +0000 UTC m=+151.469636803" lastFinishedPulling="2026-01-28 15:49:19.177938763 +0000 UTC m=+251.932302346" observedRunningTime="2026-01-28 15:49:19.754005545 +0000 UTC m=+252.508369148" watchObservedRunningTime="2026-01-28 15:49:19.756719519 +0000 UTC m=+252.511083102" Jan 28 15:49:20 crc kubenswrapper[4811]: I0128 15:49:20.645802 4811 generic.go:334] "Generic (PLEG): container finished" podID="234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6" containerID="713cf51a8a5728c12cfed8a8b4a84872e42df79e62ee93160c7ad84eeed69216" exitCode=0 Jan 28 15:49:20 crc kubenswrapper[4811]: I0128 15:49:20.645888 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9qvvv" 
event={"ID":"234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6","Type":"ContainerDied","Data":"713cf51a8a5728c12cfed8a8b4a84872e42df79e62ee93160c7ad84eeed69216"} Jan 28 15:49:20 crc kubenswrapper[4811]: I0128 15:49:20.653682 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5c8f657465-w529j" Jan 28 15:49:21 crc kubenswrapper[4811]: I0128 15:49:21.669312 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9qvvv" event={"ID":"234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6","Type":"ContainerStarted","Data":"91c7303b3b4529a60344c974ea01c3f04efcec1bd355f3a2177f84d43abfe50f"} Jan 28 15:49:21 crc kubenswrapper[4811]: I0128 15:49:21.694126 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9qvvv" podStartSLOduration=3.416395361 podStartE2EDuration="1m43.694095574s" podCreationTimestamp="2026-01-28 15:47:38 +0000 UTC" firstStartedPulling="2026-01-28 15:47:40.803422092 +0000 UTC m=+153.557785675" lastFinishedPulling="2026-01-28 15:49:21.081122305 +0000 UTC m=+253.835485888" observedRunningTime="2026-01-28 15:49:21.691689989 +0000 UTC m=+254.446053572" watchObservedRunningTime="2026-01-28 15:49:21.694095574 +0000 UTC m=+254.448459157" Jan 28 15:49:23 crc kubenswrapper[4811]: I0128 15:49:23.689282 4811 generic.go:334] "Generic (PLEG): container finished" podID="5f806cee-7e54-4979-a1a2-d42a4c892013" containerID="dd410c52f3c3ae0a96a55740e06288933953a4b692c6619ac2863899af1486bf" exitCode=0 Jan 28 15:49:23 crc kubenswrapper[4811]: I0128 15:49:23.689396 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mqvcx" event={"ID":"5f806cee-7e54-4979-a1a2-d42a4c892013","Type":"ContainerDied","Data":"dd410c52f3c3ae0a96a55740e06288933953a4b692c6619ac2863899af1486bf"} Jan 28 15:49:25 crc kubenswrapper[4811]: I0128 15:49:25.293990 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-n2nwr" Jan 28 15:49:25 crc kubenswrapper[4811]: I0128 15:49:25.294085 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-n2nwr" Jan 28 15:49:25 crc kubenswrapper[4811]: I0128 15:49:25.733322 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-znvwk" Jan 28 15:49:25 crc kubenswrapper[4811]: I0128 15:49:25.734037 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-znvwk" Jan 28 15:49:25 crc kubenswrapper[4811]: I0128 15:49:25.888309 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-n2nwr" Jan 28 15:49:25 crc kubenswrapper[4811]: I0128 15:49:25.891389 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-znvwk" Jan 28 15:49:25 crc kubenswrapper[4811]: I0128 15:49:25.937490 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-n2nwr" Jan 28 15:49:26 crc kubenswrapper[4811]: I0128 15:49:26.758837 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-znvwk" Jan 28 15:49:27 crc kubenswrapper[4811]: I0128 15:49:27.601417 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mql9l" Jan 28 15:49:27 crc kubenswrapper[4811]: I0128 15:49:27.601509 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mql9l" Jan 28 15:49:27 crc kubenswrapper[4811]: I0128 15:49:27.660616 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mql9l" Jan 28 15:49:27 crc kubenswrapper[4811]: I0128 15:49:27.753857 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mql9l" Jan 28 15:49:27 crc kubenswrapper[4811]: I0128 15:49:27.932954 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5gjk2" Jan 28 15:49:27 crc kubenswrapper[4811]: I0128 15:49:27.933548 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5gjk2" Jan 28 15:49:27 crc kubenswrapper[4811]: I0128 15:49:27.976140 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5gjk2" Jan 28 15:49:28 crc kubenswrapper[4811]: I0128 15:49:28.763773 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5gjk2" Jan 28 15:49:28 crc kubenswrapper[4811]: I0128 15:49:28.992148 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9qvvv" Jan 28 15:49:28 crc kubenswrapper[4811]: I0128 15:49:28.992227 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9qvvv" Jan 28 15:49:29 crc kubenswrapper[4811]: I0128 15:49:29.039690 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9qvvv" Jan 28 15:49:29 crc kubenswrapper[4811]: I0128 15:49:29.764310 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9qvvv" Jan 28 15:49:31 crc kubenswrapper[4811]: I0128 15:49:31.739705 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mqvcx" event={"ID":"5f806cee-7e54-4979-a1a2-d42a4c892013","Type":"ContainerStarted","Data":"98714443f526a3cf5836d4c45e3be4aac9eff794d76f0ea7499e003fd4f8480f"} Jan 28 15:49:31 crc kubenswrapper[4811]: I0128 15:49:31.766974 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5gjk2"] Jan 28 15:49:31 crc kubenswrapper[4811]: I0128 15:49:31.767426 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5gjk2" podUID="ad2a3f3f-a2fa-4868-a770-c114e995ddbc" containerName="registry-server" containerID="cri-o://20b22582bcab0e8c3f887620a734574aeb04e6b359187bb1cdc9a2bc0b75eead" gracePeriod=2 Jan 28 15:49:31 crc kubenswrapper[4811]: I0128 15:49:31.965381 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9qvvv"] Jan 28 15:49:31 crc kubenswrapper[4811]: I0128 15:49:31.965759 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-9qvvv" podUID="234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6" containerName="registry-server" containerID="cri-o://91c7303b3b4529a60344c974ea01c3f04efcec1bd355f3a2177f84d43abfe50f" gracePeriod=2 Jan 28 
15:49:32 crc kubenswrapper[4811]: I0128 15:49:32.785121 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mqvcx" podStartSLOduration=6.116938221 podStartE2EDuration="1m57.78507854s" podCreationTimestamp="2026-01-28 15:47:35 +0000 UTC" firstStartedPulling="2026-01-28 15:47:38.550404615 +0000 UTC m=+151.304768198" lastFinishedPulling="2026-01-28 15:49:30.218544934 +0000 UTC m=+262.972908517" observedRunningTime="2026-01-28 15:49:32.782214641 +0000 UTC m=+265.536578244" watchObservedRunningTime="2026-01-28 15:49:32.78507854 +0000 UTC m=+265.539442123" Jan 28 15:49:33 crc kubenswrapper[4811]: I0128 15:49:33.751641 4811 generic.go:334] "Generic (PLEG): container finished" podID="ad2a3f3f-a2fa-4868-a770-c114e995ddbc" containerID="20b22582bcab0e8c3f887620a734574aeb04e6b359187bb1cdc9a2bc0b75eead" exitCode=0 Jan 28 15:49:33 crc kubenswrapper[4811]: I0128 15:49:33.751719 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5gjk2" event={"ID":"ad2a3f3f-a2fa-4868-a770-c114e995ddbc","Type":"ContainerDied","Data":"20b22582bcab0e8c3f887620a734574aeb04e6b359187bb1cdc9a2bc0b75eead"} Jan 28 15:49:33 crc kubenswrapper[4811]: I0128 15:49:33.754309 4811 generic.go:334] "Generic (PLEG): container finished" podID="234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6" containerID="91c7303b3b4529a60344c974ea01c3f04efcec1bd355f3a2177f84d43abfe50f" exitCode=0 Jan 28 15:49:33 crc kubenswrapper[4811]: I0128 15:49:33.754339 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9qvvv" event={"ID":"234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6","Type":"ContainerDied","Data":"91c7303b3b4529a60344c974ea01c3f04efcec1bd355f3a2177f84d43abfe50f"} Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.073221 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6cc89988fc-zxgtl"] Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.073788 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-6cc89988fc-zxgtl" podUID="fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d" containerName="controller-manager" containerID="cri-o://e5dbcf65738297ab27e42ab1bab6424f9bdba109e93420a770d84cd92f0be606" gracePeriod=30 Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.168618 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5c8f657465-w529j"] Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.168828 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-5c8f657465-w529j" podUID="f5018e7e-df1e-47a6-8170-7e380c68596e" containerName="route-controller-manager" containerID="cri-o://479c6f9da8bf07368e4ae35bc945711a563359e9195e3fc96c176771fdaabf89" gracePeriod=30 Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.311939 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5gjk2" Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.394487 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad2a3f3f-a2fa-4868-a770-c114e995ddbc-catalog-content\") pod \"ad2a3f3f-a2fa-4868-a770-c114e995ddbc\" (UID: \"ad2a3f3f-a2fa-4868-a770-c114e995ddbc\") " Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.394570 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad2a3f3f-a2fa-4868-a770-c114e995ddbc-utilities\") pod \"ad2a3f3f-a2fa-4868-a770-c114e995ddbc\" (UID: \"ad2a3f3f-a2fa-4868-a770-c114e995ddbc\") " Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.394724 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pvkm4\" (UniqueName: \"kubernetes.io/projected/ad2a3f3f-a2fa-4868-a770-c114e995ddbc-kube-api-access-pvkm4\") pod \"ad2a3f3f-a2fa-4868-a770-c114e995ddbc\" (UID: \"ad2a3f3f-a2fa-4868-a770-c114e995ddbc\") " Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.402068 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad2a3f3f-a2fa-4868-a770-c114e995ddbc-utilities" (OuterVolumeSpecName: "utilities") pod "ad2a3f3f-a2fa-4868-a770-c114e995ddbc" (UID: "ad2a3f3f-a2fa-4868-a770-c114e995ddbc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.412760 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9qvvv" Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.416946 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad2a3f3f-a2fa-4868-a770-c114e995ddbc-kube-api-access-pvkm4" (OuterVolumeSpecName: "kube-api-access-pvkm4") pod "ad2a3f3f-a2fa-4868-a770-c114e995ddbc" (UID: "ad2a3f3f-a2fa-4868-a770-c114e995ddbc"). InnerVolumeSpecName "kube-api-access-pvkm4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.441184 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad2a3f3f-a2fa-4868-a770-c114e995ddbc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ad2a3f3f-a2fa-4868-a770-c114e995ddbc" (UID: "ad2a3f3f-a2fa-4868-a770-c114e995ddbc"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.500313 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6-catalog-content\") pod \"234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6\" (UID: \"234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6\") " Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.500531 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pvkm4\" (UniqueName: \"kubernetes.io/projected/ad2a3f3f-a2fa-4868-a770-c114e995ddbc-kube-api-access-pvkm4\") on node \"crc\" DevicePath \"\"" Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.500546 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad2a3f3f-a2fa-4868-a770-c114e995ddbc-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.500555 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad2a3f3f-a2fa-4868-a770-c114e995ddbc-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.601581 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cbzq7\" (UniqueName: \"kubernetes.io/projected/234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6-kube-api-access-cbzq7\") pod \"234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6\" (UID: \"234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6\") " Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.601705 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6-utilities\") pod \"234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6\" (UID: \"234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6\") " Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.604803 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6-utilities" (OuterVolumeSpecName: "utilities") pod "234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6" (UID: "234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.606702 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6-kube-api-access-cbzq7" (OuterVolumeSpecName: "kube-api-access-cbzq7") pod "234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6" (UID: "234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6"). InnerVolumeSpecName "kube-api-access-cbzq7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.618561 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6cc89988fc-zxgtl" Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.638159 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5c8f657465-w529j" Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.697934 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6" (UID: "234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.703254 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.703365 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cbzq7\" (UniqueName: \"kubernetes.io/projected/234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6-kube-api-access-cbzq7\") on node \"crc\" DevicePath \"\"" Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.703385 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.760319 4811 generic.go:334] "Generic (PLEG): container finished" podID="f5018e7e-df1e-47a6-8170-7e380c68596e" containerID="479c6f9da8bf07368e4ae35bc945711a563359e9195e3fc96c176771fdaabf89" exitCode=0 Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.760384 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5c8f657465-w529j" Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.760401 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5c8f657465-w529j" event={"ID":"f5018e7e-df1e-47a6-8170-7e380c68596e","Type":"ContainerDied","Data":"479c6f9da8bf07368e4ae35bc945711a563359e9195e3fc96c176771fdaabf89"} Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.760460 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5c8f657465-w529j" event={"ID":"f5018e7e-df1e-47a6-8170-7e380c68596e","Type":"ContainerDied","Data":"a3947c8114509d67e707b5a3eb03ddc124c395b5441c52a964808af318b9d767"} Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.760481 4811 scope.go:117] "RemoveContainer" containerID="479c6f9da8bf07368e4ae35bc945711a563359e9195e3fc96c176771fdaabf89" Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.781450 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v759z" event={"ID":"2195bb89-ac75-41a6-8e39-77506c50c101","Type":"ContainerStarted","Data":"ffcdcc3e4fedf8b3b4b6ff6008cac4e474be2d223df97c8cfc7e0f4664c6f447"} Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.784399 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5gjk2" event={"ID":"ad2a3f3f-a2fa-4868-a770-c114e995ddbc","Type":"ContainerDied","Data":"3c40a6448bd749faf5f3ebf52b85d5a951b7dec7e5867d07c8d8c14141294733"} Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.784475 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5gjk2" Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.794363 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9qvvv" event={"ID":"234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6","Type":"ContainerDied","Data":"7cde01621c6236e04201c18df537fab656f73fbb1602aeb17731ec1a4de01723"} Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.794549 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9qvvv" Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.806304 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4c5rc\" (UniqueName: \"kubernetes.io/projected/f5018e7e-df1e-47a6-8170-7e380c68596e-kube-api-access-4c5rc\") pod \"f5018e7e-df1e-47a6-8170-7e380c68596e\" (UID: \"f5018e7e-df1e-47a6-8170-7e380c68596e\") " Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.806370 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d-serving-cert\") pod \"fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d\" (UID: \"fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d\") " Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.806440 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d-config\") pod \"fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d\" (UID: \"fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d\") " Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.806479 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f5018e7e-df1e-47a6-8170-7e380c68596e-serving-cert\") pod \"f5018e7e-df1e-47a6-8170-7e380c68596e\" (UID: \"f5018e7e-df1e-47a6-8170-7e380c68596e\") " Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.806511 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5018e7e-df1e-47a6-8170-7e380c68596e-config\") pod \"f5018e7e-df1e-47a6-8170-7e380c68596e\" (UID: \"f5018e7e-df1e-47a6-8170-7e380c68596e\") " Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.806533 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d-proxy-ca-bundles\") pod \"fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d\" (UID: \"fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d\") " Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.806562 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d-client-ca\") pod \"fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d\" (UID: \"fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d\") " Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.806595 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f5018e7e-df1e-47a6-8170-7e380c68596e-client-ca\") pod \"f5018e7e-df1e-47a6-8170-7e380c68596e\" (UID: \"f5018e7e-df1e-47a6-8170-7e380c68596e\") " Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.806631 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zrr52\" 
(UniqueName: \"kubernetes.io/projected/fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d-kube-api-access-zrr52\") pod \"fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d\" (UID: \"fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d\") " Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.810224 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d" (UID: "fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.810643 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5018e7e-df1e-47a6-8170-7e380c68596e-config" (OuterVolumeSpecName: "config") pod "f5018e7e-df1e-47a6-8170-7e380c68596e" (UID: "f5018e7e-df1e-47a6-8170-7e380c68596e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.811193 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5018e7e-df1e-47a6-8170-7e380c68596e-client-ca" (OuterVolumeSpecName: "client-ca") pod "f5018e7e-df1e-47a6-8170-7e380c68596e" (UID: "f5018e7e-df1e-47a6-8170-7e380c68596e"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.811351 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d-client-ca" (OuterVolumeSpecName: "client-ca") pod "fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d" (UID: "fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.811636 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d-config" (OuterVolumeSpecName: "config") pod "fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d" (UID: "fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.820156 4811 generic.go:334] "Generic (PLEG): container finished" podID="fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d" containerID="e5dbcf65738297ab27e42ab1bab6424f9bdba109e93420a770d84cd92f0be606" exitCode=0 Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.820286 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6cc89988fc-zxgtl" Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.820314 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6cc89988fc-zxgtl" event={"ID":"fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d","Type":"ContainerDied","Data":"e5dbcf65738297ab27e42ab1bab6424f9bdba109e93420a770d84cd92f0be606"} Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.821343 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6cc89988fc-zxgtl" event={"ID":"fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d","Type":"ContainerDied","Data":"27c8bf3104d72e3891550cb9f354aac67bc779aecfd21fae6ecac389a4a62c0c"} Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.825114 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zkcxm" event={"ID":"d23adda9-57ba-4cbf-be0a-6cec03fbdc1d","Type":"ContainerStarted","Data":"87f2bd2b4ed99cad7332795fb61f9d973ba3d0500163ea2c72f7dcf7d974adaa"} Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.835105 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5gjk2"] Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.850990 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5gjk2"] Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.851445 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9qvvv"] Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.861501 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9qvvv"] Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.907981 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d-config\") on node \"crc\" DevicePath \"\"" Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.908278 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5018e7e-df1e-47a6-8170-7e380c68596e-config\") on node \"crc\" DevicePath \"\"" Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.908342 4811 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.908453 4811 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d-client-ca\") on node \"crc\" DevicePath \"\"" Jan 28 15:49:34 crc kubenswrapper[4811]: I0128 15:49:34.908525 4811 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f5018e7e-df1e-47a6-8170-7e380c68596e-client-ca\") on node \"crc\" DevicePath \"\"" Jan 28 15:49:35 crc kubenswrapper[4811]: I0128 15:49:35.010942 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5018e7e-df1e-47a6-8170-7e380c68596e-kube-api-access-4c5rc" (OuterVolumeSpecName: "kube-api-access-4c5rc") pod "f5018e7e-df1e-47a6-8170-7e380c68596e" (UID: "f5018e7e-df1e-47a6-8170-7e380c68596e"). InnerVolumeSpecName "kube-api-access-4c5rc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:49:35 crc kubenswrapper[4811]: I0128 15:49:35.010989 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d-kube-api-access-zrr52" (OuterVolumeSpecName: "kube-api-access-zrr52") pod "fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d" (UID: "fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d"). InnerVolumeSpecName "kube-api-access-zrr52". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:49:35 crc kubenswrapper[4811]: I0128 15:49:35.011316 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5018e7e-df1e-47a6-8170-7e380c68596e-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "f5018e7e-df1e-47a6-8170-7e380c68596e" (UID: "f5018e7e-df1e-47a6-8170-7e380c68596e"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:49:35 crc kubenswrapper[4811]: I0128 15:49:35.023739 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d" (UID: "fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:49:35 crc kubenswrapper[4811]: I0128 15:49:35.024270 4811 scope.go:117] "RemoveContainer" containerID="479c6f9da8bf07368e4ae35bc945711a563359e9195e3fc96c176771fdaabf89" Jan 28 15:49:35 crc kubenswrapper[4811]: E0128 15:49:35.024730 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"479c6f9da8bf07368e4ae35bc945711a563359e9195e3fc96c176771fdaabf89\": container with ID starting with 479c6f9da8bf07368e4ae35bc945711a563359e9195e3fc96c176771fdaabf89 not found: ID does not exist" containerID="479c6f9da8bf07368e4ae35bc945711a563359e9195e3fc96c176771fdaabf89" Jan 28 15:49:35 crc kubenswrapper[4811]: I0128 15:49:35.024815 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"479c6f9da8bf07368e4ae35bc945711a563359e9195e3fc96c176771fdaabf89"} err="failed to get container status \"479c6f9da8bf07368e4ae35bc945711a563359e9195e3fc96c176771fdaabf89\": rpc error: code = NotFound desc = could not find container \"479c6f9da8bf07368e4ae35bc945711a563359e9195e3fc96c176771fdaabf89\": container with ID starting with 479c6f9da8bf07368e4ae35bc945711a563359e9195e3fc96c176771fdaabf89 not found: ID does not exist" Jan 28 15:49:35 crc kubenswrapper[4811]: I0128 15:49:35.024851 4811 scope.go:117] "RemoveContainer" containerID="20b22582bcab0e8c3f887620a734574aeb04e6b359187bb1cdc9a2bc0b75eead" Jan 28 15:49:35 crc kubenswrapper[4811]: I0128 15:49:35.049356 4811 scope.go:117] "RemoveContainer" containerID="93069a42ef0a0370dbefe14ce1d790439dd5e35e977e89a07a89a62ce73e057c" Jan 28 15:49:35 crc kubenswrapper[4811]: I0128 15:49:35.069341 4811 scope.go:117] "RemoveContainer" containerID="ac2c6c37b0f2fb9630e6864a283edc8f7782413efd477b7b8cc0f28453012a8d" Jan 28 15:49:35 crc kubenswrapper[4811]: I0128 15:49:35.110560 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4c5rc\" (UniqueName: \"kubernetes.io/projected/f5018e7e-df1e-47a6-8170-7e380c68596e-kube-api-access-4c5rc\") on node \"crc\" DevicePath \"\"" Jan 28 15:49:35 crc kubenswrapper[4811]: I0128 15:49:35.110607 4811 reconciler_common.go:293] "Volume detached for 
volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:49:35 crc kubenswrapper[4811]: I0128 15:49:35.110619 4811 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f5018e7e-df1e-47a6-8170-7e380c68596e-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:49:35 crc kubenswrapper[4811]: I0128 15:49:35.110629 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zrr52\" (UniqueName: \"kubernetes.io/projected/fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d-kube-api-access-zrr52\") on node \"crc\" DevicePath \"\"" Jan 28 15:49:35 crc kubenswrapper[4811]: I0128 15:49:35.161311 4811 scope.go:117] "RemoveContainer" containerID="91c7303b3b4529a60344c974ea01c3f04efcec1bd355f3a2177f84d43abfe50f" Jan 28 15:49:35 crc kubenswrapper[4811]: I0128 15:49:35.162821 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5c8f657465-w529j"] Jan 28 15:49:35 crc kubenswrapper[4811]: I0128 15:49:35.166713 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5c8f657465-w529j"] Jan 28 15:49:35 crc kubenswrapper[4811]: I0128 15:49:35.186526 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6cc89988fc-zxgtl"] Jan 28 15:49:35 crc kubenswrapper[4811]: I0128 15:49:35.191971 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-6cc89988fc-zxgtl"] Jan 28 15:49:35 crc kubenswrapper[4811]: I0128 15:49:35.192412 4811 scope.go:117] "RemoveContainer" containerID="713cf51a8a5728c12cfed8a8b4a84872e42df79e62ee93160c7ad84eeed69216" Jan 28 15:49:35 crc kubenswrapper[4811]: I0128 15:49:35.224920 4811 scope.go:117] "RemoveContainer" containerID="95b17f0aa2e93ea4bb9c4c1409fef09d4bcd76e8997c1677f496e2411afb9dbd" Jan 28 15:49:35 crc kubenswrapper[4811]: I0128 15:49:35.240527 4811 scope.go:117] "RemoveContainer" containerID="e5dbcf65738297ab27e42ab1bab6424f9bdba109e93420a770d84cd92f0be606" Jan 28 15:49:35 crc kubenswrapper[4811]: I0128 15:49:35.257757 4811 scope.go:117] "RemoveContainer" containerID="e5dbcf65738297ab27e42ab1bab6424f9bdba109e93420a770d84cd92f0be606" Jan 28 15:49:35 crc kubenswrapper[4811]: E0128 15:49:35.258353 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5dbcf65738297ab27e42ab1bab6424f9bdba109e93420a770d84cd92f0be606\": container with ID starting with e5dbcf65738297ab27e42ab1bab6424f9bdba109e93420a770d84cd92f0be606 not found: ID does not exist" containerID="e5dbcf65738297ab27e42ab1bab6424f9bdba109e93420a770d84cd92f0be606" Jan 28 15:49:35 crc kubenswrapper[4811]: I0128 15:49:35.258470 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5dbcf65738297ab27e42ab1bab6424f9bdba109e93420a770d84cd92f0be606"} err="failed to get container status \"e5dbcf65738297ab27e42ab1bab6424f9bdba109e93420a770d84cd92f0be606\": rpc error: code = NotFound desc = could not find container \"e5dbcf65738297ab27e42ab1bab6424f9bdba109e93420a770d84cd92f0be606\": container with ID starting with e5dbcf65738297ab27e42ab1bab6424f9bdba109e93420a770d84cd92f0be606 not found: ID does not exist" Jan 28 15:49:35 crc kubenswrapper[4811]: I0128 15:49:35.833277 4811 generic.go:334] "Generic (PLEG): container finished" 
podID="d23adda9-57ba-4cbf-be0a-6cec03fbdc1d" containerID="87f2bd2b4ed99cad7332795fb61f9d973ba3d0500163ea2c72f7dcf7d974adaa" exitCode=0 Jan 28 15:49:35 crc kubenswrapper[4811]: I0128 15:49:35.833340 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zkcxm" event={"ID":"d23adda9-57ba-4cbf-be0a-6cec03fbdc1d","Type":"ContainerDied","Data":"87f2bd2b4ed99cad7332795fb61f9d973ba3d0500163ea2c72f7dcf7d974adaa"} Jan 28 15:49:35 crc kubenswrapper[4811]: I0128 15:49:35.836381 4811 generic.go:334] "Generic (PLEG): container finished" podID="2195bb89-ac75-41a6-8e39-77506c50c101" containerID="ffcdcc3e4fedf8b3b4b6ff6008cac4e474be2d223df97c8cfc7e0f4664c6f447" exitCode=0 Jan 28 15:49:35 crc kubenswrapper[4811]: I0128 15:49:35.836469 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v759z" event={"ID":"2195bb89-ac75-41a6-8e39-77506c50c101","Type":"ContainerDied","Data":"ffcdcc3e4fedf8b3b4b6ff6008cac4e474be2d223df97c8cfc7e0f4664c6f447"} Jan 28 15:49:35 crc kubenswrapper[4811]: I0128 15:49:35.865249 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mqvcx" Jan 28 15:49:35 crc kubenswrapper[4811]: I0128 15:49:35.865566 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mqvcx" Jan 28 15:49:35 crc kubenswrapper[4811]: I0128 15:49:35.909075 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mqvcx" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.086600 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7d4588949f-27gc6"] Jan 28 15:49:36 crc kubenswrapper[4811]: E0128 15:49:36.086933 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6" containerName="extract-content" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.086951 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6" containerName="extract-content" Jan 28 15:49:36 crc kubenswrapper[4811]: E0128 15:49:36.086967 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d" containerName="controller-manager" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.086976 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d" containerName="controller-manager" Jan 28 15:49:36 crc kubenswrapper[4811]: E0128 15:49:36.086991 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad2a3f3f-a2fa-4868-a770-c114e995ddbc" containerName="extract-content" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.086999 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad2a3f3f-a2fa-4868-a770-c114e995ddbc" containerName="extract-content" Jan 28 15:49:36 crc kubenswrapper[4811]: E0128 15:49:36.087015 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad2a3f3f-a2fa-4868-a770-c114e995ddbc" containerName="registry-server" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.087023 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad2a3f3f-a2fa-4868-a770-c114e995ddbc" containerName="registry-server" Jan 28 15:49:36 crc kubenswrapper[4811]: E0128 15:49:36.087036 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6" 
containerName="extract-utilities" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.087043 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6" containerName="extract-utilities" Jan 28 15:49:36 crc kubenswrapper[4811]: E0128 15:49:36.087052 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6" containerName="registry-server" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.087058 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6" containerName="registry-server" Jan 28 15:49:36 crc kubenswrapper[4811]: E0128 15:49:36.087067 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5018e7e-df1e-47a6-8170-7e380c68596e" containerName="route-controller-manager" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.087074 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5018e7e-df1e-47a6-8170-7e380c68596e" containerName="route-controller-manager" Jan 28 15:49:36 crc kubenswrapper[4811]: E0128 15:49:36.087087 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad2a3f3f-a2fa-4868-a770-c114e995ddbc" containerName="extract-utilities" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.087096 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad2a3f3f-a2fa-4868-a770-c114e995ddbc" containerName="extract-utilities" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.087209 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad2a3f3f-a2fa-4868-a770-c114e995ddbc" containerName="registry-server" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.087223 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d" containerName="controller-manager" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.087234 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6" containerName="registry-server" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.087243 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5018e7e-df1e-47a6-8170-7e380c68596e" containerName="route-controller-manager" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.087797 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.090745 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.090812 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.091971 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.092008 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.092693 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.092755 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-f5dbfb54b-p5jv4"] Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.092705 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.093498 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-f5dbfb54b-p5jv4" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.095823 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.095838 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.098821 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.098926 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.099124 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.099198 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.100859 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-f5dbfb54b-p5jv4"] Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.102673 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.106391 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7d4588949f-27gc6"] Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.226759 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/4a79731b-e3a0-4bf4-8ca2-985516ee313e-config\") pod \"route-controller-manager-f5dbfb54b-p5jv4\" (UID: \"4a79731b-e3a0-4bf4-8ca2-985516ee313e\") " pod="openshift-route-controller-manager/route-controller-manager-f5dbfb54b-p5jv4" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.226797 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/abacc063-d637-43aa-8456-e88063ae9d69-proxy-ca-bundles\") pod \"controller-manager-7d4588949f-27gc6\" (UID: \"abacc063-d637-43aa-8456-e88063ae9d69\") " pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.226832 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/abacc063-d637-43aa-8456-e88063ae9d69-config\") pod \"controller-manager-7d4588949f-27gc6\" (UID: \"abacc063-d637-43aa-8456-e88063ae9d69\") " pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.226905 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zssw\" (UniqueName: \"kubernetes.io/projected/4a79731b-e3a0-4bf4-8ca2-985516ee313e-kube-api-access-2zssw\") pod \"route-controller-manager-f5dbfb54b-p5jv4\" (UID: \"4a79731b-e3a0-4bf4-8ca2-985516ee313e\") " pod="openshift-route-controller-manager/route-controller-manager-f5dbfb54b-p5jv4" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.226940 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdm7t\" (UniqueName: \"kubernetes.io/projected/abacc063-d637-43aa-8456-e88063ae9d69-kube-api-access-sdm7t\") pod \"controller-manager-7d4588949f-27gc6\" (UID: \"abacc063-d637-43aa-8456-e88063ae9d69\") " pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.226963 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4a79731b-e3a0-4bf4-8ca2-985516ee313e-serving-cert\") pod \"route-controller-manager-f5dbfb54b-p5jv4\" (UID: \"4a79731b-e3a0-4bf4-8ca2-985516ee313e\") " pod="openshift-route-controller-manager/route-controller-manager-f5dbfb54b-p5jv4" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.226989 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4a79731b-e3a0-4bf4-8ca2-985516ee313e-client-ca\") pod \"route-controller-manager-f5dbfb54b-p5jv4\" (UID: \"4a79731b-e3a0-4bf4-8ca2-985516ee313e\") " pod="openshift-route-controller-manager/route-controller-manager-f5dbfb54b-p5jv4" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.227018 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/abacc063-d637-43aa-8456-e88063ae9d69-serving-cert\") pod \"controller-manager-7d4588949f-27gc6\" (UID: \"abacc063-d637-43aa-8456-e88063ae9d69\") " pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.227093 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"client-ca\" (UniqueName: \"kubernetes.io/configmap/abacc063-d637-43aa-8456-e88063ae9d69-client-ca\") pod \"controller-manager-7d4588949f-27gc6\" (UID: \"abacc063-d637-43aa-8456-e88063ae9d69\") " pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.328570 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/abacc063-d637-43aa-8456-e88063ae9d69-client-ca\") pod \"controller-manager-7d4588949f-27gc6\" (UID: \"abacc063-d637-43aa-8456-e88063ae9d69\") " pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.328640 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a79731b-e3a0-4bf4-8ca2-985516ee313e-config\") pod \"route-controller-manager-f5dbfb54b-p5jv4\" (UID: \"4a79731b-e3a0-4bf4-8ca2-985516ee313e\") " pod="openshift-route-controller-manager/route-controller-manager-f5dbfb54b-p5jv4" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.328662 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/abacc063-d637-43aa-8456-e88063ae9d69-proxy-ca-bundles\") pod \"controller-manager-7d4588949f-27gc6\" (UID: \"abacc063-d637-43aa-8456-e88063ae9d69\") " pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.328687 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/abacc063-d637-43aa-8456-e88063ae9d69-config\") pod \"controller-manager-7d4588949f-27gc6\" (UID: \"abacc063-d637-43aa-8456-e88063ae9d69\") " pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.328722 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zssw\" (UniqueName: \"kubernetes.io/projected/4a79731b-e3a0-4bf4-8ca2-985516ee313e-kube-api-access-2zssw\") pod \"route-controller-manager-f5dbfb54b-p5jv4\" (UID: \"4a79731b-e3a0-4bf4-8ca2-985516ee313e\") " pod="openshift-route-controller-manager/route-controller-manager-f5dbfb54b-p5jv4" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.328741 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdm7t\" (UniqueName: \"kubernetes.io/projected/abacc063-d637-43aa-8456-e88063ae9d69-kube-api-access-sdm7t\") pod \"controller-manager-7d4588949f-27gc6\" (UID: \"abacc063-d637-43aa-8456-e88063ae9d69\") " pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.328760 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4a79731b-e3a0-4bf4-8ca2-985516ee313e-serving-cert\") pod \"route-controller-manager-f5dbfb54b-p5jv4\" (UID: \"4a79731b-e3a0-4bf4-8ca2-985516ee313e\") " pod="openshift-route-controller-manager/route-controller-manager-f5dbfb54b-p5jv4" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.328790 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4a79731b-e3a0-4bf4-8ca2-985516ee313e-client-ca\") pod \"route-controller-manager-f5dbfb54b-p5jv4\" (UID: 
\"4a79731b-e3a0-4bf4-8ca2-985516ee313e\") " pod="openshift-route-controller-manager/route-controller-manager-f5dbfb54b-p5jv4" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.328812 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/abacc063-d637-43aa-8456-e88063ae9d69-serving-cert\") pod \"controller-manager-7d4588949f-27gc6\" (UID: \"abacc063-d637-43aa-8456-e88063ae9d69\") " pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.330249 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4a79731b-e3a0-4bf4-8ca2-985516ee313e-client-ca\") pod \"route-controller-manager-f5dbfb54b-p5jv4\" (UID: \"4a79731b-e3a0-4bf4-8ca2-985516ee313e\") " pod="openshift-route-controller-manager/route-controller-manager-f5dbfb54b-p5jv4" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.330305 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/abacc063-d637-43aa-8456-e88063ae9d69-client-ca\") pod \"controller-manager-7d4588949f-27gc6\" (UID: \"abacc063-d637-43aa-8456-e88063ae9d69\") " pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.330651 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/abacc063-d637-43aa-8456-e88063ae9d69-config\") pod \"controller-manager-7d4588949f-27gc6\" (UID: \"abacc063-d637-43aa-8456-e88063ae9d69\") " pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.330930 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/abacc063-d637-43aa-8456-e88063ae9d69-proxy-ca-bundles\") pod \"controller-manager-7d4588949f-27gc6\" (UID: \"abacc063-d637-43aa-8456-e88063ae9d69\") " pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.332127 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a79731b-e3a0-4bf4-8ca2-985516ee313e-config\") pod \"route-controller-manager-f5dbfb54b-p5jv4\" (UID: \"4a79731b-e3a0-4bf4-8ca2-985516ee313e\") " pod="openshift-route-controller-manager/route-controller-manager-f5dbfb54b-p5jv4" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.334399 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4a79731b-e3a0-4bf4-8ca2-985516ee313e-serving-cert\") pod \"route-controller-manager-f5dbfb54b-p5jv4\" (UID: \"4a79731b-e3a0-4bf4-8ca2-985516ee313e\") " pod="openshift-route-controller-manager/route-controller-manager-f5dbfb54b-p5jv4" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.341454 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/abacc063-d637-43aa-8456-e88063ae9d69-serving-cert\") pod \"controller-manager-7d4588949f-27gc6\" (UID: \"abacc063-d637-43aa-8456-e88063ae9d69\") " pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.343907 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-2zssw\" (UniqueName: \"kubernetes.io/projected/4a79731b-e3a0-4bf4-8ca2-985516ee313e-kube-api-access-2zssw\") pod \"route-controller-manager-f5dbfb54b-p5jv4\" (UID: \"4a79731b-e3a0-4bf4-8ca2-985516ee313e\") " pod="openshift-route-controller-manager/route-controller-manager-f5dbfb54b-p5jv4" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.346272 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdm7t\" (UniqueName: \"kubernetes.io/projected/abacc063-d637-43aa-8456-e88063ae9d69-kube-api-access-sdm7t\") pod \"controller-manager-7d4588949f-27gc6\" (UID: \"abacc063-d637-43aa-8456-e88063ae9d69\") " pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.347365 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6" path="/var/lib/kubelet/pods/234bbc6b-c1e7-4dd5-8a6c-cb3c8782f6c6/volumes" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.348109 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad2a3f3f-a2fa-4868-a770-c114e995ddbc" path="/var/lib/kubelet/pods/ad2a3f3f-a2fa-4868-a770-c114e995ddbc/volumes" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.348843 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5018e7e-df1e-47a6-8170-7e380c68596e" path="/var/lib/kubelet/pods/f5018e7e-df1e-47a6-8170-7e380c68596e/volumes" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.349905 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d" path="/var/lib/kubelet/pods/fb3babf0-f0e2-4f8a-a85f-f11ef65fe72d/volumes" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.415292 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.423918 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-f5dbfb54b-p5jv4" Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.847975 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7d4588949f-27gc6"] Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.852172 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-f5dbfb54b-p5jv4"] Jan 28 15:49:36 crc kubenswrapper[4811]: W0128 15:49:36.858295 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4a79731b_e3a0_4bf4_8ca2_985516ee313e.slice/crio-75beef60ac78453e1e17e3d9c2449431e59bab736b12983dd616cd6d1426b988 WatchSource:0}: Error finding container 75beef60ac78453e1e17e3d9c2449431e59bab736b12983dd616cd6d1426b988: Status 404 returned error can't find the container with id 75beef60ac78453e1e17e3d9c2449431e59bab736b12983dd616cd6d1426b988 Jan 28 15:49:36 crc kubenswrapper[4811]: I0128 15:49:36.903689 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mqvcx" Jan 28 15:49:37 crc kubenswrapper[4811]: I0128 15:49:37.859357 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-f5dbfb54b-p5jv4" event={"ID":"4a79731b-e3a0-4bf4-8ca2-985516ee313e","Type":"ContainerStarted","Data":"13d77925eadc21a87f3ff21518e989e13b88c4fd3c47b0c7a52dc7f2ced9a7ff"} Jan 28 15:49:37 crc kubenswrapper[4811]: I0128 15:49:37.859624 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-f5dbfb54b-p5jv4" event={"ID":"4a79731b-e3a0-4bf4-8ca2-985516ee313e","Type":"ContainerStarted","Data":"75beef60ac78453e1e17e3d9c2449431e59bab736b12983dd616cd6d1426b988"} Jan 28 15:49:37 crc kubenswrapper[4811]: I0128 15:49:37.859642 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-f5dbfb54b-p5jv4" Jan 28 15:49:37 crc kubenswrapper[4811]: I0128 15:49:37.861036 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v759z" event={"ID":"2195bb89-ac75-41a6-8e39-77506c50c101","Type":"ContainerStarted","Data":"72218ec5609a7f64e6796942b08af565a06acd4abfef5235c620cffef6f2ea9a"} Jan 28 15:49:37 crc kubenswrapper[4811]: I0128 15:49:37.862478 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" event={"ID":"abacc063-d637-43aa-8456-e88063ae9d69","Type":"ContainerStarted","Data":"a2b6ab2aa4b5e5e9162660191b2f2d5a16288e25e58e0a7a4c25adc07a4b2da9"} Jan 28 15:49:37 crc kubenswrapper[4811]: I0128 15:49:37.862508 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" event={"ID":"abacc063-d637-43aa-8456-e88063ae9d69","Type":"ContainerStarted","Data":"fa71079d38d8a33c6023f5c4c07613278a314e2c6b2c935f149beb116d3c299c"} Jan 28 15:49:37 crc kubenswrapper[4811]: I0128 15:49:37.868293 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-f5dbfb54b-p5jv4" Jan 28 15:49:37 crc kubenswrapper[4811]: I0128 15:49:37.888708 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-route-controller-manager/route-controller-manager-f5dbfb54b-p5jv4" podStartSLOduration=3.888687407 podStartE2EDuration="3.888687407s" podCreationTimestamp="2026-01-28 15:49:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:49:37.884578685 +0000 UTC m=+270.638942268" watchObservedRunningTime="2026-01-28 15:49:37.888687407 +0000 UTC m=+270.643050990" Jan 28 15:49:38 crc kubenswrapper[4811]: I0128 15:49:38.916538 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" podStartSLOduration=4.916513259 podStartE2EDuration="4.916513259s" podCreationTimestamp="2026-01-28 15:49:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:49:38.912832038 +0000 UTC m=+271.667195621" watchObservedRunningTime="2026-01-28 15:49:38.916513259 +0000 UTC m=+271.670876852" Jan 28 15:49:38 crc kubenswrapper[4811]: I0128 15:49:38.917109 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-v759z" podStartSLOduration=3.7210526919999998 podStartE2EDuration="2m0.917102064s" podCreationTimestamp="2026-01-28 15:47:38 +0000 UTC" firstStartedPulling="2026-01-28 15:47:39.787504962 +0000 UTC m=+152.541868545" lastFinishedPulling="2026-01-28 15:49:36.983554334 +0000 UTC m=+269.737917917" observedRunningTime="2026-01-28 15:49:38.895159696 +0000 UTC m=+271.649523289" watchObservedRunningTime="2026-01-28 15:49:38.917102064 +0000 UTC m=+271.671465647" Jan 28 15:49:39 crc kubenswrapper[4811]: I0128 15:49:39.565056 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-dgnfz"] Jan 28 15:49:39 crc kubenswrapper[4811]: I0128 15:49:39.874369 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zkcxm" event={"ID":"d23adda9-57ba-4cbf-be0a-6cec03fbdc1d","Type":"ContainerStarted","Data":"8230eb696175ff2964e5ec3e8120dfaff977a531aec7015f1ea10fc0755d2630"} Jan 28 15:49:39 crc kubenswrapper[4811]: I0128 15:49:39.896791 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zkcxm" podStartSLOduration=5.093040156 podStartE2EDuration="2m4.896774802s" podCreationTimestamp="2026-01-28 15:47:35 +0000 UTC" firstStartedPulling="2026-01-28 15:47:38.722619924 +0000 UTC m=+151.476983507" lastFinishedPulling="2026-01-28 15:49:38.52635457 +0000 UTC m=+271.280718153" observedRunningTime="2026-01-28 15:49:39.893788031 +0000 UTC m=+272.648151604" watchObservedRunningTime="2026-01-28 15:49:39.896774802 +0000 UTC m=+272.651138375" Jan 28 15:49:40 crc kubenswrapper[4811]: I0128 15:49:40.766421 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mqvcx"] Jan 28 15:49:40 crc kubenswrapper[4811]: I0128 15:49:40.766951 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mqvcx" podUID="5f806cee-7e54-4979-a1a2-d42a4c892013" containerName="registry-server" containerID="cri-o://98714443f526a3cf5836d4c45e3be4aac9eff794d76f0ea7499e003fd4f8480f" gracePeriod=2 Jan 28 15:49:41 crc kubenswrapper[4811]: I0128 15:49:41.889730 4811 generic.go:334] "Generic (PLEG): container finished" podID="5f806cee-7e54-4979-a1a2-d42a4c892013" 
containerID="98714443f526a3cf5836d4c45e3be4aac9eff794d76f0ea7499e003fd4f8480f" exitCode=0 Jan 28 15:49:41 crc kubenswrapper[4811]: I0128 15:49:41.889776 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mqvcx" event={"ID":"5f806cee-7e54-4979-a1a2-d42a4c892013","Type":"ContainerDied","Data":"98714443f526a3cf5836d4c45e3be4aac9eff794d76f0ea7499e003fd4f8480f"} Jan 28 15:49:41 crc kubenswrapper[4811]: I0128 15:49:41.889804 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mqvcx" event={"ID":"5f806cee-7e54-4979-a1a2-d42a4c892013","Type":"ContainerDied","Data":"e08273033fe47c59c1a4a1de52a985f86333cfd04593f223a3b0b3c2fc48a72f"} Jan 28 15:49:41 crc kubenswrapper[4811]: I0128 15:49:41.889817 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e08273033fe47c59c1a4a1de52a985f86333cfd04593f223a3b0b3c2fc48a72f" Jan 28 15:49:41 crc kubenswrapper[4811]: I0128 15:49:41.890257 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mqvcx" Jan 28 15:49:41 crc kubenswrapper[4811]: I0128 15:49:41.905948 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7lgts\" (UniqueName: \"kubernetes.io/projected/5f806cee-7e54-4979-a1a2-d42a4c892013-kube-api-access-7lgts\") pod \"5f806cee-7e54-4979-a1a2-d42a4c892013\" (UID: \"5f806cee-7e54-4979-a1a2-d42a4c892013\") " Jan 28 15:49:41 crc kubenswrapper[4811]: I0128 15:49:41.906012 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f806cee-7e54-4979-a1a2-d42a4c892013-utilities\") pod \"5f806cee-7e54-4979-a1a2-d42a4c892013\" (UID: \"5f806cee-7e54-4979-a1a2-d42a4c892013\") " Jan 28 15:49:41 crc kubenswrapper[4811]: I0128 15:49:41.906049 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f806cee-7e54-4979-a1a2-d42a4c892013-catalog-content\") pod \"5f806cee-7e54-4979-a1a2-d42a4c892013\" (UID: \"5f806cee-7e54-4979-a1a2-d42a4c892013\") " Jan 28 15:49:41 crc kubenswrapper[4811]: I0128 15:49:41.907906 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f806cee-7e54-4979-a1a2-d42a4c892013-utilities" (OuterVolumeSpecName: "utilities") pod "5f806cee-7e54-4979-a1a2-d42a4c892013" (UID: "5f806cee-7e54-4979-a1a2-d42a4c892013"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 15:49:41 crc kubenswrapper[4811]: I0128 15:49:41.917634 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f806cee-7e54-4979-a1a2-d42a4c892013-kube-api-access-7lgts" (OuterVolumeSpecName: "kube-api-access-7lgts") pod "5f806cee-7e54-4979-a1a2-d42a4c892013" (UID: "5f806cee-7e54-4979-a1a2-d42a4c892013"). InnerVolumeSpecName "kube-api-access-7lgts". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:49:41 crc kubenswrapper[4811]: I0128 15:49:41.952925 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f806cee-7e54-4979-a1a2-d42a4c892013-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5f806cee-7e54-4979-a1a2-d42a4c892013" (UID: "5f806cee-7e54-4979-a1a2-d42a4c892013"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.007185 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f806cee-7e54-4979-a1a2-d42a4c892013-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.007219 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7lgts\" (UniqueName: \"kubernetes.io/projected/5f806cee-7e54-4979-a1a2-d42a4c892013-kube-api-access-7lgts\") on node \"crc\" DevicePath \"\"" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.007230 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f806cee-7e54-4979-a1a2-d42a4c892013-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.020588 4811 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 28 15:49:42 crc kubenswrapper[4811]: E0128 15:49:42.020889 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f806cee-7e54-4979-a1a2-d42a4c892013" containerName="registry-server" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.020905 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f806cee-7e54-4979-a1a2-d42a4c892013" containerName="registry-server" Jan 28 15:49:42 crc kubenswrapper[4811]: E0128 15:49:42.020936 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f806cee-7e54-4979-a1a2-d42a4c892013" containerName="extract-content" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.020942 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f806cee-7e54-4979-a1a2-d42a4c892013" containerName="extract-content" Jan 28 15:49:42 crc kubenswrapper[4811]: E0128 15:49:42.020955 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f806cee-7e54-4979-a1a2-d42a4c892013" containerName="extract-utilities" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.020964 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f806cee-7e54-4979-a1a2-d42a4c892013" containerName="extract-utilities" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.021085 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f806cee-7e54-4979-a1a2-d42a4c892013" containerName="registry-server" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.021583 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.021966 4811 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.022540 4811 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.022749 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424" gracePeriod=15 Jan 28 15:49:42 crc kubenswrapper[4811]: E0128 15:49:42.022879 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.022901 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 28 15:49:42 crc kubenswrapper[4811]: E0128 15:49:42.022909 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.022777 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15" gracePeriod=15 Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.022774 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5" gracePeriod=15 Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.022774 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16" gracePeriod=15 Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.022777 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://03015015ac6736269b093c13f4df7abdad3a954c3336cc0b2de2e44cc5ae8338" gracePeriod=15 Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.022920 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 28 15:49:42 crc kubenswrapper[4811]: E0128 15:49:42.023072 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.023081 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver" Jan 28 15:49:42 crc kubenswrapper[4811]: E0128 15:49:42.023092 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.023100 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 28 15:49:42 crc kubenswrapper[4811]: E0128 15:49:42.023111 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.023119 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 28 15:49:42 crc kubenswrapper[4811]: E0128 15:49:42.023131 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.023139 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 28 15:49:42 crc kubenswrapper[4811]: E0128 15:49:42.023149 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.023157 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 28 15:49:42 crc kubenswrapper[4811]: E0128 15:49:42.023168 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.023176 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.023283 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.023296 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.023306 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.023319 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.023329 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.023338 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.023573 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver-check-endpoints" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.209634 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.209743 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.209769 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.209807 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.209825 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.209845 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.209867 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.209893 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.310755 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") 
pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.310807 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.310839 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.310866 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.310890 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.310910 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.310959 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.310920 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.310999 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.311025 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.311039 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.311070 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.311100 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.311107 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.311136 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.311189 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.897302 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.898395 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.899066 4811 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="03015015ac6736269b093c13f4df7abdad3a954c3336cc0b2de2e44cc5ae8338" exitCode=0 Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.899099 4811 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16" exitCode=0 Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.899111 4811 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" 
containerID="ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15" exitCode=0 Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.899122 4811 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5" exitCode=2 Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.899199 4811 scope.go:117] "RemoveContainer" containerID="9a74503c85223a97e2c4e49e2e06a14e3512bbf5c0bae0e584e98449ce860c65" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.901894 4811 generic.go:334] "Generic (PLEG): container finished" podID="89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7" containerID="30b2c13b95f21b3ff9d5983f2abf53c8ba2cbb4207abe4b91ecfb8965622f26d" exitCode=0 Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.901965 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mqvcx" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.902712 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7","Type":"ContainerDied","Data":"30b2c13b95f21b3ff9d5983f2abf53c8ba2cbb4207abe4b91ecfb8965622f26d"} Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.904455 4811 status_manager.go:851] "Failed to get status for pod" podUID="5f806cee-7e54-4979-a1a2-d42a4c892013" pod="openshift-marketplace/certified-operators-mqvcx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mqvcx\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.905031 4811 status_manager.go:851] "Failed to get status for pod" podUID="89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.905685 4811 status_manager.go:851] "Failed to get status for pod" podUID="5f806cee-7e54-4979-a1a2-d42a4c892013" pod="openshift-marketplace/certified-operators-mqvcx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mqvcx\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.906101 4811 status_manager.go:851] "Failed to get status for pod" podUID="89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.924255 4811 status_manager.go:851] "Failed to get status for pod" podUID="5f806cee-7e54-4979-a1a2-d42a4c892013" pod="openshift-marketplace/certified-operators-mqvcx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mqvcx\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:42 crc kubenswrapper[4811]: I0128 15:49:42.924567 4811 status_manager.go:851] "Failed to get status for pod" podUID="89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7" pod="openshift-kube-apiserver/installer-9-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:43 crc kubenswrapper[4811]: I0128 15:49:43.913319 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 28 15:49:44 crc kubenswrapper[4811]: I0128 15:49:44.363368 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 28 15:49:44 crc kubenswrapper[4811]: I0128 15:49:44.364175 4811 status_manager.go:851] "Failed to get status for pod" podUID="5f806cee-7e54-4979-a1a2-d42a4c892013" pod="openshift-marketplace/certified-operators-mqvcx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mqvcx\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:44 crc kubenswrapper[4811]: I0128 15:49:44.364415 4811 status_manager.go:851] "Failed to get status for pod" podUID="89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:44 crc kubenswrapper[4811]: I0128 15:49:44.452537 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7-var-lock\") pod \"89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7\" (UID: \"89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7\") " Jan 28 15:49:44 crc kubenswrapper[4811]: I0128 15:49:44.452575 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7-kubelet-dir\") pod \"89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7\" (UID: \"89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7\") " Jan 28 15:49:44 crc kubenswrapper[4811]: I0128 15:49:44.452639 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7-kube-api-access\") pod \"89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7\" (UID: \"89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7\") " Jan 28 15:49:44 crc kubenswrapper[4811]: I0128 15:49:44.452686 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7-var-lock" (OuterVolumeSpecName: "var-lock") pod "89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7" (UID: "89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 15:49:44 crc kubenswrapper[4811]: I0128 15:49:44.452739 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7" (UID: "89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 15:49:44 crc kubenswrapper[4811]: I0128 15:49:44.453037 4811 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7-var-lock\") on node \"crc\" DevicePath \"\"" Jan 28 15:49:44 crc kubenswrapper[4811]: I0128 15:49:44.453061 4811 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 28 15:49:44 crc kubenswrapper[4811]: I0128 15:49:44.463539 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7" (UID: "89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:49:44 crc kubenswrapper[4811]: I0128 15:49:44.555454 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 28 15:49:44 crc kubenswrapper[4811]: I0128 15:49:44.922833 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7","Type":"ContainerDied","Data":"a4400e59068fd205c64b233d89ff91af490a68c9715a92a33a7467b3cc423846"} Jan 28 15:49:44 crc kubenswrapper[4811]: I0128 15:49:44.923191 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a4400e59068fd205c64b233d89ff91af490a68c9715a92a33a7467b3cc423846" Jan 28 15:49:44 crc kubenswrapper[4811]: I0128 15:49:44.922850 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 28 15:49:44 crc kubenswrapper[4811]: I0128 15:49:44.926276 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 28 15:49:44 crc kubenswrapper[4811]: I0128 15:49:44.927032 4811 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424" exitCode=0 Jan 28 15:49:44 crc kubenswrapper[4811]: I0128 15:49:44.954281 4811 status_manager.go:851] "Failed to get status for pod" podUID="5f806cee-7e54-4979-a1a2-d42a4c892013" pod="openshift-marketplace/certified-operators-mqvcx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mqvcx\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:44 crc kubenswrapper[4811]: I0128 15:49:44.954537 4811 status_manager.go:851] "Failed to get status for pod" podUID="89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:44 crc kubenswrapper[4811]: I0128 15:49:44.961420 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 28 15:49:44 crc kubenswrapper[4811]: I0128 15:49:44.962227 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 28 15:49:44 crc kubenswrapper[4811]: I0128 15:49:44.962609 4811 status_manager.go:851] "Failed to get status for pod" podUID="5f806cee-7e54-4979-a1a2-d42a4c892013" pod="openshift-marketplace/certified-operators-mqvcx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mqvcx\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:44 crc kubenswrapper[4811]: I0128 15:49:44.962814 4811 status_manager.go:851] "Failed to get status for pod" podUID="89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:44 crc kubenswrapper[4811]: I0128 15:49:44.963144 4811 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:45 crc kubenswrapper[4811]: I0128 15:49:45.060977 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 28 15:49:45 crc kubenswrapper[4811]: I0128 15:49:45.061049 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod 
\"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 28 15:49:45 crc kubenswrapper[4811]: I0128 15:49:45.061073 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 28 15:49:45 crc kubenswrapper[4811]: I0128 15:49:45.061152 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 15:49:45 crc kubenswrapper[4811]: I0128 15:49:45.061273 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 15:49:45 crc kubenswrapper[4811]: I0128 15:49:45.061463 4811 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 28 15:49:45 crc kubenswrapper[4811]: I0128 15:49:45.061475 4811 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Jan 28 15:49:45 crc kubenswrapper[4811]: I0128 15:49:45.061595 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 15:49:45 crc kubenswrapper[4811]: I0128 15:49:45.162514 4811 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 28 15:49:45 crc kubenswrapper[4811]: I0128 15:49:45.794481 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zkcxm" Jan 28 15:49:45 crc kubenswrapper[4811]: I0128 15:49:45.794560 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zkcxm" Jan 28 15:49:45 crc kubenswrapper[4811]: I0128 15:49:45.852961 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zkcxm" Jan 28 15:49:45 crc kubenswrapper[4811]: I0128 15:49:45.854829 4811 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:45 crc kubenswrapper[4811]: I0128 15:49:45.855208 4811 status_manager.go:851] "Failed to get status for pod" podUID="5f806cee-7e54-4979-a1a2-d42a4c892013" pod="openshift-marketplace/certified-operators-mqvcx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mqvcx\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:45 crc kubenswrapper[4811]: I0128 15:49:45.855693 4811 status_manager.go:851] "Failed to get status for pod" podUID="89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:45 crc kubenswrapper[4811]: I0128 15:49:45.855996 4811 status_manager.go:851] "Failed to get status for pod" podUID="d23adda9-57ba-4cbf-be0a-6cec03fbdc1d" pod="openshift-marketplace/community-operators-zkcxm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zkcxm\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:45 crc kubenswrapper[4811]: I0128 15:49:45.936194 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 28 15:49:45 crc kubenswrapper[4811]: I0128 15:49:45.936963 4811 scope.go:117] "RemoveContainer" containerID="03015015ac6736269b093c13f4df7abdad3a954c3336cc0b2de2e44cc5ae8338" Jan 28 15:49:45 crc kubenswrapper[4811]: I0128 15:49:45.937039 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 28 15:49:45 crc kubenswrapper[4811]: I0128 15:49:45.951706 4811 status_manager.go:851] "Failed to get status for pod" podUID="5f806cee-7e54-4979-a1a2-d42a4c892013" pod="openshift-marketplace/certified-operators-mqvcx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mqvcx\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:45 crc kubenswrapper[4811]: I0128 15:49:45.952348 4811 status_manager.go:851] "Failed to get status for pod" podUID="89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:45 crc kubenswrapper[4811]: I0128 15:49:45.952925 4811 status_manager.go:851] "Failed to get status for pod" podUID="d23adda9-57ba-4cbf-be0a-6cec03fbdc1d" pod="openshift-marketplace/community-operators-zkcxm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zkcxm\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:45 crc kubenswrapper[4811]: I0128 15:49:45.953376 4811 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:45 crc kubenswrapper[4811]: I0128 15:49:45.954210 4811 scope.go:117] "RemoveContainer" containerID="dff502d69ae4a602ed69e945be2c2d7abad50b914ddf1d858dec07d6bca37f16" Jan 28 15:49:45 crc kubenswrapper[4811]: I0128 15:49:45.969716 4811 scope.go:117] "RemoveContainer" containerID="ce6d91df8e67b10dadf069fa87b2d8acc9556dc12cbf961e118ee41272db7e15" Jan 28 15:49:45 crc kubenswrapper[4811]: I0128 15:49:45.977029 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zkcxm" Jan 28 15:49:45 crc kubenswrapper[4811]: I0128 15:49:45.977615 4811 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:45 crc kubenswrapper[4811]: I0128 15:49:45.978098 4811 status_manager.go:851] "Failed to get status for pod" podUID="5f806cee-7e54-4979-a1a2-d42a4c892013" pod="openshift-marketplace/certified-operators-mqvcx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mqvcx\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:45 crc kubenswrapper[4811]: I0128 15:49:45.978766 4811 status_manager.go:851] "Failed to get status for pod" podUID="89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:45 crc kubenswrapper[4811]: I0128 15:49:45.979177 4811 status_manager.go:851] "Failed to get status for pod" podUID="d23adda9-57ba-4cbf-be0a-6cec03fbdc1d" 
pod="openshift-marketplace/community-operators-zkcxm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zkcxm\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:45 crc kubenswrapper[4811]: I0128 15:49:45.984498 4811 scope.go:117] "RemoveContainer" containerID="65971815d249a487a919017f3868e971a7db0e20fb26cbb3b636b67f0c592dc5" Jan 28 15:49:45 crc kubenswrapper[4811]: I0128 15:49:45.996269 4811 scope.go:117] "RemoveContainer" containerID="222220dee12a98a62feccc1180fd540b511541327b80c3a9aa58e7964d42e424" Jan 28 15:49:46 crc kubenswrapper[4811]: I0128 15:49:46.010128 4811 scope.go:117] "RemoveContainer" containerID="0198ec900503e6c8564bb2772deaee57c7a3129a6a9c378e5429787fc9cf0124" Jan 28 15:49:46 crc kubenswrapper[4811]: I0128 15:49:46.349692 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Jan 28 15:49:46 crc kubenswrapper[4811]: I0128 15:49:46.415991 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" Jan 28 15:49:46 crc kubenswrapper[4811]: I0128 15:49:46.422366 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" Jan 28 15:49:46 crc kubenswrapper[4811]: I0128 15:49:46.422852 4811 status_manager.go:851] "Failed to get status for pod" podUID="d23adda9-57ba-4cbf-be0a-6cec03fbdc1d" pod="openshift-marketplace/community-operators-zkcxm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zkcxm\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:46 crc kubenswrapper[4811]: I0128 15:49:46.423095 4811 status_manager.go:851] "Failed to get status for pod" podUID="abacc063-d637-43aa-8456-e88063ae9d69" pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-7d4588949f-27gc6\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:46 crc kubenswrapper[4811]: I0128 15:49:46.423343 4811 status_manager.go:851] "Failed to get status for pod" podUID="5f806cee-7e54-4979-a1a2-d42a4c892013" pod="openshift-marketplace/certified-operators-mqvcx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mqvcx\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:46 crc kubenswrapper[4811]: I0128 15:49:46.425224 4811 status_manager.go:851] "Failed to get status for pod" podUID="89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:47 crc kubenswrapper[4811]: E0128 15:49:47.072120 4811 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.233:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 28 15:49:47 crc kubenswrapper[4811]: I0128 15:49:47.072784 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 28 15:49:47 crc kubenswrapper[4811]: E0128 15:49:47.093609 4811 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.233:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188eefd198c12065 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-28 15:49:47.093180517 +0000 UTC m=+279.847544110,LastTimestamp:2026-01-28 15:49:47.093180517 +0000 UTC m=+279.847544110,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 28 15:49:47 crc kubenswrapper[4811]: I0128 15:49:47.951268 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"7c606018902fca6d0a8194da0685983026fde56f75909c7ce0994d66cf38a45d"} Jan 28 15:49:47 crc kubenswrapper[4811]: I0128 15:49:47.951583 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"025d45f8cfe44737bafbe791dee0c4e79cd3a0af237f26e36cbe64df6deb7727"} Jan 28 15:49:48 crc kubenswrapper[4811]: I0128 15:49:48.343595 4811 status_manager.go:851] "Failed to get status for pod" podUID="abacc063-d637-43aa-8456-e88063ae9d69" pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-7d4588949f-27gc6\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:48 crc kubenswrapper[4811]: I0128 15:49:48.344968 4811 status_manager.go:851] "Failed to get status for pod" podUID="5f806cee-7e54-4979-a1a2-d42a4c892013" pod="openshift-marketplace/certified-operators-mqvcx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mqvcx\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:48 crc kubenswrapper[4811]: I0128 15:49:48.346220 4811 status_manager.go:851] "Failed to get status for pod" podUID="89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:48 crc kubenswrapper[4811]: I0128 15:49:48.346646 4811 status_manager.go:851] "Failed to get status for pod" podUID="d23adda9-57ba-4cbf-be0a-6cec03fbdc1d" pod="openshift-marketplace/community-operators-zkcxm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zkcxm\": dial tcp 38.102.83.233:6443: 
connect: connection refused" Jan 28 15:49:48 crc kubenswrapper[4811]: I0128 15:49:48.461296 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-v759z" Jan 28 15:49:48 crc kubenswrapper[4811]: I0128 15:49:48.461356 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-v759z" Jan 28 15:49:48 crc kubenswrapper[4811]: I0128 15:49:48.510221 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-v759z" Jan 28 15:49:49 crc kubenswrapper[4811]: I0128 15:49:48.510826 4811 status_manager.go:851] "Failed to get status for pod" podUID="5f806cee-7e54-4979-a1a2-d42a4c892013" pod="openshift-marketplace/certified-operators-mqvcx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mqvcx\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:49 crc kubenswrapper[4811]: I0128 15:49:48.511387 4811 status_manager.go:851] "Failed to get status for pod" podUID="89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:49 crc kubenswrapper[4811]: I0128 15:49:48.511722 4811 status_manager.go:851] "Failed to get status for pod" podUID="d23adda9-57ba-4cbf-be0a-6cec03fbdc1d" pod="openshift-marketplace/community-operators-zkcxm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zkcxm\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:49 crc kubenswrapper[4811]: I0128 15:49:48.512061 4811 status_manager.go:851] "Failed to get status for pod" podUID="2195bb89-ac75-41a6-8e39-77506c50c101" pod="openshift-marketplace/redhat-operators-v759z" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-v759z\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:49 crc kubenswrapper[4811]: I0128 15:49:48.512357 4811 status_manager.go:851] "Failed to get status for pod" podUID="abacc063-d637-43aa-8456-e88063ae9d69" pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-7d4588949f-27gc6\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:49 crc kubenswrapper[4811]: I0128 15:49:48.959514 4811 status_manager.go:851] "Failed to get status for pod" podUID="2195bb89-ac75-41a6-8e39-77506c50c101" pod="openshift-marketplace/redhat-operators-v759z" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-v759z\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:49 crc kubenswrapper[4811]: E0128 15:49:48.959613 4811 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.233:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 28 15:49:49 crc kubenswrapper[4811]: I0128 15:49:48.959936 4811 status_manager.go:851] "Failed to get status for pod" podUID="abacc063-d637-43aa-8456-e88063ae9d69" 
pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-7d4588949f-27gc6\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:49 crc kubenswrapper[4811]: I0128 15:49:48.960279 4811 status_manager.go:851] "Failed to get status for pod" podUID="89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:49 crc kubenswrapper[4811]: I0128 15:49:48.960571 4811 status_manager.go:851] "Failed to get status for pod" podUID="5f806cee-7e54-4979-a1a2-d42a4c892013" pod="openshift-marketplace/certified-operators-mqvcx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mqvcx\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:49 crc kubenswrapper[4811]: I0128 15:49:48.960968 4811 status_manager.go:851] "Failed to get status for pod" podUID="d23adda9-57ba-4cbf-be0a-6cec03fbdc1d" pod="openshift-marketplace/community-operators-zkcxm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zkcxm\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:49 crc kubenswrapper[4811]: I0128 15:49:48.997882 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-v759z" Jan 28 15:49:49 crc kubenswrapper[4811]: I0128 15:49:48.998545 4811 status_manager.go:851] "Failed to get status for pod" podUID="2195bb89-ac75-41a6-8e39-77506c50c101" pod="openshift-marketplace/redhat-operators-v759z" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-v759z\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:49 crc kubenswrapper[4811]: I0128 15:49:48.999363 4811 status_manager.go:851] "Failed to get status for pod" podUID="abacc063-d637-43aa-8456-e88063ae9d69" pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-7d4588949f-27gc6\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:49 crc kubenswrapper[4811]: I0128 15:49:49.000142 4811 status_manager.go:851] "Failed to get status for pod" podUID="5f806cee-7e54-4979-a1a2-d42a4c892013" pod="openshift-marketplace/certified-operators-mqvcx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mqvcx\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:49 crc kubenswrapper[4811]: I0128 15:49:49.000524 4811 status_manager.go:851] "Failed to get status for pod" podUID="89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:49 crc kubenswrapper[4811]: I0128 15:49:49.000901 4811 status_manager.go:851] "Failed to get status for pod" podUID="d23adda9-57ba-4cbf-be0a-6cec03fbdc1d" pod="openshift-marketplace/community-operators-zkcxm" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zkcxm\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:49 crc kubenswrapper[4811]: E0128 15:49:49.206788 4811 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:49 crc kubenswrapper[4811]: E0128 15:49:49.207808 4811 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:49 crc kubenswrapper[4811]: E0128 15:49:49.208499 4811 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:49 crc kubenswrapper[4811]: E0128 15:49:49.208924 4811 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:49 crc kubenswrapper[4811]: E0128 15:49:49.209281 4811 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:49 crc kubenswrapper[4811]: I0128 15:49:49.209327 4811 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Jan 28 15:49:49 crc kubenswrapper[4811]: E0128 15:49:49.209760 4811 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" interval="200ms" Jan 28 15:49:49 crc kubenswrapper[4811]: E0128 15:49:49.410740 4811 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" interval="400ms" Jan 28 15:49:50 crc kubenswrapper[4811]: E0128 15:49:49.811604 4811 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" interval="800ms" Jan 28 15:49:50 crc kubenswrapper[4811]: E0128 15:49:50.612931 4811 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" interval="1.6s" Jan 28 15:49:51 crc kubenswrapper[4811]: E0128 15:49:51.971562 4811 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.233:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188eefd198c12065 openshift-kube-apiserver 
0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-28 15:49:47.093180517 +0000 UTC m=+279.847544110,LastTimestamp:2026-01-28 15:49:47.093180517 +0000 UTC m=+279.847544110,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 28 15:49:52 crc kubenswrapper[4811]: E0128 15:49:52.214497 4811 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" interval="3.2s" Jan 28 15:49:54 crc kubenswrapper[4811]: I0128 15:49:54.339234 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 28 15:49:54 crc kubenswrapper[4811]: I0128 15:49:54.340225 4811 status_manager.go:851] "Failed to get status for pod" podUID="2195bb89-ac75-41a6-8e39-77506c50c101" pod="openshift-marketplace/redhat-operators-v759z" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-v759z\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:54 crc kubenswrapper[4811]: I0128 15:49:54.340805 4811 status_manager.go:851] "Failed to get status for pod" podUID="abacc063-d637-43aa-8456-e88063ae9d69" pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-7d4588949f-27gc6\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:54 crc kubenswrapper[4811]: I0128 15:49:54.341368 4811 status_manager.go:851] "Failed to get status for pod" podUID="5f806cee-7e54-4979-a1a2-d42a4c892013" pod="openshift-marketplace/certified-operators-mqvcx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mqvcx\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:54 crc kubenswrapper[4811]: I0128 15:49:54.342205 4811 status_manager.go:851] "Failed to get status for pod" podUID="89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:54 crc kubenswrapper[4811]: I0128 15:49:54.342736 4811 status_manager.go:851] "Failed to get status for pod" podUID="d23adda9-57ba-4cbf-be0a-6cec03fbdc1d" pod="openshift-marketplace/community-operators-zkcxm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zkcxm\": dial tcp 38.102.83.233:6443: connect: connection refused" Jan 28 15:49:54 crc kubenswrapper[4811]: I0128 15:49:54.358725 4811 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="1a95a634-aec8-42cd-889d-ababff22b6f2" Jan 28 
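With the apiserver endpoint refusing connections, the node lease controller first exhausts its five in-place update attempts ("failed 5 attempts to update lease"), then falls back to ensuring the lease exists, doubling the retry interval on each failure: 200ms, 400ms, 800ms, 1.6s, 3.2s here, and 6.4s further down. A Go sketch of that doubling backoff, assuming a plain geometric policy rather than the kubelet's exact implementation:

package main

import (
	"errors"
	"fmt"
	"time"
)

// ensureLease retries with a doubling interval, matching the sequence
// in the log: 200ms, 400ms, 800ms, 1.6s, 3.2s, 6.4s, ...
func ensureLease(try func() error, maxAttempts int) error {
	interval := 200 * time.Millisecond
	for i := 0; i < maxAttempts; i++ {
		if err := try(); err == nil {
			return nil
		}
		fmt.Printf("Failed to ensure lease exists, will retry interval=%v\n", interval)
		time.Sleep(interval) // this sketch really sleeps between attempts
		interval *= 2
	}
	return errors.New("lease could not be ensured")
}

func main() {
	refused := func() error { return errors.New("connect: connection refused") }
	_ = ensureLease(refused, 6)
}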
Jan 28 15:49:54 crc kubenswrapper[4811]: I0128 15:49:54.339234 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 28 15:49:54 crc kubenswrapper[4811]: I0128 15:49:54.340225 4811 status_manager.go:851] "Failed to get status for pod" podUID="2195bb89-ac75-41a6-8e39-77506c50c101" pod="openshift-marketplace/redhat-operators-v759z" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-v759z\": dial tcp 38.102.83.233:6443: connect: connection refused"
Jan 28 15:49:54 crc kubenswrapper[4811]: I0128 15:49:54.340805 4811 status_manager.go:851] "Failed to get status for pod" podUID="abacc063-d637-43aa-8456-e88063ae9d69" pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-7d4588949f-27gc6\": dial tcp 38.102.83.233:6443: connect: connection refused"
Jan 28 15:49:54 crc kubenswrapper[4811]: I0128 15:49:54.341368 4811 status_manager.go:851] "Failed to get status for pod" podUID="5f806cee-7e54-4979-a1a2-d42a4c892013" pod="openshift-marketplace/certified-operators-mqvcx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mqvcx\": dial tcp 38.102.83.233:6443: connect: connection refused"
Jan 28 15:49:54 crc kubenswrapper[4811]: I0128 15:49:54.342205 4811 status_manager.go:851] "Failed to get status for pod" podUID="89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused"
Jan 28 15:49:54 crc kubenswrapper[4811]: I0128 15:49:54.342736 4811 status_manager.go:851] "Failed to get status for pod" podUID="d23adda9-57ba-4cbf-be0a-6cec03fbdc1d" pod="openshift-marketplace/community-operators-zkcxm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zkcxm\": dial tcp 38.102.83.233:6443: connect: connection refused"
Jan 28 15:49:54 crc kubenswrapper[4811]: I0128 15:49:54.358725 4811 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="1a95a634-aec8-42cd-889d-ababff22b6f2"
Jan 28 15:49:54 crc kubenswrapper[4811]: I0128 15:49:54.358773 4811 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="1a95a634-aec8-42cd-889d-ababff22b6f2"
Jan 28 15:49:54 crc kubenswrapper[4811]: E0128 15:49:54.359330 4811 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 28 15:49:54 crc kubenswrapper[4811]: I0128 15:49:54.360094 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 28 15:49:54 crc kubenswrapper[4811]: W0128 15:49:54.389098 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-03bff0750751e3935d342243b87da6aa6a6392234f81aa7425958ab3e6a6e085 WatchSource:0}: Error finding container 03bff0750751e3935d342243b87da6aa6a6392234f81aa7425958ab3e6a6e085: Status 404 returned error can't find the container with id 03bff0750751e3935d342243b87da6aa6a6392234f81aa7425958ab3e6a6e085
Jan 28 15:49:55 crc kubenswrapper[4811]: I0128 15:49:55.003287 4811 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="df8892a53b2fc85a26126fd9d17c7a7ec5179f229a67032cec58151bc22cfdbd" exitCode=0
Jan 28 15:49:55 crc kubenswrapper[4811]: I0128 15:49:55.003344 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"df8892a53b2fc85a26126fd9d17c7a7ec5179f229a67032cec58151bc22cfdbd"}
Jan 28 15:49:55 crc kubenswrapper[4811]: I0128 15:49:55.003375 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"03bff0750751e3935d342243b87da6aa6a6392234f81aa7425958ab3e6a6e085"}
Jan 28 15:49:55 crc kubenswrapper[4811]: I0128 15:49:55.003702 4811 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="1a95a634-aec8-42cd-889d-ababff22b6f2"
Jan 28 15:49:55 crc kubenswrapper[4811]: I0128 15:49:55.003719 4811 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="1a95a634-aec8-42cd-889d-ababff22b6f2"
Jan 28 15:49:55 crc kubenswrapper[4811]: E0128 15:49:55.004249 4811 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 28 15:49:55 crc kubenswrapper[4811]: I0128 15:49:55.004538 4811 status_manager.go:851] "Failed to get status for pod" podUID="5f806cee-7e54-4979-a1a2-d42a4c892013" pod="openshift-marketplace/certified-operators-mqvcx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mqvcx\": dial tcp 38.102.83.233:6443: connect: connection refused"
Jan 28 15:49:55 crc kubenswrapper[4811]: I0128 15:49:55.005085 4811 status_manager.go:851] "Failed to get status for pod" podUID="89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused"
Jan 28 15:49:55 crc kubenswrapper[4811]: I0128 15:49:55.005588 4811 status_manager.go:851] "Failed to get status for pod" podUID="d23adda9-57ba-4cbf-be0a-6cec03fbdc1d" pod="openshift-marketplace/community-operators-zkcxm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zkcxm\": dial tcp 38.102.83.233:6443: connect: connection refused"
Jan 28 15:49:55 crc kubenswrapper[4811]: I0128 15:49:55.006030 4811 status_manager.go:851] "Failed to get status for pod" podUID="2195bb89-ac75-41a6-8e39-77506c50c101" pod="openshift-marketplace/redhat-operators-v759z" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-v759z\": dial tcp 38.102.83.233:6443: connect: connection refused"
Jan 28 15:49:55 crc kubenswrapper[4811]: I0128 15:49:55.006498 4811 status_manager.go:851] "Failed to get status for pod" podUID="abacc063-d637-43aa-8456-e88063ae9d69" pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-7d4588949f-27gc6\": dial tcp 38.102.83.233:6443: connect: connection refused"
Jan 28 15:49:55 crc kubenswrapper[4811]: E0128 15:49:55.415715 4811 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" interval="6.4s"
Jan 28 15:49:56 crc kubenswrapper[4811]: I0128 15:49:56.028769 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Jan 28 15:49:56 crc kubenswrapper[4811]: I0128 15:49:56.029026 4811 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="e528919fa4f8c2b8108615f2dd2d79d790aea2e94bb7b2de7bb1cd188fd47ac0" exitCode=1
Jan 28 15:49:56 crc kubenswrapper[4811]: I0128 15:49:56.029094 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"e528919fa4f8c2b8108615f2dd2d79d790aea2e94bb7b2de7bb1cd188fd47ac0"}
Jan 28 15:49:56 crc kubenswrapper[4811]: I0128 15:49:56.029598 4811 scope.go:117] "RemoveContainer" containerID="e528919fa4f8c2b8108615f2dd2d79d790aea2e94bb7b2de7bb1cd188fd47ac0"
Jan 28 15:49:56 crc kubenswrapper[4811]: I0128 15:49:56.037264 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"e799ff998e3024c20a9beb164b3e5e7212960c7cba7cebed99ab9ad0c7670980"}
Jan 28 15:49:56 crc kubenswrapper[4811]: I0128 15:49:56.037325 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"a8bffc1cf1fbccd96840087d4b0eb9030769dda06e1333656995a18c41647aae"}
Jan 28 15:49:56 crc kubenswrapper[4811]: I0128 15:49:56.037342 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"fa1dd4f56a9a2d5f2a9098d24e49adf356baea7d58290ae5a346d1c2896c4cef"}
Jan 28 15:49:56 crc kubenswrapper[4811]: I0128 15:49:56.547407 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 28 15:49:57 crc kubenswrapper[4811]: I0128 15:49:57.047344 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Jan 28 15:49:57 crc kubenswrapper[4811]: I0128 15:49:57.047493 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"94895bf0231687ce88cf3fc9660fef4ccc5d8579dab087ec5c5032fc8a0b75a4"}
Jan 28 15:49:57 crc kubenswrapper[4811]: I0128 15:49:57.050829 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"807fc676facd3f08315b6625c79a53511ca31f3922b3aece1fbddaf8d48bcbb9"}
Jan 28 15:49:57 crc kubenswrapper[4811]: I0128 15:49:57.050850 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"82b89cc436d918230b6406da25ce3d92ca9ad33679b321a5ff9f1952623c00a4"}
Jan 28 15:49:57 crc kubenswrapper[4811]: I0128 15:49:57.050966 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 28 15:49:57 crc kubenswrapper[4811]: I0128 15:49:57.051051 4811 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="1a95a634-aec8-42cd-889d-ababff22b6f2"
Jan 28 15:49:57 crc kubenswrapper[4811]: I0128 15:49:57.051066 4811 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="1a95a634-aec8-42cd-889d-ababff22b6f2"
Jan 28 15:49:59 crc kubenswrapper[4811]: I0128 15:49:59.360211 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 28 15:49:59 crc kubenswrapper[4811]: I0128 15:49:59.360258 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 28 15:49:59 crc kubenswrapper[4811]: I0128 15:49:59.366717 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 28 15:50:02 crc kubenswrapper[4811]: I0128 15:50:02.058493 4811 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 28 15:50:02 crc kubenswrapper[4811]: I0128 15:50:02.078592 4811 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="1a95a634-aec8-42cd-889d-ababff22b6f2"
Jan 28 15:50:02 crc kubenswrapper[4811]: I0128 15:50:02.078624 4811 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="1a95a634-aec8-42cd-889d-ababff22b6f2"
Jan 28 15:50:02 crc kubenswrapper[4811]: I0128 15:50:02.082646 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
15:50:02.084959 4811 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="1d835bc8-991f-41fe-8492-995139344813" Jan 28 15:50:03 crc kubenswrapper[4811]: I0128 15:50:03.084280 4811 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="1a95a634-aec8-42cd-889d-ababff22b6f2" Jan 28 15:50:03 crc kubenswrapper[4811]: I0128 15:50:03.084327 4811 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="1a95a634-aec8-42cd-889d-ababff22b6f2" Jan 28 15:50:04 crc kubenswrapper[4811]: I0128 15:50:04.366518 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 28 15:50:04 crc kubenswrapper[4811]: I0128 15:50:04.593912 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" podUID="f28ff403-47a1-47de-b3f4-f6519b75064f" containerName="oauth-openshift" containerID="cri-o://84169e25180ec9a14d024ee723345e5a801b6eba011a74a1f79967734c92f32a" gracePeriod=15 Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.098886 4811 generic.go:334] "Generic (PLEG): container finished" podID="f28ff403-47a1-47de-b3f4-f6519b75064f" containerID="84169e25180ec9a14d024ee723345e5a801b6eba011a74a1f79967734c92f32a" exitCode=0 Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.099011 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" event={"ID":"f28ff403-47a1-47de-b3f4-f6519b75064f","Type":"ContainerDied","Data":"84169e25180ec9a14d024ee723345e5a801b6eba011a74a1f79967734c92f32a"} Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.140059 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.229238 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-serving-cert\") pod \"f28ff403-47a1-47de-b3f4-f6519b75064f\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.229332 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f28ff403-47a1-47de-b3f4-f6519b75064f-audit-policies\") pod \"f28ff403-47a1-47de-b3f4-f6519b75064f\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.229366 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-session\") pod \"f28ff403-47a1-47de-b3f4-f6519b75064f\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.229408 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-ocp-branding-template\") pod \"f28ff403-47a1-47de-b3f4-f6519b75064f\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.229524 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-cliconfig\") pod \"f28ff403-47a1-47de-b3f4-f6519b75064f\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.229589 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-user-template-error\") pod \"f28ff403-47a1-47de-b3f4-f6519b75064f\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.229651 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-user-idp-0-file-data\") pod \"f28ff403-47a1-47de-b3f4-f6519b75064f\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.229694 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-trusted-ca-bundle\") pod \"f28ff403-47a1-47de-b3f4-f6519b75064f\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.229766 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-service-ca\") pod \"f28ff403-47a1-47de-b3f4-f6519b75064f\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " Jan 28 
15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.229855 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f28ff403-47a1-47de-b3f4-f6519b75064f-audit-dir\") pod \"f28ff403-47a1-47de-b3f4-f6519b75064f\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.229895 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-user-template-login\") pod \"f28ff403-47a1-47de-b3f4-f6519b75064f\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.229961 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-user-template-provider-selection\") pod \"f28ff403-47a1-47de-b3f4-f6519b75064f\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.230016 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-whdh7\" (UniqueName: \"kubernetes.io/projected/f28ff403-47a1-47de-b3f4-f6519b75064f-kube-api-access-whdh7\") pod \"f28ff403-47a1-47de-b3f4-f6519b75064f\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.230049 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-router-certs\") pod \"f28ff403-47a1-47de-b3f4-f6519b75064f\" (UID: \"f28ff403-47a1-47de-b3f4-f6519b75064f\") " Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.230513 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f28ff403-47a1-47de-b3f4-f6519b75064f-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f28ff403-47a1-47de-b3f4-f6519b75064f" (UID: "f28ff403-47a1-47de-b3f4-f6519b75064f"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.231236 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "f28ff403-47a1-47de-b3f4-f6519b75064f" (UID: "f28ff403-47a1-47de-b3f4-f6519b75064f"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.231228 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f28ff403-47a1-47de-b3f4-f6519b75064f-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "f28ff403-47a1-47de-b3f4-f6519b75064f" (UID: "f28ff403-47a1-47de-b3f4-f6519b75064f"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.231685 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "f28ff403-47a1-47de-b3f4-f6519b75064f" (UID: "f28ff403-47a1-47de-b3f4-f6519b75064f"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.232387 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "f28ff403-47a1-47de-b3f4-f6519b75064f" (UID: "f28ff403-47a1-47de-b3f4-f6519b75064f"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.238950 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "f28ff403-47a1-47de-b3f4-f6519b75064f" (UID: "f28ff403-47a1-47de-b3f4-f6519b75064f"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.239161 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f28ff403-47a1-47de-b3f4-f6519b75064f-kube-api-access-whdh7" (OuterVolumeSpecName: "kube-api-access-whdh7") pod "f28ff403-47a1-47de-b3f4-f6519b75064f" (UID: "f28ff403-47a1-47de-b3f4-f6519b75064f"). InnerVolumeSpecName "kube-api-access-whdh7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.242703 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "f28ff403-47a1-47de-b3f4-f6519b75064f" (UID: "f28ff403-47a1-47de-b3f4-f6519b75064f"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.251038 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "f28ff403-47a1-47de-b3f4-f6519b75064f" (UID: "f28ff403-47a1-47de-b3f4-f6519b75064f"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.251857 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "f28ff403-47a1-47de-b3f4-f6519b75064f" (UID: "f28ff403-47a1-47de-b3f4-f6519b75064f"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.252079 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "f28ff403-47a1-47de-b3f4-f6519b75064f" (UID: "f28ff403-47a1-47de-b3f4-f6519b75064f"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.252340 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "f28ff403-47a1-47de-b3f4-f6519b75064f" (UID: "f28ff403-47a1-47de-b3f4-f6519b75064f"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.252495 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "f28ff403-47a1-47de-b3f4-f6519b75064f" (UID: "f28ff403-47a1-47de-b3f4-f6519b75064f"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.252736 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "f28ff403-47a1-47de-b3f4-f6519b75064f" (UID: "f28ff403-47a1-47de-b3f4-f6519b75064f"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.332249 4811 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.332300 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-whdh7\" (UniqueName: \"kubernetes.io/projected/f28ff403-47a1-47de-b3f4-f6519b75064f-kube-api-access-whdh7\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.332315 4811 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.332332 4811 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.332349 4811 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f28ff403-47a1-47de-b3f4-f6519b75064f-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.332365 4811 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.332375 4811 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.332389 4811 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.332403 4811 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.332416 4811 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.332449 4811 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.332464 4811 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.332477 4811 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f28ff403-47a1-47de-b3f4-f6519b75064f-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:05 crc kubenswrapper[4811]: I0128 15:50:05.332488 4811 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f28ff403-47a1-47de-b3f4-f6519b75064f-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:06 crc kubenswrapper[4811]: I0128 15:50:06.107758 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" event={"ID":"f28ff403-47a1-47de-b3f4-f6519b75064f","Type":"ContainerDied","Data":"e234cced49affed23aea5123cb526fc34527ec0add90b5c6d44d5d8d7b9be057"} Jan 28 15:50:06 crc kubenswrapper[4811]: I0128 15:50:06.107822 4811 scope.go:117] "RemoveContainer" containerID="84169e25180ec9a14d024ee723345e5a801b6eba011a74a1f79967734c92f32a" Jan 28 15:50:06 crc kubenswrapper[4811]: I0128 15:50:06.107874 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-dgnfz" Jan 28 15:50:06 crc kubenswrapper[4811]: I0128 15:50:06.547629 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 28 15:50:06 crc kubenswrapper[4811]: I0128 15:50:06.551935 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 28 15:50:07 crc kubenswrapper[4811]: I0128 15:50:07.118698 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 28 15:50:08 crc kubenswrapper[4811]: I0128 15:50:08.146891 4811 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Jan 28 15:50:08 crc kubenswrapper[4811]: I0128 15:50:08.355975 4811 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="1d835bc8-991f-41fe-8492-995139344813" Jan 28 15:50:12 crc kubenswrapper[4811]: I0128 15:50:12.004741 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 28 15:50:12 crc kubenswrapper[4811]: I0128 15:50:12.217920 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 28 15:50:12 crc kubenswrapper[4811]: I0128 15:50:12.272132 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 28 15:50:12 crc kubenswrapper[4811]: I0128 15:50:12.407742 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 28 15:50:12 crc kubenswrapper[4811]: I0128 15:50:12.642864 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 28 15:50:12 crc kubenswrapper[4811]: I0128 15:50:12.717584 4811 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 28 15:50:12 crc kubenswrapper[4811]: I0128 15:50:12.718797 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 28 15:50:12 crc kubenswrapper[4811]: I0128 15:50:12.729187 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 28 15:50:13 crc kubenswrapper[4811]: I0128 15:50:13.274628 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 28 15:50:14 crc kubenswrapper[4811]: I0128 15:50:14.235827 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 28 15:50:14 crc kubenswrapper[4811]: I0128 15:50:14.276736 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 28 15:50:14 crc kubenswrapper[4811]: I0128 15:50:14.333139 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 28 15:50:14 crc kubenswrapper[4811]: I0128 15:50:14.528613 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 28 15:50:14 crc kubenswrapper[4811]: I0128 15:50:14.648464 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 28 15:50:14 crc kubenswrapper[4811]: I0128 15:50:14.695926 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 28 15:50:14 crc kubenswrapper[4811]: I0128 15:50:14.723214 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 28 15:50:14 crc kubenswrapper[4811]: I0128 15:50:14.985757 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 28 15:50:15 crc kubenswrapper[4811]: I0128 15:50:15.054667 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 28 15:50:15 crc kubenswrapper[4811]: I0128 15:50:15.075008 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 28 15:50:15 crc kubenswrapper[4811]: I0128 15:50:15.106882 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 28 15:50:15 crc kubenswrapper[4811]: I0128 15:50:15.161502 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 28 15:50:15 crc kubenswrapper[4811]: I0128 15:50:15.187614 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 28 15:50:15 crc kubenswrapper[4811]: I0128 15:50:15.254784 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 28 15:50:15 crc kubenswrapper[4811]: I0128 15:50:15.268618 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 28 15:50:15 crc kubenswrapper[4811]: I0128 15:50:15.455919 4811 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 28 15:50:15 crc kubenswrapper[4811]: I0128 15:50:15.545820 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 28 15:50:15 crc kubenswrapper[4811]: I0128 15:50:15.582767 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 28 15:50:15 crc kubenswrapper[4811]: I0128 15:50:15.588685 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 28 15:50:15 crc kubenswrapper[4811]: I0128 15:50:15.766579 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 28 15:50:16 crc kubenswrapper[4811]: I0128 15:50:16.218177 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 28 15:50:16 crc kubenswrapper[4811]: I0128 15:50:16.233153 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 28 15:50:16 crc kubenswrapper[4811]: I0128 15:50:16.287167 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 28 15:50:16 crc kubenswrapper[4811]: I0128 15:50:16.315047 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 28 15:50:16 crc kubenswrapper[4811]: I0128 15:50:16.319608 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 28 15:50:16 crc kubenswrapper[4811]: I0128 15:50:16.474609 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 28 15:50:16 crc kubenswrapper[4811]: I0128 15:50:16.511733 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 28 15:50:16 crc kubenswrapper[4811]: I0128 15:50:16.799906 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 28 15:50:16 crc kubenswrapper[4811]: I0128 15:50:16.800267 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 28 15:50:16 crc kubenswrapper[4811]: I0128 15:50:16.929797 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 28 15:50:17 crc kubenswrapper[4811]: I0128 15:50:17.006841 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 28 15:50:17 crc kubenswrapper[4811]: I0128 15:50:17.138901 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 28 15:50:17 crc kubenswrapper[4811]: I0128 15:50:17.224791 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 28 15:50:17 crc kubenswrapper[4811]: I0128 15:50:17.229780 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 28 15:50:17 crc kubenswrapper[4811]: I0128 15:50:17.249807 4811 reflector.go:368] 
Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 28 15:50:17 crc kubenswrapper[4811]: I0128 15:50:17.342775 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 28 15:50:17 crc kubenswrapper[4811]: I0128 15:50:17.453337 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 28 15:50:17 crc kubenswrapper[4811]: I0128 15:50:17.465871 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 28 15:50:17 crc kubenswrapper[4811]: I0128 15:50:17.518479 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 28 15:50:17 crc kubenswrapper[4811]: I0128 15:50:17.562723 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 28 15:50:17 crc kubenswrapper[4811]: I0128 15:50:17.633570 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 28 15:50:17 crc kubenswrapper[4811]: I0128 15:50:17.673957 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 28 15:50:17 crc kubenswrapper[4811]: I0128 15:50:17.701918 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 28 15:50:17 crc kubenswrapper[4811]: I0128 15:50:17.709574 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 28 15:50:17 crc kubenswrapper[4811]: I0128 15:50:17.793826 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 28 15:50:17 crc kubenswrapper[4811]: I0128 15:50:17.825384 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 28 15:50:17 crc kubenswrapper[4811]: I0128 15:50:17.949540 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 28 15:50:18 crc kubenswrapper[4811]: I0128 15:50:18.012812 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 28 15:50:18 crc kubenswrapper[4811]: I0128 15:50:18.153383 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 28 15:50:18 crc kubenswrapper[4811]: I0128 15:50:18.273418 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 28 15:50:18 crc kubenswrapper[4811]: I0128 15:50:18.307307 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 28 15:50:18 crc kubenswrapper[4811]: I0128 15:50:18.388626 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 28 15:50:18 crc kubenswrapper[4811]: I0128 15:50:18.424000 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 28 15:50:18 crc kubenswrapper[4811]: I0128 15:50:18.502211 
4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 28 15:50:18 crc kubenswrapper[4811]: I0128 15:50:18.533262 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 28 15:50:18 crc kubenswrapper[4811]: I0128 15:50:18.639303 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 28 15:50:18 crc kubenswrapper[4811]: I0128 15:50:18.705591 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 28 15:50:18 crc kubenswrapper[4811]: I0128 15:50:18.884669 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 28 15:50:18 crc kubenswrapper[4811]: I0128 15:50:18.903094 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 28 15:50:18 crc kubenswrapper[4811]: I0128 15:50:18.965823 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 28 15:50:19 crc kubenswrapper[4811]: I0128 15:50:19.012800 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 28 15:50:19 crc kubenswrapper[4811]: I0128 15:50:19.022565 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 28 15:50:19 crc kubenswrapper[4811]: I0128 15:50:19.023769 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 28 15:50:19 crc kubenswrapper[4811]: I0128 15:50:19.031643 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 28 15:50:19 crc kubenswrapper[4811]: I0128 15:50:19.039801 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 28 15:50:19 crc kubenswrapper[4811]: I0128 15:50:19.040738 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 28 15:50:19 crc kubenswrapper[4811]: I0128 15:50:19.057710 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 28 15:50:19 crc kubenswrapper[4811]: I0128 15:50:19.216130 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 28 15:50:19 crc kubenswrapper[4811]: I0128 15:50:19.262881 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 28 15:50:19 crc kubenswrapper[4811]: I0128 15:50:19.311867 4811 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 28 15:50:19 crc kubenswrapper[4811]: I0128 15:50:19.323472 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 28 15:50:19 crc kubenswrapper[4811]: I0128 15:50:19.353291 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 28 15:50:19 crc kubenswrapper[4811]: I0128 15:50:19.365735 4811 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 28 15:50:19 crc kubenswrapper[4811]: I0128 15:50:19.401371 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 28 15:50:19 crc kubenswrapper[4811]: I0128 15:50:19.467986 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 28 15:50:19 crc kubenswrapper[4811]: I0128 15:50:19.488057 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 28 15:50:19 crc kubenswrapper[4811]: I0128 15:50:19.496042 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 28 15:50:19 crc kubenswrapper[4811]: I0128 15:50:19.540344 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 28 15:50:19 crc kubenswrapper[4811]: I0128 15:50:19.558491 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 28 15:50:19 crc kubenswrapper[4811]: I0128 15:50:19.644125 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 28 15:50:19 crc kubenswrapper[4811]: I0128 15:50:19.648401 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 28 15:50:19 crc kubenswrapper[4811]: I0128 15:50:19.731312 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 28 15:50:19 crc kubenswrapper[4811]: I0128 15:50:19.860889 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 28 15:50:19 crc kubenswrapper[4811]: I0128 15:50:19.874253 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 28 15:50:19 crc kubenswrapper[4811]: I0128 15:50:19.896293 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 28 15:50:20 crc kubenswrapper[4811]: I0128 15:50:20.003261 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 28 15:50:20 crc kubenswrapper[4811]: I0128 15:50:20.061964 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 28 15:50:20 crc kubenswrapper[4811]: I0128 15:50:20.078072 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 28 15:50:20 crc kubenswrapper[4811]: I0128 15:50:20.082386 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 28 15:50:20 crc kubenswrapper[4811]: I0128 15:50:20.181642 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 28 15:50:20 crc kubenswrapper[4811]: I0128 15:50:20.245292 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 28 15:50:20 crc kubenswrapper[4811]: I0128 15:50:20.319757 4811 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 28 15:50:20 crc kubenswrapper[4811]: I0128 15:50:20.322401 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 28 15:50:20 crc kubenswrapper[4811]: I0128 15:50:20.331189 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 28 15:50:20 crc kubenswrapper[4811]: I0128 15:50:20.334238 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 28 15:50:20 crc kubenswrapper[4811]: I0128 15:50:20.487082 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 28 15:50:20 crc kubenswrapper[4811]: I0128 15:50:20.534925 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 28 15:50:20 crc kubenswrapper[4811]: I0128 15:50:20.600364 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 28 15:50:20 crc kubenswrapper[4811]: I0128 15:50:20.666795 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 28 15:50:20 crc kubenswrapper[4811]: I0128 15:50:20.711488 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 28 15:50:20 crc kubenswrapper[4811]: I0128 15:50:20.743899 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 28 15:50:20 crc kubenswrapper[4811]: I0128 15:50:20.815416 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 28 15:50:20 crc kubenswrapper[4811]: I0128 15:50:20.931834 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 28 15:50:20 crc kubenswrapper[4811]: I0128 15:50:20.959564 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 28 15:50:20 crc kubenswrapper[4811]: I0128 15:50:20.988862 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 28 15:50:21 crc kubenswrapper[4811]: I0128 15:50:21.016924 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 28 15:50:21 crc kubenswrapper[4811]: I0128 15:50:21.145707 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 28 15:50:21 crc kubenswrapper[4811]: I0128 15:50:21.188262 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 28 15:50:21 crc kubenswrapper[4811]: I0128 15:50:21.316226 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 28 15:50:21 crc kubenswrapper[4811]: I0128 15:50:21.316305 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 28 15:50:21 crc kubenswrapper[4811]: I0128 15:50:21.338962 4811 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 28 15:50:21 crc kubenswrapper[4811]: I0128 15:50:21.388475 4811 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 28 15:50:21 crc kubenswrapper[4811]: I0128 15:50:21.406607 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 28 15:50:21 crc kubenswrapper[4811]: I0128 15:50:21.411419 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 28 15:50:21 crc kubenswrapper[4811]: I0128 15:50:21.445305 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 28 15:50:21 crc kubenswrapper[4811]: I0128 15:50:21.458673 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 28 15:50:21 crc kubenswrapper[4811]: I0128 15:50:21.562769 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 28 15:50:21 crc kubenswrapper[4811]: I0128 15:50:21.646537 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 28 15:50:21 crc kubenswrapper[4811]: I0128 15:50:21.662038 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 28 15:50:21 crc kubenswrapper[4811]: I0128 15:50:21.792807 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 28 15:50:21 crc kubenswrapper[4811]: I0128 15:50:21.851228 4811 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 28 15:50:21 crc kubenswrapper[4811]: I0128 15:50:21.870698 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 28 15:50:21 crc kubenswrapper[4811]: I0128 15:50:21.972613 4811 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Jan 28 15:50:22 crc kubenswrapper[4811]: I0128 15:50:22.004032 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 28 15:50:22 crc kubenswrapper[4811]: I0128 15:50:22.021284 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 28 15:50:22 crc kubenswrapper[4811]: I0128 15:50:22.041856 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 28 15:50:22 crc kubenswrapper[4811]: I0128 15:50:22.073641 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 28 15:50:22 crc kubenswrapper[4811]: I0128 15:50:22.311051 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 28 15:50:22 crc kubenswrapper[4811]: I0128 15:50:22.345111 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 28 15:50:22 crc kubenswrapper[4811]: I0128 15:50:22.439126 4811 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-multus"/"openshift-service-ca.crt" Jan 28 15:50:22 crc kubenswrapper[4811]: I0128 15:50:22.489482 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 28 15:50:22 crc kubenswrapper[4811]: I0128 15:50:22.491757 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 28 15:50:22 crc kubenswrapper[4811]: I0128 15:50:22.587342 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 28 15:50:22 crc kubenswrapper[4811]: I0128 15:50:22.623734 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 28 15:50:22 crc kubenswrapper[4811]: I0128 15:50:22.672862 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 28 15:50:22 crc kubenswrapper[4811]: I0128 15:50:22.751063 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 28 15:50:22 crc kubenswrapper[4811]: I0128 15:50:22.880473 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 28 15:50:22 crc kubenswrapper[4811]: I0128 15:50:22.903891 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 28 15:50:22 crc kubenswrapper[4811]: I0128 15:50:22.911266 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 28 15:50:22 crc kubenswrapper[4811]: I0128 15:50:22.955175 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 28 15:50:23 crc kubenswrapper[4811]: I0128 15:50:23.017679 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 28 15:50:23 crc kubenswrapper[4811]: I0128 15:50:23.133136 4811 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 28 15:50:23 crc kubenswrapper[4811]: I0128 15:50:23.182044 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 28 15:50:23 crc kubenswrapper[4811]: I0128 15:50:23.194379 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 28 15:50:23 crc kubenswrapper[4811]: I0128 15:50:23.243931 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 28 15:50:23 crc kubenswrapper[4811]: I0128 15:50:23.268317 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 28 15:50:23 crc kubenswrapper[4811]: I0128 15:50:23.425825 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 28 15:50:23 crc kubenswrapper[4811]: I0128 15:50:23.427125 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 28 15:50:23 crc kubenswrapper[4811]: I0128 15:50:23.483321 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 28 15:50:23 crc 
kubenswrapper[4811]: I0128 15:50:23.532308 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 28 15:50:23 crc kubenswrapper[4811]: I0128 15:50:23.588472 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 28 15:50:23 crc kubenswrapper[4811]: I0128 15:50:23.606083 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 28 15:50:23 crc kubenswrapper[4811]: I0128 15:50:23.627282 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 28 15:50:23 crc kubenswrapper[4811]: I0128 15:50:23.673619 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 28 15:50:23 crc kubenswrapper[4811]: I0128 15:50:23.736992 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 28 15:50:23 crc kubenswrapper[4811]: I0128 15:50:23.738608 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 28 15:50:23 crc kubenswrapper[4811]: I0128 15:50:23.842796 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 28 15:50:23 crc kubenswrapper[4811]: I0128 15:50:23.962256 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 28 15:50:24 crc kubenswrapper[4811]: I0128 15:50:24.053375 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 28 15:50:24 crc kubenswrapper[4811]: I0128 15:50:24.079470 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 28 15:50:24 crc kubenswrapper[4811]: I0128 15:50:24.234950 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 28 15:50:24 crc kubenswrapper[4811]: I0128 15:50:24.461274 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 28 15:50:24 crc kubenswrapper[4811]: I0128 15:50:24.474612 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 28 15:50:24 crc kubenswrapper[4811]: I0128 15:50:24.475864 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 28 15:50:24 crc kubenswrapper[4811]: I0128 15:50:24.551985 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 28 15:50:24 crc kubenswrapper[4811]: I0128 15:50:24.564278 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 28 15:50:24 crc kubenswrapper[4811]: I0128 15:50:24.578481 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 28 15:50:24 crc kubenswrapper[4811]: I0128 15:50:24.678381 4811 reflector.go:368] Caches populated for *v1.ConfigMap 
from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 28 15:50:24 crc kubenswrapper[4811]: I0128 15:50:24.714459 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 28 15:50:24 crc kubenswrapper[4811]: I0128 15:50:24.785284 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 28 15:50:24 crc kubenswrapper[4811]: I0128 15:50:24.917091 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 28 15:50:24 crc kubenswrapper[4811]: I0128 15:50:24.980347 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 28 15:50:25 crc kubenswrapper[4811]: I0128 15:50:25.015191 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 28 15:50:25 crc kubenswrapper[4811]: I0128 15:50:25.043646 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 28 15:50:25 crc kubenswrapper[4811]: I0128 15:50:25.108814 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 28 15:50:25 crc kubenswrapper[4811]: I0128 15:50:25.202917 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 28 15:50:25 crc kubenswrapper[4811]: I0128 15:50:25.260635 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 28 15:50:25 crc kubenswrapper[4811]: I0128 15:50:25.310232 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 28 15:50:25 crc kubenswrapper[4811]: I0128 15:50:25.478854 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 28 15:50:25 crc kubenswrapper[4811]: I0128 15:50:25.630128 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 28 15:50:25 crc kubenswrapper[4811]: I0128 15:50:25.712976 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 28 15:50:25 crc kubenswrapper[4811]: I0128 15:50:25.767175 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 28 15:50:25 crc kubenswrapper[4811]: I0128 15:50:25.810636 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 28 15:50:25 crc kubenswrapper[4811]: I0128 15:50:25.811466 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 28 15:50:25 crc kubenswrapper[4811]: I0128 15:50:25.940862 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 28 15:50:26 crc kubenswrapper[4811]: I0128 15:50:26.134083 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 28 15:50:26 crc kubenswrapper[4811]: I0128 15:50:26.270704 4811 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 28 15:50:26 crc kubenswrapper[4811]: I0128 15:50:26.320073 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 28 15:50:26 crc kubenswrapper[4811]: I0128 15:50:26.429207 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 28 15:50:26 crc kubenswrapper[4811]: I0128 15:50:26.500898 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 28 15:50:26 crc kubenswrapper[4811]: I0128 15:50:26.528638 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 28 15:50:26 crc kubenswrapper[4811]: I0128 15:50:26.633728 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 28 15:50:26 crc kubenswrapper[4811]: I0128 15:50:26.667925 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 28 15:50:26 crc kubenswrapper[4811]: I0128 15:50:26.682107 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 28 15:50:26 crc kubenswrapper[4811]: I0128 15:50:26.683850 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 28 15:50:26 crc kubenswrapper[4811]: I0128 15:50:26.801378 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 28 15:50:26 crc kubenswrapper[4811]: I0128 15:50:26.847869 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 28 15:50:26 crc kubenswrapper[4811]: I0128 15:50:26.848671 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 28 15:50:26 crc kubenswrapper[4811]: I0128 15:50:26.893830 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 28 15:50:26 crc kubenswrapper[4811]: I0128 15:50:26.894231 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 28 15:50:27 crc kubenswrapper[4811]: I0128 15:50:27.140738 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 28 15:50:27 crc kubenswrapper[4811]: I0128 15:50:27.337987 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 28 15:50:27 crc kubenswrapper[4811]: I0128 15:50:27.373696 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 28 15:50:27 crc kubenswrapper[4811]: I0128 15:50:27.494592 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 28 15:50:27 crc kubenswrapper[4811]: I0128 15:50:27.589022 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 28 15:50:27 crc kubenswrapper[4811]: I0128 
15:50:27.614278 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 28 15:50:27 crc kubenswrapper[4811]: I0128 15:50:27.855256 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.044983 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.052801 4811 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.054575 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.058127 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-dgnfz","openshift-marketplace/certified-operators-mqvcx","openshift-kube-apiserver/kube-apiserver-crc"] Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.058203 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-authentication/oauth-openshift-565657c9dc-8x4ss"] Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.058937 4811 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="1a95a634-aec8-42cd-889d-ababff22b6f2" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.058971 4811 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="1a95a634-aec8-42cd-889d-ababff22b6f2" Jan 28 15:50:28 crc kubenswrapper[4811]: E0128 15:50:28.059363 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7" containerName="installer" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.059382 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7" containerName="installer" Jan 28 15:50:28 crc kubenswrapper[4811]: E0128 15:50:28.059454 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f28ff403-47a1-47de-b3f4-f6519b75064f" containerName="oauth-openshift" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.059470 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f28ff403-47a1-47de-b3f4-f6519b75064f" containerName="oauth-openshift" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.060687 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="f28ff403-47a1-47de-b3f4-f6519b75064f" containerName="oauth-openshift" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.060728 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="89a4207c-2c5f-428a-ae4f-ae0adbc4a6e7" containerName="installer" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.069023 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.073041 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.074728 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.074966 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.075105 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.075244 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.076273 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.079474 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.079725 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.079789 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.079790 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.080312 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.081729 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.082217 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.085393 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.106569 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.106858 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.144730 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.145016 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=26.144998094 podStartE2EDuration="26.144998094s" 
podCreationTimestamp="2026-01-28 15:50:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:50:28.144416189 +0000 UTC m=+320.898779772" watchObservedRunningTime="2026-01-28 15:50:28.144998094 +0000 UTC m=+320.899361677" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.221027 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.228367 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e484b22d-bd34-44c6-af84-9a67c3cc3940-audit-dir\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.228479 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-user-template-login\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.228572 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pv9lb\" (UniqueName: \"kubernetes.io/projected/e484b22d-bd34-44c6-af84-9a67c3cc3940-kube-api-access-pv9lb\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.228612 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-system-session\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.228710 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-user-template-error\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.228777 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e484b22d-bd34-44c6-af84-9a67c3cc3940-audit-policies\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.228831 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-system-cliconfig\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: 
\"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.228900 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-system-service-ca\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.228928 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.228985 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-system-router-certs\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.229023 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.229060 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.229087 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.229177 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-system-serving-cert\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.241181 4811 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-console-operator"/"serving-cert" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.273089 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.279782 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.330565 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pv9lb\" (UniqueName: \"kubernetes.io/projected/e484b22d-bd34-44c6-af84-9a67c3cc3940-kube-api-access-pv9lb\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.330608 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-system-session\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.330636 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-user-template-error\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.330661 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e484b22d-bd34-44c6-af84-9a67c3cc3940-audit-policies\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.330698 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-system-cliconfig\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.330733 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-system-service-ca\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.330752 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.330775 4811 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-system-router-certs\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.330795 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.330813 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.330834 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.330856 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-system-serving-cert\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.330875 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e484b22d-bd34-44c6-af84-9a67c3cc3940-audit-dir\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.330899 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-user-template-login\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.331569 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-system-cliconfig\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.331690 4811 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e484b22d-bd34-44c6-af84-9a67c3cc3940-audit-policies\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.331782 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e484b22d-bd34-44c6-af84-9a67c3cc3940-audit-dir\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.331829 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-system-service-ca\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.332656 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.336828 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-user-template-error\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.336961 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-system-router-certs\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.337347 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-system-serving-cert\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.338243 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-system-session\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.338494 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.339984 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.343155 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-user-template-login\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.348773 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f806cee-7e54-4979-a1a2-d42a4c892013" path="/var/lib/kubelet/pods/5f806cee-7e54-4979-a1a2-d42a4c892013/volumes" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.349662 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f28ff403-47a1-47de-b3f4-f6519b75064f" path="/var/lib/kubelet/pods/f28ff403-47a1-47de-b3f4-f6519b75064f/volumes" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.351152 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e484b22d-bd34-44c6-af84-9a67c3cc3940-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.352638 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pv9lb\" (UniqueName: \"kubernetes.io/projected/e484b22d-bd34-44c6-af84-9a67c3cc3940-kube-api-access-pv9lb\") pod \"oauth-openshift-565657c9dc-8x4ss\" (UID: \"e484b22d-bd34-44c6-af84-9a67c3cc3940\") " pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.410464 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.573920 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.671644 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.672244 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 28 15:50:28 crc kubenswrapper[4811]: I0128 15:50:28.814659 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-565657c9dc-8x4ss"] Jan 28 15:50:29 crc kubenswrapper[4811]: I0128 15:50:29.029814 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 28 15:50:29 crc kubenswrapper[4811]: I0128 15:50:29.064733 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 28 15:50:29 crc kubenswrapper[4811]: I0128 15:50:29.122977 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 28 15:50:29 crc kubenswrapper[4811]: I0128 15:50:29.183782 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 28 15:50:29 crc kubenswrapper[4811]: I0128 15:50:29.221537 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" event={"ID":"e484b22d-bd34-44c6-af84-9a67c3cc3940","Type":"ContainerStarted","Data":"0d89bb613f591bd4b764a4132bfb93b87af6569e4dfd34c440f3f7fe0ce6a9e6"} Jan 28 15:50:29 crc kubenswrapper[4811]: I0128 15:50:29.221587 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" event={"ID":"e484b22d-bd34-44c6-af84-9a67c3cc3940","Type":"ContainerStarted","Data":"996a427c52d566a3596f77d7f38355a3cde738d0bd66570e4d5c91003993a60a"} Jan 28 15:50:29 crc kubenswrapper[4811]: I0128 15:50:29.221912 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:29 crc kubenswrapper[4811]: I0128 15:50:29.244281 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" podStartSLOduration=50.244262296 podStartE2EDuration="50.244262296s" podCreationTimestamp="2026-01-28 15:49:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:50:29.241516272 +0000 UTC m=+321.995879865" watchObservedRunningTime="2026-01-28 15:50:29.244262296 +0000 UTC m=+321.998625879" Jan 28 15:50:29 crc kubenswrapper[4811]: I0128 15:50:29.330659 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 28 15:50:29 crc kubenswrapper[4811]: I0128 15:50:29.468760 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 28 15:50:29 crc kubenswrapper[4811]: I0128 15:50:29.491958 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-authentication/oauth-openshift-565657c9dc-8x4ss" Jan 28 15:50:29 crc kubenswrapper[4811]: I0128 15:50:29.508314 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 28 15:50:30 crc kubenswrapper[4811]: I0128 15:50:30.023982 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 28 15:50:30 crc kubenswrapper[4811]: I0128 15:50:30.765830 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 28 15:50:30 crc kubenswrapper[4811]: I0128 15:50:30.953763 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 28 15:50:34 crc kubenswrapper[4811]: I0128 15:50:34.087583 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7d4588949f-27gc6"] Jan 28 15:50:34 crc kubenswrapper[4811]: I0128 15:50:34.088084 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" podUID="abacc063-d637-43aa-8456-e88063ae9d69" containerName="controller-manager" containerID="cri-o://a2b6ab2aa4b5e5e9162660191b2f2d5a16288e25e58e0a7a4c25adc07a4b2da9" gracePeriod=30 Jan 28 15:50:34 crc kubenswrapper[4811]: I0128 15:50:34.187066 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-f5dbfb54b-p5jv4"] Jan 28 15:50:34 crc kubenswrapper[4811]: I0128 15:50:34.187301 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-f5dbfb54b-p5jv4" podUID="4a79731b-e3a0-4bf4-8ca2-985516ee313e" containerName="route-controller-manager" containerID="cri-o://13d77925eadc21a87f3ff21518e989e13b88c4fd3c47b0c7a52dc7f2ced9a7ff" gracePeriod=30 Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.251834 4811 generic.go:334] "Generic (PLEG): container finished" podID="abacc063-d637-43aa-8456-e88063ae9d69" containerID="a2b6ab2aa4b5e5e9162660191b2f2d5a16288e25e58e0a7a4c25adc07a4b2da9" exitCode=0 Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.251919 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" event={"ID":"abacc063-d637-43aa-8456-e88063ae9d69","Type":"ContainerDied","Data":"a2b6ab2aa4b5e5e9162660191b2f2d5a16288e25e58e0a7a4c25adc07a4b2da9"} Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.254351 4811 generic.go:334] "Generic (PLEG): container finished" podID="4a79731b-e3a0-4bf4-8ca2-985516ee313e" containerID="13d77925eadc21a87f3ff21518e989e13b88c4fd3c47b0c7a52dc7f2ced9a7ff" exitCode=0 Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.254392 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-f5dbfb54b-p5jv4" event={"ID":"4a79731b-e3a0-4bf4-8ca2-985516ee313e","Type":"ContainerDied","Data":"13d77925eadc21a87f3ff21518e989e13b88c4fd3c47b0c7a52dc7f2ced9a7ff"} Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.523705 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-f5dbfb54b-p5jv4" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.564470 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-df5b59c7f-fjlcn"] Jan 28 15:50:35 crc kubenswrapper[4811]: E0128 15:50:35.564915 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a79731b-e3a0-4bf4-8ca2-985516ee313e" containerName="route-controller-manager" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.564940 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a79731b-e3a0-4bf4-8ca2-985516ee313e" containerName="route-controller-manager" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.565127 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a79731b-e3a0-4bf4-8ca2-985516ee313e" containerName="route-controller-manager" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.565744 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-df5b59c7f-fjlcn" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.572093 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-df5b59c7f-fjlcn"] Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.643410 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.660607 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4a79731b-e3a0-4bf4-8ca2-985516ee313e-client-ca\") pod \"4a79731b-e3a0-4bf4-8ca2-985516ee313e\" (UID: \"4a79731b-e3a0-4bf4-8ca2-985516ee313e\") " Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.663057 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a79731b-e3a0-4bf4-8ca2-985516ee313e-client-ca" (OuterVolumeSpecName: "client-ca") pod "4a79731b-e3a0-4bf4-8ca2-985516ee313e" (UID: "4a79731b-e3a0-4bf4-8ca2-985516ee313e"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.663238 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2zssw\" (UniqueName: \"kubernetes.io/projected/4a79731b-e3a0-4bf4-8ca2-985516ee313e-kube-api-access-2zssw\") pod \"4a79731b-e3a0-4bf4-8ca2-985516ee313e\" (UID: \"4a79731b-e3a0-4bf4-8ca2-985516ee313e\") " Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.664115 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/abacc063-d637-43aa-8456-e88063ae9d69-serving-cert\") pod \"abacc063-d637-43aa-8456-e88063ae9d69\" (UID: \"abacc063-d637-43aa-8456-e88063ae9d69\") " Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.664162 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4a79731b-e3a0-4bf4-8ca2-985516ee313e-serving-cert\") pod \"4a79731b-e3a0-4bf4-8ca2-985516ee313e\" (UID: \"4a79731b-e3a0-4bf4-8ca2-985516ee313e\") " Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.664204 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/abacc063-d637-43aa-8456-e88063ae9d69-config\") pod \"abacc063-d637-43aa-8456-e88063ae9d69\" (UID: \"abacc063-d637-43aa-8456-e88063ae9d69\") " Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.664235 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/abacc063-d637-43aa-8456-e88063ae9d69-proxy-ca-bundles\") pod \"abacc063-d637-43aa-8456-e88063ae9d69\" (UID: \"abacc063-d637-43aa-8456-e88063ae9d69\") " Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.664263 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a79731b-e3a0-4bf4-8ca2-985516ee313e-config\") pod \"4a79731b-e3a0-4bf4-8ca2-985516ee313e\" (UID: \"4a79731b-e3a0-4bf4-8ca2-985516ee313e\") " Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.664284 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/abacc063-d637-43aa-8456-e88063ae9d69-client-ca\") pod \"abacc063-d637-43aa-8456-e88063ae9d69\" (UID: \"abacc063-d637-43aa-8456-e88063ae9d69\") " Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.665215 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a79731b-e3a0-4bf4-8ca2-985516ee313e-config" (OuterVolumeSpecName: "config") pod "4a79731b-e3a0-4bf4-8ca2-985516ee313e" (UID: "4a79731b-e3a0-4bf4-8ca2-985516ee313e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.665245 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abacc063-d637-43aa-8456-e88063ae9d69-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "abacc063-d637-43aa-8456-e88063ae9d69" (UID: "abacc063-d637-43aa-8456-e88063ae9d69"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.665374 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5r5ml\" (UniqueName: \"kubernetes.io/projected/98999c63-78af-47fc-b47c-e6d2eade6033-kube-api-access-5r5ml\") pod \"route-controller-manager-df5b59c7f-fjlcn\" (UID: \"98999c63-78af-47fc-b47c-e6d2eade6033\") " pod="openshift-route-controller-manager/route-controller-manager-df5b59c7f-fjlcn" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.665374 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abacc063-d637-43aa-8456-e88063ae9d69-config" (OuterVolumeSpecName: "config") pod "abacc063-d637-43aa-8456-e88063ae9d69" (UID: "abacc063-d637-43aa-8456-e88063ae9d69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.665464 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/98999c63-78af-47fc-b47c-e6d2eade6033-config\") pod \"route-controller-manager-df5b59c7f-fjlcn\" (UID: \"98999c63-78af-47fc-b47c-e6d2eade6033\") " pod="openshift-route-controller-manager/route-controller-manager-df5b59c7f-fjlcn" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.665501 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/98999c63-78af-47fc-b47c-e6d2eade6033-serving-cert\") pod \"route-controller-manager-df5b59c7f-fjlcn\" (UID: \"98999c63-78af-47fc-b47c-e6d2eade6033\") " pod="openshift-route-controller-manager/route-controller-manager-df5b59c7f-fjlcn" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.665548 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/98999c63-78af-47fc-b47c-e6d2eade6033-client-ca\") pod \"route-controller-manager-df5b59c7f-fjlcn\" (UID: \"98999c63-78af-47fc-b47c-e6d2eade6033\") " pod="openshift-route-controller-manager/route-controller-manager-df5b59c7f-fjlcn" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.665994 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abacc063-d637-43aa-8456-e88063ae9d69-client-ca" (OuterVolumeSpecName: "client-ca") pod "abacc063-d637-43aa-8456-e88063ae9d69" (UID: "abacc063-d637-43aa-8456-e88063ae9d69"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.666114 4811 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4a79731b-e3a0-4bf4-8ca2-985516ee313e-client-ca\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.666142 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/abacc063-d637-43aa-8456-e88063ae9d69-config\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.666153 4811 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/abacc063-d637-43aa-8456-e88063ae9d69-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.666166 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a79731b-e3a0-4bf4-8ca2-985516ee313e-config\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.666176 4811 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/abacc063-d637-43aa-8456-e88063ae9d69-client-ca\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.669694 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a79731b-e3a0-4bf4-8ca2-985516ee313e-kube-api-access-2zssw" (OuterVolumeSpecName: "kube-api-access-2zssw") pod "4a79731b-e3a0-4bf4-8ca2-985516ee313e" (UID: "4a79731b-e3a0-4bf4-8ca2-985516ee313e"). InnerVolumeSpecName "kube-api-access-2zssw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.670759 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abacc063-d637-43aa-8456-e88063ae9d69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "abacc063-d637-43aa-8456-e88063ae9d69" (UID: "abacc063-d637-43aa-8456-e88063ae9d69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.673613 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a79731b-e3a0-4bf4-8ca2-985516ee313e-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "4a79731b-e3a0-4bf4-8ca2-985516ee313e" (UID: "4a79731b-e3a0-4bf4-8ca2-985516ee313e"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.766742 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sdm7t\" (UniqueName: \"kubernetes.io/projected/abacc063-d637-43aa-8456-e88063ae9d69-kube-api-access-sdm7t\") pod \"abacc063-d637-43aa-8456-e88063ae9d69\" (UID: \"abacc063-d637-43aa-8456-e88063ae9d69\") " Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.766932 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5r5ml\" (UniqueName: \"kubernetes.io/projected/98999c63-78af-47fc-b47c-e6d2eade6033-kube-api-access-5r5ml\") pod \"route-controller-manager-df5b59c7f-fjlcn\" (UID: \"98999c63-78af-47fc-b47c-e6d2eade6033\") " pod="openshift-route-controller-manager/route-controller-manager-df5b59c7f-fjlcn" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.766968 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/98999c63-78af-47fc-b47c-e6d2eade6033-config\") pod \"route-controller-manager-df5b59c7f-fjlcn\" (UID: \"98999c63-78af-47fc-b47c-e6d2eade6033\") " pod="openshift-route-controller-manager/route-controller-manager-df5b59c7f-fjlcn" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.766999 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/98999c63-78af-47fc-b47c-e6d2eade6033-serving-cert\") pod \"route-controller-manager-df5b59c7f-fjlcn\" (UID: \"98999c63-78af-47fc-b47c-e6d2eade6033\") " pod="openshift-route-controller-manager/route-controller-manager-df5b59c7f-fjlcn" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.767024 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/98999c63-78af-47fc-b47c-e6d2eade6033-client-ca\") pod \"route-controller-manager-df5b59c7f-fjlcn\" (UID: \"98999c63-78af-47fc-b47c-e6d2eade6033\") " pod="openshift-route-controller-manager/route-controller-manager-df5b59c7f-fjlcn" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.767087 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2zssw\" (UniqueName: \"kubernetes.io/projected/4a79731b-e3a0-4bf4-8ca2-985516ee313e-kube-api-access-2zssw\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.767104 4811 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/abacc063-d637-43aa-8456-e88063ae9d69-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.767117 4811 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4a79731b-e3a0-4bf4-8ca2-985516ee313e-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.768143 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/98999c63-78af-47fc-b47c-e6d2eade6033-client-ca\") pod \"route-controller-manager-df5b59c7f-fjlcn\" (UID: \"98999c63-78af-47fc-b47c-e6d2eade6033\") " pod="openshift-route-controller-manager/route-controller-manager-df5b59c7f-fjlcn" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.768737 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/98999c63-78af-47fc-b47c-e6d2eade6033-config\") pod \"route-controller-manager-df5b59c7f-fjlcn\" (UID: \"98999c63-78af-47fc-b47c-e6d2eade6033\") " pod="openshift-route-controller-manager/route-controller-manager-df5b59c7f-fjlcn" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.770608 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abacc063-d637-43aa-8456-e88063ae9d69-kube-api-access-sdm7t" (OuterVolumeSpecName: "kube-api-access-sdm7t") pod "abacc063-d637-43aa-8456-e88063ae9d69" (UID: "abacc063-d637-43aa-8456-e88063ae9d69"). InnerVolumeSpecName "kube-api-access-sdm7t". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.771246 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/98999c63-78af-47fc-b47c-e6d2eade6033-serving-cert\") pod \"route-controller-manager-df5b59c7f-fjlcn\" (UID: \"98999c63-78af-47fc-b47c-e6d2eade6033\") " pod="openshift-route-controller-manager/route-controller-manager-df5b59c7f-fjlcn" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.826300 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5r5ml\" (UniqueName: \"kubernetes.io/projected/98999c63-78af-47fc-b47c-e6d2eade6033-kube-api-access-5r5ml\") pod \"route-controller-manager-df5b59c7f-fjlcn\" (UID: \"98999c63-78af-47fc-b47c-e6d2eade6033\") " pod="openshift-route-controller-manager/route-controller-manager-df5b59c7f-fjlcn" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.870244 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sdm7t\" (UniqueName: \"kubernetes.io/projected/abacc063-d637-43aa-8456-e88063ae9d69-kube-api-access-sdm7t\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.891706 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-df5b59c7f-fjlcn" Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.953258 4811 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 28 15:50:35 crc kubenswrapper[4811]: I0128 15:50:35.953530 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://7c606018902fca6d0a8194da0685983026fde56f75909c7ce0994d66cf38a45d" gracePeriod=5 Jan 28 15:50:36 crc kubenswrapper[4811]: I0128 15:50:36.106550 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-df5b59c7f-fjlcn"] Jan 28 15:50:36 crc kubenswrapper[4811]: I0128 15:50:36.260874 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-df5b59c7f-fjlcn" event={"ID":"98999c63-78af-47fc-b47c-e6d2eade6033","Type":"ContainerStarted","Data":"a1b4972f2929e546ff0a019fe428557844ad7e0129b51fb73ffdc0b66dfdc8a8"} Jan 28 15:50:36 crc kubenswrapper[4811]: I0128 15:50:36.260923 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-df5b59c7f-fjlcn" event={"ID":"98999c63-78af-47fc-b47c-e6d2eade6033","Type":"ContainerStarted","Data":"88fc1c915a74eccb7156dcc9bfb37aa365db77d51521ce061d8d51576eba5467"} Jan 28 15:50:36 crc kubenswrapper[4811]: I0128 15:50:36.261383 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-df5b59c7f-fjlcn" Jan 28 15:50:36 crc kubenswrapper[4811]: I0128 15:50:36.262625 4811 patch_prober.go:28] interesting pod/route-controller-manager-df5b59c7f-fjlcn container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.63:8443/healthz\": dial tcp 10.217.0.63:8443: connect: connection refused" start-of-body= Jan 28 15:50:36 crc kubenswrapper[4811]: I0128 15:50:36.262665 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-df5b59c7f-fjlcn" podUID="98999c63-78af-47fc-b47c-e6d2eade6033" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.63:8443/healthz\": dial tcp 10.217.0.63:8443: connect: connection refused" Jan 28 15:50:36 crc kubenswrapper[4811]: I0128 15:50:36.263151 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" Jan 28 15:50:36 crc kubenswrapper[4811]: I0128 15:50:36.263404 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7d4588949f-27gc6" event={"ID":"abacc063-d637-43aa-8456-e88063ae9d69","Type":"ContainerDied","Data":"fa71079d38d8a33c6023f5c4c07613278a314e2c6b2c935f149beb116d3c299c"} Jan 28 15:50:36 crc kubenswrapper[4811]: I0128 15:50:36.263491 4811 scope.go:117] "RemoveContainer" containerID="a2b6ab2aa4b5e5e9162660191b2f2d5a16288e25e58e0a7a4c25adc07a4b2da9" Jan 28 15:50:36 crc kubenswrapper[4811]: I0128 15:50:36.275248 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-f5dbfb54b-p5jv4" event={"ID":"4a79731b-e3a0-4bf4-8ca2-985516ee313e","Type":"ContainerDied","Data":"75beef60ac78453e1e17e3d9c2449431e59bab736b12983dd616cd6d1426b988"} Jan 28 15:50:36 crc kubenswrapper[4811]: I0128 15:50:36.275321 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-f5dbfb54b-p5jv4" Jan 28 15:50:36 crc kubenswrapper[4811]: I0128 15:50:36.290987 4811 scope.go:117] "RemoveContainer" containerID="13d77925eadc21a87f3ff21518e989e13b88c4fd3c47b0c7a52dc7f2ced9a7ff" Jan 28 15:50:36 crc kubenswrapper[4811]: I0128 15:50:36.296846 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-df5b59c7f-fjlcn" podStartSLOduration=2.296825637 podStartE2EDuration="2.296825637s" podCreationTimestamp="2026-01-28 15:50:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:50:36.287647797 +0000 UTC m=+329.042011380" watchObservedRunningTime="2026-01-28 15:50:36.296825637 +0000 UTC m=+329.051189220" Jan 28 15:50:36 crc kubenswrapper[4811]: I0128 15:50:36.312622 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7d4588949f-27gc6"] Jan 28 15:50:36 crc kubenswrapper[4811]: I0128 15:50:36.321468 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7d4588949f-27gc6"] Jan 28 15:50:36 crc kubenswrapper[4811]: I0128 15:50:36.327176 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-f5dbfb54b-p5jv4"] Jan 28 15:50:36 crc kubenswrapper[4811]: I0128 15:50:36.331455 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-f5dbfb54b-p5jv4"] Jan 28 15:50:36 crc kubenswrapper[4811]: I0128 15:50:36.346085 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a79731b-e3a0-4bf4-8ca2-985516ee313e" path="/var/lib/kubelet/pods/4a79731b-e3a0-4bf4-8ca2-985516ee313e/volumes" Jan 28 15:50:36 crc kubenswrapper[4811]: I0128 15:50:36.346610 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="abacc063-d637-43aa-8456-e88063ae9d69" path="/var/lib/kubelet/pods/abacc063-d637-43aa-8456-e88063ae9d69/volumes" Jan 28 15:50:37 crc kubenswrapper[4811]: I0128 15:50:37.286732 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-df5b59c7f-fjlcn" Jan 28 15:50:37 crc kubenswrapper[4811]: I0128 15:50:37.761220 4811 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-684c967ddf-w9xwk"] Jan 28 15:50:37 crc kubenswrapper[4811]: E0128 15:50:37.761546 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 28 15:50:37 crc kubenswrapper[4811]: I0128 15:50:37.761565 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 28 15:50:37 crc kubenswrapper[4811]: E0128 15:50:37.761578 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abacc063-d637-43aa-8456-e88063ae9d69" containerName="controller-manager" Jan 28 15:50:37 crc kubenswrapper[4811]: I0128 15:50:37.761587 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="abacc063-d637-43aa-8456-e88063ae9d69" containerName="controller-manager" Jan 28 15:50:37 crc kubenswrapper[4811]: I0128 15:50:37.761766 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="abacc063-d637-43aa-8456-e88063ae9d69" containerName="controller-manager" Jan 28 15:50:37 crc kubenswrapper[4811]: I0128 15:50:37.761787 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 28 15:50:37 crc kubenswrapper[4811]: I0128 15:50:37.762290 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-684c967ddf-w9xwk" Jan 28 15:50:37 crc kubenswrapper[4811]: I0128 15:50:37.764474 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 28 15:50:37 crc kubenswrapper[4811]: I0128 15:50:37.765073 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 28 15:50:37 crc kubenswrapper[4811]: I0128 15:50:37.765232 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 28 15:50:37 crc kubenswrapper[4811]: I0128 15:50:37.765410 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 28 15:50:37 crc kubenswrapper[4811]: I0128 15:50:37.765649 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 28 15:50:37 crc kubenswrapper[4811]: I0128 15:50:37.765938 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 28 15:50:37 crc kubenswrapper[4811]: I0128 15:50:37.772875 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-684c967ddf-w9xwk"] Jan 28 15:50:37 crc kubenswrapper[4811]: I0128 15:50:37.781617 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 28 15:50:37 crc kubenswrapper[4811]: I0128 15:50:37.899507 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d-client-ca\") pod \"controller-manager-684c967ddf-w9xwk\" (UID: \"a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d\") " pod="openshift-controller-manager/controller-manager-684c967ddf-w9xwk" Jan 28 15:50:37 crc kubenswrapper[4811]: I0128 15:50:37.899557 4811 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d-serving-cert\") pod \"controller-manager-684c967ddf-w9xwk\" (UID: \"a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d\") " pod="openshift-controller-manager/controller-manager-684c967ddf-w9xwk" Jan 28 15:50:37 crc kubenswrapper[4811]: I0128 15:50:37.899584 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d-config\") pod \"controller-manager-684c967ddf-w9xwk\" (UID: \"a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d\") " pod="openshift-controller-manager/controller-manager-684c967ddf-w9xwk" Jan 28 15:50:37 crc kubenswrapper[4811]: I0128 15:50:37.899659 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d-proxy-ca-bundles\") pod \"controller-manager-684c967ddf-w9xwk\" (UID: \"a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d\") " pod="openshift-controller-manager/controller-manager-684c967ddf-w9xwk" Jan 28 15:50:37 crc kubenswrapper[4811]: I0128 15:50:37.899690 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dt2f\" (UniqueName: \"kubernetes.io/projected/a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d-kube-api-access-6dt2f\") pod \"controller-manager-684c967ddf-w9xwk\" (UID: \"a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d\") " pod="openshift-controller-manager/controller-manager-684c967ddf-w9xwk" Jan 28 15:50:38 crc kubenswrapper[4811]: I0128 15:50:38.000963 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d-client-ca\") pod \"controller-manager-684c967ddf-w9xwk\" (UID: \"a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d\") " pod="openshift-controller-manager/controller-manager-684c967ddf-w9xwk" Jan 28 15:50:38 crc kubenswrapper[4811]: I0128 15:50:38.001015 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d-serving-cert\") pod \"controller-manager-684c967ddf-w9xwk\" (UID: \"a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d\") " pod="openshift-controller-manager/controller-manager-684c967ddf-w9xwk" Jan 28 15:50:38 crc kubenswrapper[4811]: I0128 15:50:38.001041 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d-config\") pod \"controller-manager-684c967ddf-w9xwk\" (UID: \"a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d\") " pod="openshift-controller-manager/controller-manager-684c967ddf-w9xwk" Jan 28 15:50:38 crc kubenswrapper[4811]: I0128 15:50:38.001070 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d-proxy-ca-bundles\") pod \"controller-manager-684c967ddf-w9xwk\" (UID: \"a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d\") " pod="openshift-controller-manager/controller-manager-684c967ddf-w9xwk" Jan 28 15:50:38 crc kubenswrapper[4811]: I0128 15:50:38.001093 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dt2f\" (UniqueName: 
\"kubernetes.io/projected/a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d-kube-api-access-6dt2f\") pod \"controller-manager-684c967ddf-w9xwk\" (UID: \"a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d\") " pod="openshift-controller-manager/controller-manager-684c967ddf-w9xwk" Jan 28 15:50:38 crc kubenswrapper[4811]: I0128 15:50:38.002167 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d-client-ca\") pod \"controller-manager-684c967ddf-w9xwk\" (UID: \"a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d\") " pod="openshift-controller-manager/controller-manager-684c967ddf-w9xwk" Jan 28 15:50:38 crc kubenswrapper[4811]: I0128 15:50:38.003533 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d-proxy-ca-bundles\") pod \"controller-manager-684c967ddf-w9xwk\" (UID: \"a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d\") " pod="openshift-controller-manager/controller-manager-684c967ddf-w9xwk" Jan 28 15:50:38 crc kubenswrapper[4811]: I0128 15:50:38.004795 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d-config\") pod \"controller-manager-684c967ddf-w9xwk\" (UID: \"a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d\") " pod="openshift-controller-manager/controller-manager-684c967ddf-w9xwk" Jan 28 15:50:38 crc kubenswrapper[4811]: I0128 15:50:38.006787 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d-serving-cert\") pod \"controller-manager-684c967ddf-w9xwk\" (UID: \"a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d\") " pod="openshift-controller-manager/controller-manager-684c967ddf-w9xwk" Jan 28 15:50:38 crc kubenswrapper[4811]: I0128 15:50:38.018613 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dt2f\" (UniqueName: \"kubernetes.io/projected/a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d-kube-api-access-6dt2f\") pod \"controller-manager-684c967ddf-w9xwk\" (UID: \"a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d\") " pod="openshift-controller-manager/controller-manager-684c967ddf-w9xwk" Jan 28 15:50:38 crc kubenswrapper[4811]: I0128 15:50:38.089307 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-684c967ddf-w9xwk" Jan 28 15:50:38 crc kubenswrapper[4811]: I0128 15:50:38.496878 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-684c967ddf-w9xwk"] Jan 28 15:50:38 crc kubenswrapper[4811]: W0128 15:50:38.521213 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda9bbf086_4ea5_4c4e_927a_e1d63a3aa26d.slice/crio-08a2bc2f96025bec7b4f126b6e325e2c01e50591488002df428f240e5cbb05d0 WatchSource:0}: Error finding container 08a2bc2f96025bec7b4f126b6e325e2c01e50591488002df428f240e5cbb05d0: Status 404 returned error can't find the container with id 08a2bc2f96025bec7b4f126b6e325e2c01e50591488002df428f240e5cbb05d0 Jan 28 15:50:39 crc kubenswrapper[4811]: I0128 15:50:39.295545 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-684c967ddf-w9xwk" event={"ID":"a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d","Type":"ContainerStarted","Data":"e92032809306b1600b48093553805339ad745700ec54e7c10fd78e4a87ca8db7"} Jan 28 15:50:39 crc kubenswrapper[4811]: I0128 15:50:39.295981 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-684c967ddf-w9xwk" Jan 28 15:50:39 crc kubenswrapper[4811]: I0128 15:50:39.296003 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-684c967ddf-w9xwk" event={"ID":"a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d","Type":"ContainerStarted","Data":"08a2bc2f96025bec7b4f126b6e325e2c01e50591488002df428f240e5cbb05d0"} Jan 28 15:50:39 crc kubenswrapper[4811]: I0128 15:50:39.301800 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-684c967ddf-w9xwk" Jan 28 15:50:39 crc kubenswrapper[4811]: I0128 15:50:39.315553 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-684c967ddf-w9xwk" podStartSLOduration=5.315532404 podStartE2EDuration="5.315532404s" podCreationTimestamp="2026-01-28 15:50:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:50:39.31100013 +0000 UTC m=+332.065363723" watchObservedRunningTime="2026-01-28 15:50:39.315532404 +0000 UTC m=+332.069895987" Jan 28 15:50:41 crc kubenswrapper[4811]: I0128 15:50:41.309171 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 28 15:50:41 crc kubenswrapper[4811]: I0128 15:50:41.309508 4811 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="7c606018902fca6d0a8194da0685983026fde56f75909c7ce0994d66cf38a45d" exitCode=137 Jan 28 15:50:41 crc kubenswrapper[4811]: I0128 15:50:41.675480 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 28 15:50:41 crc kubenswrapper[4811]: I0128 15:50:41.675609 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 28 15:50:41 crc kubenswrapper[4811]: I0128 15:50:41.784184 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 28 15:50:41 crc kubenswrapper[4811]: I0128 15:50:41.785172 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 28 15:50:41 crc kubenswrapper[4811]: I0128 15:50:41.785326 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 28 15:50:41 crc kubenswrapper[4811]: I0128 15:50:41.784604 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 15:50:41 crc kubenswrapper[4811]: I0128 15:50:41.785238 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 15:50:41 crc kubenswrapper[4811]: I0128 15:50:41.785647 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 28 15:50:41 crc kubenswrapper[4811]: I0128 15:50:41.785700 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 15:50:41 crc kubenswrapper[4811]: I0128 15:50:41.785813 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 28 15:50:41 crc kubenswrapper[4811]: I0128 15:50:41.785860 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 15:50:41 crc kubenswrapper[4811]: I0128 15:50:41.786240 4811 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:41 crc kubenswrapper[4811]: I0128 15:50:41.786308 4811 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:41 crc kubenswrapper[4811]: I0128 15:50:41.786368 4811 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:41 crc kubenswrapper[4811]: I0128 15:50:41.786457 4811 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:41 crc kubenswrapper[4811]: I0128 15:50:41.797684 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 15:50:41 crc kubenswrapper[4811]: I0128 15:50:41.888013 4811 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:42 crc kubenswrapper[4811]: I0128 15:50:42.324218 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 28 15:50:42 crc kubenswrapper[4811]: I0128 15:50:42.324301 4811 scope.go:117] "RemoveContainer" containerID="7c606018902fca6d0a8194da0685983026fde56f75909c7ce0994d66cf38a45d" Jan 28 15:50:42 crc kubenswrapper[4811]: I0128 15:50:42.324349 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 28 15:50:42 crc kubenswrapper[4811]: I0128 15:50:42.349692 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.052968 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-684c967ddf-w9xwk"] Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.053736 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-684c967ddf-w9xwk" podUID="a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d" containerName="controller-manager" containerID="cri-o://e92032809306b1600b48093553805339ad745700ec54e7c10fd78e4a87ca8db7" gracePeriod=30 Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.078594 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-df5b59c7f-fjlcn"] Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.079232 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-df5b59c7f-fjlcn" podUID="98999c63-78af-47fc-b47c-e6d2eade6033" containerName="route-controller-manager" containerID="cri-o://a1b4972f2929e546ff0a019fe428557844ad7e0129b51fb73ffdc0b66dfdc8a8" gracePeriod=30 Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.408812 4811 generic.go:334] "Generic (PLEG): container finished" podID="a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d" containerID="e92032809306b1600b48093553805339ad745700ec54e7c10fd78e4a87ca8db7" exitCode=0 Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.408945 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-684c967ddf-w9xwk" event={"ID":"a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d","Type":"ContainerDied","Data":"e92032809306b1600b48093553805339ad745700ec54e7c10fd78e4a87ca8db7"} Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.411033 4811 generic.go:334] "Generic (PLEG): container finished" podID="98999c63-78af-47fc-b47c-e6d2eade6033" containerID="a1b4972f2929e546ff0a019fe428557844ad7e0129b51fb73ffdc0b66dfdc8a8" exitCode=0 Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.411245 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-df5b59c7f-fjlcn" event={"ID":"98999c63-78af-47fc-b47c-e6d2eade6033","Type":"ContainerDied","Data":"a1b4972f2929e546ff0a019fe428557844ad7e0129b51fb73ffdc0b66dfdc8a8"} Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.618343 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-df5b59c7f-fjlcn" Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.698956 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-684c967ddf-w9xwk" Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.761103 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5r5ml\" (UniqueName: \"kubernetes.io/projected/98999c63-78af-47fc-b47c-e6d2eade6033-kube-api-access-5r5ml\") pod \"98999c63-78af-47fc-b47c-e6d2eade6033\" (UID: \"98999c63-78af-47fc-b47c-e6d2eade6033\") " Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.761147 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/98999c63-78af-47fc-b47c-e6d2eade6033-client-ca\") pod \"98999c63-78af-47fc-b47c-e6d2eade6033\" (UID: \"98999c63-78af-47fc-b47c-e6d2eade6033\") " Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.761216 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/98999c63-78af-47fc-b47c-e6d2eade6033-serving-cert\") pod \"98999c63-78af-47fc-b47c-e6d2eade6033\" (UID: \"98999c63-78af-47fc-b47c-e6d2eade6033\") " Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.761250 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/98999c63-78af-47fc-b47c-e6d2eade6033-config\") pod \"98999c63-78af-47fc-b47c-e6d2eade6033\" (UID: \"98999c63-78af-47fc-b47c-e6d2eade6033\") " Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.762293 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/98999c63-78af-47fc-b47c-e6d2eade6033-config" (OuterVolumeSpecName: "config") pod "98999c63-78af-47fc-b47c-e6d2eade6033" (UID: "98999c63-78af-47fc-b47c-e6d2eade6033"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.763402 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/98999c63-78af-47fc-b47c-e6d2eade6033-client-ca" (OuterVolumeSpecName: "client-ca") pod "98999c63-78af-47fc-b47c-e6d2eade6033" (UID: "98999c63-78af-47fc-b47c-e6d2eade6033"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.772725 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98999c63-78af-47fc-b47c-e6d2eade6033-kube-api-access-5r5ml" (OuterVolumeSpecName: "kube-api-access-5r5ml") pod "98999c63-78af-47fc-b47c-e6d2eade6033" (UID: "98999c63-78af-47fc-b47c-e6d2eade6033"). InnerVolumeSpecName "kube-api-access-5r5ml". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.787775 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98999c63-78af-47fc-b47c-e6d2eade6033-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "98999c63-78af-47fc-b47c-e6d2eade6033" (UID: "98999c63-78af-47fc-b47c-e6d2eade6033"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.862276 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6dt2f\" (UniqueName: \"kubernetes.io/projected/a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d-kube-api-access-6dt2f\") pod \"a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d\" (UID: \"a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d\") " Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.862376 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d-proxy-ca-bundles\") pod \"a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d\" (UID: \"a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d\") " Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.862485 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d-config\") pod \"a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d\" (UID: \"a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d\") " Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.862525 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d-serving-cert\") pod \"a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d\" (UID: \"a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d\") " Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.862553 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d-client-ca\") pod \"a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d\" (UID: \"a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d\") " Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.862731 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/98999c63-78af-47fc-b47c-e6d2eade6033-config\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.862746 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5r5ml\" (UniqueName: \"kubernetes.io/projected/98999c63-78af-47fc-b47c-e6d2eade6033-kube-api-access-5r5ml\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.862757 4811 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/98999c63-78af-47fc-b47c-e6d2eade6033-client-ca\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.862765 4811 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/98999c63-78af-47fc-b47c-e6d2eade6033-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.863226 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d" (UID: "a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.863280 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d-client-ca" (OuterVolumeSpecName: "client-ca") pod "a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d" (UID: "a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.863456 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d-config" (OuterVolumeSpecName: "config") pod "a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d" (UID: "a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.866686 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d-kube-api-access-6dt2f" (OuterVolumeSpecName: "kube-api-access-6dt2f") pod "a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d" (UID: "a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d"). InnerVolumeSpecName "kube-api-access-6dt2f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.866826 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d" (UID: "a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.963768 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6dt2f\" (UniqueName: \"kubernetes.io/projected/a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d-kube-api-access-6dt2f\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.963818 4811 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.963833 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d-config\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.963846 4811 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:54 crc kubenswrapper[4811]: I0128 15:50:54.963860 4811 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d-client-ca\") on node \"crc\" DevicePath \"\"" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.419067 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-df5b59c7f-fjlcn" event={"ID":"98999c63-78af-47fc-b47c-e6d2eade6033","Type":"ContainerDied","Data":"88fc1c915a74eccb7156dcc9bfb37aa365db77d51521ce061d8d51576eba5467"} Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.419109 4811 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-df5b59c7f-fjlcn" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.419619 4811 scope.go:117] "RemoveContainer" containerID="a1b4972f2929e546ff0a019fe428557844ad7e0129b51fb73ffdc0b66dfdc8a8" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.420713 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-684c967ddf-w9xwk" event={"ID":"a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d","Type":"ContainerDied","Data":"08a2bc2f96025bec7b4f126b6e325e2c01e50591488002df428f240e5cbb05d0"} Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.420795 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-684c967ddf-w9xwk" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.440016 4811 scope.go:117] "RemoveContainer" containerID="e92032809306b1600b48093553805339ad745700ec54e7c10fd78e4a87ca8db7" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.459065 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-684c967ddf-w9xwk"] Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.468629 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-684c967ddf-w9xwk"] Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.472752 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-df5b59c7f-fjlcn"] Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.476037 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-df5b59c7f-fjlcn"] Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.773955 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d99cb9764-wqfmf"] Jan 28 15:50:55 crc kubenswrapper[4811]: E0128 15:50:55.774259 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d" containerName="controller-manager" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.774277 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d" containerName="controller-manager" Jan 28 15:50:55 crc kubenswrapper[4811]: E0128 15:50:55.774304 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98999c63-78af-47fc-b47c-e6d2eade6033" containerName="route-controller-manager" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.774313 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="98999c63-78af-47fc-b47c-e6d2eade6033" containerName="route-controller-manager" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.774501 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="98999c63-78af-47fc-b47c-e6d2eade6033" containerName="route-controller-manager" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.774523 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d" containerName="controller-manager" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.774960 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-d99cb9764-wqfmf" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.777058 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.777762 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.778112 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.778499 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.778749 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.781671 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.784861 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-74ffcbf647-hd2sr"] Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.786101 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-74ffcbf647-hd2sr" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.789611 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.790291 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.790499 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.790824 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.791714 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.797744 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d99cb9764-wqfmf"] Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.798274 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.804465 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-74ffcbf647-hd2sr"] Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.807985 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.876377 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/b8099c49-c944-461f-b39b-5584c75e5f3c-client-ca\") pod \"route-controller-manager-d99cb9764-wqfmf\" (UID: \"b8099c49-c944-461f-b39b-5584c75e5f3c\") " pod="openshift-route-controller-manager/route-controller-manager-d99cb9764-wqfmf" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.876512 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/39c9ca82-2caf-4dff-9293-a3a281c2705e-client-ca\") pod \"controller-manager-74ffcbf647-hd2sr\" (UID: \"39c9ca82-2caf-4dff-9293-a3a281c2705e\") " pod="openshift-controller-manager/controller-manager-74ffcbf647-hd2sr" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.876712 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39c9ca82-2caf-4dff-9293-a3a281c2705e-config\") pod \"controller-manager-74ffcbf647-hd2sr\" (UID: \"39c9ca82-2caf-4dff-9293-a3a281c2705e\") " pod="openshift-controller-manager/controller-manager-74ffcbf647-hd2sr" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.876876 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/39c9ca82-2caf-4dff-9293-a3a281c2705e-serving-cert\") pod \"controller-manager-74ffcbf647-hd2sr\" (UID: \"39c9ca82-2caf-4dff-9293-a3a281c2705e\") " pod="openshift-controller-manager/controller-manager-74ffcbf647-hd2sr" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.876914 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8099c49-c944-461f-b39b-5584c75e5f3c-config\") pod \"route-controller-manager-d99cb9764-wqfmf\" (UID: \"b8099c49-c944-461f-b39b-5584c75e5f3c\") " pod="openshift-route-controller-manager/route-controller-manager-d99cb9764-wqfmf" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.876943 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8vqj\" (UniqueName: \"kubernetes.io/projected/b8099c49-c944-461f-b39b-5584c75e5f3c-kube-api-access-r8vqj\") pod \"route-controller-manager-d99cb9764-wqfmf\" (UID: \"b8099c49-c944-461f-b39b-5584c75e5f3c\") " pod="openshift-route-controller-manager/route-controller-manager-d99cb9764-wqfmf" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.877171 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxhpb\" (UniqueName: \"kubernetes.io/projected/39c9ca82-2caf-4dff-9293-a3a281c2705e-kube-api-access-hxhpb\") pod \"controller-manager-74ffcbf647-hd2sr\" (UID: \"39c9ca82-2caf-4dff-9293-a3a281c2705e\") " pod="openshift-controller-manager/controller-manager-74ffcbf647-hd2sr" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.877247 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/39c9ca82-2caf-4dff-9293-a3a281c2705e-proxy-ca-bundles\") pod \"controller-manager-74ffcbf647-hd2sr\" (UID: \"39c9ca82-2caf-4dff-9293-a3a281c2705e\") " pod="openshift-controller-manager/controller-manager-74ffcbf647-hd2sr" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.877366 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/b8099c49-c944-461f-b39b-5584c75e5f3c-serving-cert\") pod \"route-controller-manager-d99cb9764-wqfmf\" (UID: \"b8099c49-c944-461f-b39b-5584c75e5f3c\") " pod="openshift-route-controller-manager/route-controller-manager-d99cb9764-wqfmf" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.979109 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b8099c49-c944-461f-b39b-5584c75e5f3c-client-ca\") pod \"route-controller-manager-d99cb9764-wqfmf\" (UID: \"b8099c49-c944-461f-b39b-5584c75e5f3c\") " pod="openshift-route-controller-manager/route-controller-manager-d99cb9764-wqfmf" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.979187 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/39c9ca82-2caf-4dff-9293-a3a281c2705e-client-ca\") pod \"controller-manager-74ffcbf647-hd2sr\" (UID: \"39c9ca82-2caf-4dff-9293-a3a281c2705e\") " pod="openshift-controller-manager/controller-manager-74ffcbf647-hd2sr" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.979214 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39c9ca82-2caf-4dff-9293-a3a281c2705e-config\") pod \"controller-manager-74ffcbf647-hd2sr\" (UID: \"39c9ca82-2caf-4dff-9293-a3a281c2705e\") " pod="openshift-controller-manager/controller-manager-74ffcbf647-hd2sr" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.979236 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/39c9ca82-2caf-4dff-9293-a3a281c2705e-serving-cert\") pod \"controller-manager-74ffcbf647-hd2sr\" (UID: \"39c9ca82-2caf-4dff-9293-a3a281c2705e\") " pod="openshift-controller-manager/controller-manager-74ffcbf647-hd2sr" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.979263 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8099c49-c944-461f-b39b-5584c75e5f3c-config\") pod \"route-controller-manager-d99cb9764-wqfmf\" (UID: \"b8099c49-c944-461f-b39b-5584c75e5f3c\") " pod="openshift-route-controller-manager/route-controller-manager-d99cb9764-wqfmf" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.979306 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8vqj\" (UniqueName: \"kubernetes.io/projected/b8099c49-c944-461f-b39b-5584c75e5f3c-kube-api-access-r8vqj\") pod \"route-controller-manager-d99cb9764-wqfmf\" (UID: \"b8099c49-c944-461f-b39b-5584c75e5f3c\") " pod="openshift-route-controller-manager/route-controller-manager-d99cb9764-wqfmf" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.979343 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxhpb\" (UniqueName: \"kubernetes.io/projected/39c9ca82-2caf-4dff-9293-a3a281c2705e-kube-api-access-hxhpb\") pod \"controller-manager-74ffcbf647-hd2sr\" (UID: \"39c9ca82-2caf-4dff-9293-a3a281c2705e\") " pod="openshift-controller-manager/controller-manager-74ffcbf647-hd2sr" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.979370 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/39c9ca82-2caf-4dff-9293-a3a281c2705e-proxy-ca-bundles\") pod \"controller-manager-74ffcbf647-hd2sr\" (UID: 
\"39c9ca82-2caf-4dff-9293-a3a281c2705e\") " pod="openshift-controller-manager/controller-manager-74ffcbf647-hd2sr" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.979396 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b8099c49-c944-461f-b39b-5584c75e5f3c-serving-cert\") pod \"route-controller-manager-d99cb9764-wqfmf\" (UID: \"b8099c49-c944-461f-b39b-5584c75e5f3c\") " pod="openshift-route-controller-manager/route-controller-manager-d99cb9764-wqfmf" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.980355 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b8099c49-c944-461f-b39b-5584c75e5f3c-client-ca\") pod \"route-controller-manager-d99cb9764-wqfmf\" (UID: \"b8099c49-c944-461f-b39b-5584c75e5f3c\") " pod="openshift-route-controller-manager/route-controller-manager-d99cb9764-wqfmf" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.980866 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/39c9ca82-2caf-4dff-9293-a3a281c2705e-proxy-ca-bundles\") pod \"controller-manager-74ffcbf647-hd2sr\" (UID: \"39c9ca82-2caf-4dff-9293-a3a281c2705e\") " pod="openshift-controller-manager/controller-manager-74ffcbf647-hd2sr" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.981123 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8099c49-c944-461f-b39b-5584c75e5f3c-config\") pod \"route-controller-manager-d99cb9764-wqfmf\" (UID: \"b8099c49-c944-461f-b39b-5584c75e5f3c\") " pod="openshift-route-controller-manager/route-controller-manager-d99cb9764-wqfmf" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.981389 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39c9ca82-2caf-4dff-9293-a3a281c2705e-config\") pod \"controller-manager-74ffcbf647-hd2sr\" (UID: \"39c9ca82-2caf-4dff-9293-a3a281c2705e\") " pod="openshift-controller-manager/controller-manager-74ffcbf647-hd2sr" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.981721 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/39c9ca82-2caf-4dff-9293-a3a281c2705e-client-ca\") pod \"controller-manager-74ffcbf647-hd2sr\" (UID: \"39c9ca82-2caf-4dff-9293-a3a281c2705e\") " pod="openshift-controller-manager/controller-manager-74ffcbf647-hd2sr" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.984958 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/39c9ca82-2caf-4dff-9293-a3a281c2705e-serving-cert\") pod \"controller-manager-74ffcbf647-hd2sr\" (UID: \"39c9ca82-2caf-4dff-9293-a3a281c2705e\") " pod="openshift-controller-manager/controller-manager-74ffcbf647-hd2sr" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.996057 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b8099c49-c944-461f-b39b-5584c75e5f3c-serving-cert\") pod \"route-controller-manager-d99cb9764-wqfmf\" (UID: \"b8099c49-c944-461f-b39b-5584c75e5f3c\") " pod="openshift-route-controller-manager/route-controller-manager-d99cb9764-wqfmf" Jan 28 15:50:55 crc kubenswrapper[4811]: I0128 15:50:55.998227 4811 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-hxhpb\" (UniqueName: \"kubernetes.io/projected/39c9ca82-2caf-4dff-9293-a3a281c2705e-kube-api-access-hxhpb\") pod \"controller-manager-74ffcbf647-hd2sr\" (UID: \"39c9ca82-2caf-4dff-9293-a3a281c2705e\") " pod="openshift-controller-manager/controller-manager-74ffcbf647-hd2sr" Jan 28 15:50:56 crc kubenswrapper[4811]: I0128 15:50:56.000152 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8vqj\" (UniqueName: \"kubernetes.io/projected/b8099c49-c944-461f-b39b-5584c75e5f3c-kube-api-access-r8vqj\") pod \"route-controller-manager-d99cb9764-wqfmf\" (UID: \"b8099c49-c944-461f-b39b-5584c75e5f3c\") " pod="openshift-route-controller-manager/route-controller-manager-d99cb9764-wqfmf" Jan 28 15:50:56 crc kubenswrapper[4811]: I0128 15:50:56.106344 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-d99cb9764-wqfmf" Jan 28 15:50:56 crc kubenswrapper[4811]: I0128 15:50:56.118668 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-74ffcbf647-hd2sr" Jan 28 15:50:56 crc kubenswrapper[4811]: I0128 15:50:56.355791 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="98999c63-78af-47fc-b47c-e6d2eade6033" path="/var/lib/kubelet/pods/98999c63-78af-47fc-b47c-e6d2eade6033/volumes" Jan 28 15:50:56 crc kubenswrapper[4811]: I0128 15:50:56.357286 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d" path="/var/lib/kubelet/pods/a9bbf086-4ea5-4c4e-927a-e1d63a3aa26d/volumes" Jan 28 15:50:56 crc kubenswrapper[4811]: I0128 15:50:56.546729 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-74ffcbf647-hd2sr"] Jan 28 15:50:56 crc kubenswrapper[4811]: I0128 15:50:56.609291 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d99cb9764-wqfmf"] Jan 28 15:50:57 crc kubenswrapper[4811]: I0128 15:50:57.436667 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-74ffcbf647-hd2sr" event={"ID":"39c9ca82-2caf-4dff-9293-a3a281c2705e","Type":"ContainerStarted","Data":"43b0e3d13543a3c8dee79e555afb60dcfff94cce050074bd864ce6f8f4850ec6"} Jan 28 15:50:57 crc kubenswrapper[4811]: I0128 15:50:57.437347 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-74ffcbf647-hd2sr" event={"ID":"39c9ca82-2caf-4dff-9293-a3a281c2705e","Type":"ContainerStarted","Data":"3921fb1ec0859e60434a7c4935ee898b8ec981ffabe4c31c641372965a2f88c7"} Jan 28 15:50:57 crc kubenswrapper[4811]: I0128 15:50:57.437377 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-74ffcbf647-hd2sr" Jan 28 15:50:57 crc kubenswrapper[4811]: I0128 15:50:57.438094 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-d99cb9764-wqfmf" event={"ID":"b8099c49-c944-461f-b39b-5584c75e5f3c","Type":"ContainerStarted","Data":"e0ef3f477059e34873e4e33687246e1147f33c8345df1b9414fd846d0f088da6"} Jan 28 15:50:57 crc kubenswrapper[4811]: I0128 15:50:57.438121 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-d99cb9764-wqfmf" 
event={"ID":"b8099c49-c944-461f-b39b-5584c75e5f3c","Type":"ContainerStarted","Data":"96e6f1c6a0a62fb99dcbe65cdc1be33097e39a1b62a1d5ef6853eb6ccd06321d"} Jan 28 15:50:57 crc kubenswrapper[4811]: I0128 15:50:57.438555 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-d99cb9764-wqfmf" Jan 28 15:50:57 crc kubenswrapper[4811]: I0128 15:50:57.442350 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-74ffcbf647-hd2sr" Jan 28 15:50:57 crc kubenswrapper[4811]: I0128 15:50:57.443617 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-d99cb9764-wqfmf" Jan 28 15:50:57 crc kubenswrapper[4811]: I0128 15:50:57.460772 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-74ffcbf647-hd2sr" podStartSLOduration=3.460747903 podStartE2EDuration="3.460747903s" podCreationTimestamp="2026-01-28 15:50:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:50:57.456766844 +0000 UTC m=+350.211130427" watchObservedRunningTime="2026-01-28 15:50:57.460747903 +0000 UTC m=+350.215111486" Jan 28 15:50:57 crc kubenswrapper[4811]: I0128 15:50:57.497448 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-d99cb9764-wqfmf" podStartSLOduration=3.497401621 podStartE2EDuration="3.497401621s" podCreationTimestamp="2026-01-28 15:50:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:50:57.494711598 +0000 UTC m=+350.249075201" watchObservedRunningTime="2026-01-28 15:50:57.497401621 +0000 UTC m=+350.251765204" Jan 28 15:51:03 crc kubenswrapper[4811]: I0128 15:51:03.088142 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 15:51:03 crc kubenswrapper[4811]: I0128 15:51:03.088775 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 15:51:03 crc kubenswrapper[4811]: I0128 15:51:03.394623 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zkcxm"] Jan 28 15:51:03 crc kubenswrapper[4811]: I0128 15:51:03.394983 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-zkcxm" podUID="d23adda9-57ba-4cbf-be0a-6cec03fbdc1d" containerName="registry-server" containerID="cri-o://8230eb696175ff2964e5ec3e8120dfaff977a531aec7015f1ea10fc0755d2630" gracePeriod=2 Jan 28 15:51:03 crc kubenswrapper[4811]: I0128 15:51:03.849658 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zkcxm" Jan 28 15:51:03 crc kubenswrapper[4811]: I0128 15:51:03.993780 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d23adda9-57ba-4cbf-be0a-6cec03fbdc1d-utilities\") pod \"d23adda9-57ba-4cbf-be0a-6cec03fbdc1d\" (UID: \"d23adda9-57ba-4cbf-be0a-6cec03fbdc1d\") " Jan 28 15:51:03 crc kubenswrapper[4811]: I0128 15:51:03.994148 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sjwfm\" (UniqueName: \"kubernetes.io/projected/d23adda9-57ba-4cbf-be0a-6cec03fbdc1d-kube-api-access-sjwfm\") pod \"d23adda9-57ba-4cbf-be0a-6cec03fbdc1d\" (UID: \"d23adda9-57ba-4cbf-be0a-6cec03fbdc1d\") " Jan 28 15:51:03 crc kubenswrapper[4811]: I0128 15:51:03.994192 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d23adda9-57ba-4cbf-be0a-6cec03fbdc1d-catalog-content\") pod \"d23adda9-57ba-4cbf-be0a-6cec03fbdc1d\" (UID: \"d23adda9-57ba-4cbf-be0a-6cec03fbdc1d\") " Jan 28 15:51:03 crc kubenswrapper[4811]: I0128 15:51:03.995043 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d23adda9-57ba-4cbf-be0a-6cec03fbdc1d-utilities" (OuterVolumeSpecName: "utilities") pod "d23adda9-57ba-4cbf-be0a-6cec03fbdc1d" (UID: "d23adda9-57ba-4cbf-be0a-6cec03fbdc1d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 15:51:04 crc kubenswrapper[4811]: I0128 15:51:04.000513 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d23adda9-57ba-4cbf-be0a-6cec03fbdc1d-kube-api-access-sjwfm" (OuterVolumeSpecName: "kube-api-access-sjwfm") pod "d23adda9-57ba-4cbf-be0a-6cec03fbdc1d" (UID: "d23adda9-57ba-4cbf-be0a-6cec03fbdc1d"). InnerVolumeSpecName "kube-api-access-sjwfm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:51:04 crc kubenswrapper[4811]: I0128 15:51:04.044517 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d23adda9-57ba-4cbf-be0a-6cec03fbdc1d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d23adda9-57ba-4cbf-be0a-6cec03fbdc1d" (UID: "d23adda9-57ba-4cbf-be0a-6cec03fbdc1d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 15:51:04 crc kubenswrapper[4811]: I0128 15:51:04.095322 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sjwfm\" (UniqueName: \"kubernetes.io/projected/d23adda9-57ba-4cbf-be0a-6cec03fbdc1d-kube-api-access-sjwfm\") on node \"crc\" DevicePath \"\"" Jan 28 15:51:04 crc kubenswrapper[4811]: I0128 15:51:04.095388 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d23adda9-57ba-4cbf-be0a-6cec03fbdc1d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 15:51:04 crc kubenswrapper[4811]: I0128 15:51:04.095400 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d23adda9-57ba-4cbf-be0a-6cec03fbdc1d-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 15:51:04 crc kubenswrapper[4811]: I0128 15:51:04.486246 4811 generic.go:334] "Generic (PLEG): container finished" podID="d23adda9-57ba-4cbf-be0a-6cec03fbdc1d" containerID="8230eb696175ff2964e5ec3e8120dfaff977a531aec7015f1ea10fc0755d2630" exitCode=0 Jan 28 15:51:04 crc kubenswrapper[4811]: I0128 15:51:04.486293 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zkcxm" event={"ID":"d23adda9-57ba-4cbf-be0a-6cec03fbdc1d","Type":"ContainerDied","Data":"8230eb696175ff2964e5ec3e8120dfaff977a531aec7015f1ea10fc0755d2630"} Jan 28 15:51:04 crc kubenswrapper[4811]: I0128 15:51:04.486308 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zkcxm" Jan 28 15:51:04 crc kubenswrapper[4811]: I0128 15:51:04.486321 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zkcxm" event={"ID":"d23adda9-57ba-4cbf-be0a-6cec03fbdc1d","Type":"ContainerDied","Data":"bb0210b6977cb36609b1de56dc26110ab8090418dfaa2ad986d51806aa2ad916"} Jan 28 15:51:04 crc kubenswrapper[4811]: I0128 15:51:04.486345 4811 scope.go:117] "RemoveContainer" containerID="8230eb696175ff2964e5ec3e8120dfaff977a531aec7015f1ea10fc0755d2630" Jan 28 15:51:04 crc kubenswrapper[4811]: I0128 15:51:04.506572 4811 scope.go:117] "RemoveContainer" containerID="87f2bd2b4ed99cad7332795fb61f9d973ba3d0500163ea2c72f7dcf7d974adaa" Jan 28 15:51:04 crc kubenswrapper[4811]: I0128 15:51:04.513173 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zkcxm"] Jan 28 15:51:04 crc kubenswrapper[4811]: I0128 15:51:04.516888 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-zkcxm"] Jan 28 15:51:04 crc kubenswrapper[4811]: I0128 15:51:04.524353 4811 scope.go:117] "RemoveContainer" containerID="8cce1ee932522c57a8e0aae95d41753191330ecc12e4ede5f97125a3217972f4" Jan 28 15:51:04 crc kubenswrapper[4811]: I0128 15:51:04.542612 4811 scope.go:117] "RemoveContainer" containerID="8230eb696175ff2964e5ec3e8120dfaff977a531aec7015f1ea10fc0755d2630" Jan 28 15:51:04 crc kubenswrapper[4811]: E0128 15:51:04.543714 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8230eb696175ff2964e5ec3e8120dfaff977a531aec7015f1ea10fc0755d2630\": container with ID starting with 8230eb696175ff2964e5ec3e8120dfaff977a531aec7015f1ea10fc0755d2630 not found: ID does not exist" containerID="8230eb696175ff2964e5ec3e8120dfaff977a531aec7015f1ea10fc0755d2630" Jan 28 15:51:04 crc kubenswrapper[4811]: I0128 15:51:04.543773 
4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8230eb696175ff2964e5ec3e8120dfaff977a531aec7015f1ea10fc0755d2630"} err="failed to get container status \"8230eb696175ff2964e5ec3e8120dfaff977a531aec7015f1ea10fc0755d2630\": rpc error: code = NotFound desc = could not find container \"8230eb696175ff2964e5ec3e8120dfaff977a531aec7015f1ea10fc0755d2630\": container with ID starting with 8230eb696175ff2964e5ec3e8120dfaff977a531aec7015f1ea10fc0755d2630 not found: ID does not exist" Jan 28 15:51:04 crc kubenswrapper[4811]: I0128 15:51:04.543811 4811 scope.go:117] "RemoveContainer" containerID="87f2bd2b4ed99cad7332795fb61f9d973ba3d0500163ea2c72f7dcf7d974adaa" Jan 28 15:51:04 crc kubenswrapper[4811]: E0128 15:51:04.544544 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"87f2bd2b4ed99cad7332795fb61f9d973ba3d0500163ea2c72f7dcf7d974adaa\": container with ID starting with 87f2bd2b4ed99cad7332795fb61f9d973ba3d0500163ea2c72f7dcf7d974adaa not found: ID does not exist" containerID="87f2bd2b4ed99cad7332795fb61f9d973ba3d0500163ea2c72f7dcf7d974adaa" Jan 28 15:51:04 crc kubenswrapper[4811]: I0128 15:51:04.544587 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87f2bd2b4ed99cad7332795fb61f9d973ba3d0500163ea2c72f7dcf7d974adaa"} err="failed to get container status \"87f2bd2b4ed99cad7332795fb61f9d973ba3d0500163ea2c72f7dcf7d974adaa\": rpc error: code = NotFound desc = could not find container \"87f2bd2b4ed99cad7332795fb61f9d973ba3d0500163ea2c72f7dcf7d974adaa\": container with ID starting with 87f2bd2b4ed99cad7332795fb61f9d973ba3d0500163ea2c72f7dcf7d974adaa not found: ID does not exist" Jan 28 15:51:04 crc kubenswrapper[4811]: I0128 15:51:04.544617 4811 scope.go:117] "RemoveContainer" containerID="8cce1ee932522c57a8e0aae95d41753191330ecc12e4ede5f97125a3217972f4" Jan 28 15:51:04 crc kubenswrapper[4811]: E0128 15:51:04.545383 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8cce1ee932522c57a8e0aae95d41753191330ecc12e4ede5f97125a3217972f4\": container with ID starting with 8cce1ee932522c57a8e0aae95d41753191330ecc12e4ede5f97125a3217972f4 not found: ID does not exist" containerID="8cce1ee932522c57a8e0aae95d41753191330ecc12e4ede5f97125a3217972f4" Jan 28 15:51:04 crc kubenswrapper[4811]: I0128 15:51:04.545454 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cce1ee932522c57a8e0aae95d41753191330ecc12e4ede5f97125a3217972f4"} err="failed to get container status \"8cce1ee932522c57a8e0aae95d41753191330ecc12e4ede5f97125a3217972f4\": rpc error: code = NotFound desc = could not find container \"8cce1ee932522c57a8e0aae95d41753191330ecc12e4ede5f97125a3217972f4\": container with ID starting with 8cce1ee932522c57a8e0aae95d41753191330ecc12e4ede5f97125a3217972f4 not found: ID does not exist" Jan 28 15:51:06 crc kubenswrapper[4811]: I0128 15:51:06.348294 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d23adda9-57ba-4cbf-be0a-6cec03fbdc1d" path="/var/lib/kubelet/pods/d23adda9-57ba-4cbf-be0a-6cec03fbdc1d/volumes" Jan 28 15:51:33 crc kubenswrapper[4811]: I0128 15:51:33.088091 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
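Note on the three "ContainerStatus from runtime service failed ... NotFound" / "DeleteContainer returned error" pairs above: this is a benign race, the containers had already been removed by another cleanup path when the delete retried. A minimal Go sketch of the underlying pattern (treating gRPC NotFound as success so removal stays idempotent); this is an illustration under assumed names, not kubelet source, and the fake runtime below is hypothetical:

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeContainerIdempotent deletes a container and swallows NotFound,
// since a concurrent cleanup may already have removed it.
func removeContainerIdempotent(remove func(id string) error, id string) error {
	if err := remove(id); err != nil {
		if status.Code(err) == codes.NotFound {
			// Already gone: log and treat as success, mirroring the log above.
			fmt.Printf("container %s already removed\n", id)
			return nil
		}
		return fmt.Errorf("failed to remove container %s: %w", id, err)
	}
	return nil
}

func main() {
	// Simulated runtime that reports the container as missing.
	fake := func(id string) error {
		return status.Error(codes.NotFound, "could not find container "+id)
	}
	if err := removeContainerIdempotent(fake, "8230eb69"); err != nil {
		panic(err)
	}
}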
Jan 28 15:51:33 crc kubenswrapper[4811]: I0128 15:51:33.088091 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 15:51:33 crc kubenswrapper[4811]: I0128 15:51:33.088920 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 15:51:54 crc kubenswrapper[4811]: I0128 15:51:54.050401 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d99cb9764-wqfmf"]
Jan 28 15:51:54 crc kubenswrapper[4811]: I0128 15:51:54.052303 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-d99cb9764-wqfmf" podUID="b8099c49-c944-461f-b39b-5584c75e5f3c" containerName="route-controller-manager" containerID="cri-o://e0ef3f477059e34873e4e33687246e1147f33c8345df1b9414fd846d0f088da6" gracePeriod=30
Jan 28 15:51:54 crc kubenswrapper[4811]: I0128 15:51:54.774218 4811 generic.go:334] "Generic (PLEG): container finished" podID="b8099c49-c944-461f-b39b-5584c75e5f3c" containerID="e0ef3f477059e34873e4e33687246e1147f33c8345df1b9414fd846d0f088da6" exitCode=0
Jan 28 15:51:54 crc kubenswrapper[4811]: I0128 15:51:54.774280 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-d99cb9764-wqfmf" event={"ID":"b8099c49-c944-461f-b39b-5584c75e5f3c","Type":"ContainerDied","Data":"e0ef3f477059e34873e4e33687246e1147f33c8345df1b9414fd846d0f088da6"}
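Note on the Liveness probe failure above: it is an HTTP GET against http://127.0.0.1:8798/health whose TCP connect was refused. A self-contained sketch of that probe semantics, where any transport error or non-2xx response counts as a failure; illustrative only, not the kubelet prober:

package main

import (
	"fmt"
	"net/http"
	"time"
)

// probeOnce performs one HTTP liveness check the way the prober entries
// describe: a transport error (e.g. "connect: connection refused") or a
// non-2xx status is a probe failure.
func probeOnce(url string) (ok bool, detail string) {
	client := http.Client{Timeout: 1 * time.Second}
	resp, err := client.Get(url)
	if err != nil {
		// e.g. Get "...": dial tcp 127.0.0.1:8798: connect: connection refused
		return false, err.Error()
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 300 {
		return false, fmt.Sprintf("unexpected status %d", resp.StatusCode)
	}
	return true, ""
}

func main() {
	ok, detail := probeOnce("http://127.0.0.1:8798/health")
	fmt.Println("probe ok:", ok, detail)
}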
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.023924 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-d99cb9764-wqfmf"
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.103905 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b8099c49-c944-461f-b39b-5584c75e5f3c-serving-cert\") pod \"b8099c49-c944-461f-b39b-5584c75e5f3c\" (UID: \"b8099c49-c944-461f-b39b-5584c75e5f3c\") "
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.104063 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b8099c49-c944-461f-b39b-5584c75e5f3c-client-ca\") pod \"b8099c49-c944-461f-b39b-5584c75e5f3c\" (UID: \"b8099c49-c944-461f-b39b-5584c75e5f3c\") "
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.104120 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r8vqj\" (UniqueName: \"kubernetes.io/projected/b8099c49-c944-461f-b39b-5584c75e5f3c-kube-api-access-r8vqj\") pod \"b8099c49-c944-461f-b39b-5584c75e5f3c\" (UID: \"b8099c49-c944-461f-b39b-5584c75e5f3c\") "
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.104148 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8099c49-c944-461f-b39b-5584c75e5f3c-config\") pod \"b8099c49-c944-461f-b39b-5584c75e5f3c\" (UID: \"b8099c49-c944-461f-b39b-5584c75e5f3c\") "
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.104849 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b8099c49-c944-461f-b39b-5584c75e5f3c-client-ca" (OuterVolumeSpecName: "client-ca") pod "b8099c49-c944-461f-b39b-5584c75e5f3c" (UID: "b8099c49-c944-461f-b39b-5584c75e5f3c"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.104898 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b8099c49-c944-461f-b39b-5584c75e5f3c-config" (OuterVolumeSpecName: "config") pod "b8099c49-c944-461f-b39b-5584c75e5f3c" (UID: "b8099c49-c944-461f-b39b-5584c75e5f3c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.109474 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8099c49-c944-461f-b39b-5584c75e5f3c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "b8099c49-c944-461f-b39b-5584c75e5f3c" (UID: "b8099c49-c944-461f-b39b-5584c75e5f3c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.109691 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8099c49-c944-461f-b39b-5584c75e5f3c-kube-api-access-r8vqj" (OuterVolumeSpecName: "kube-api-access-r8vqj") pod "b8099c49-c944-461f-b39b-5584c75e5f3c" (UID: "b8099c49-c944-461f-b39b-5584c75e5f3c"). InnerVolumeSpecName "kube-api-access-r8vqj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.205408 4811 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b8099c49-c944-461f-b39b-5584c75e5f3c-client-ca\") on node \"crc\" DevicePath \"\""
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.205461 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r8vqj\" (UniqueName: \"kubernetes.io/projected/b8099c49-c944-461f-b39b-5584c75e5f3c-kube-api-access-r8vqj\") on node \"crc\" DevicePath \"\""
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.205473 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8099c49-c944-461f-b39b-5584c75e5f3c-config\") on node \"crc\" DevicePath \"\""
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.205480 4811 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b8099c49-c944-461f-b39b-5584c75e5f3c-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.783835 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-d99cb9764-wqfmf" event={"ID":"b8099c49-c944-461f-b39b-5584c75e5f3c","Type":"ContainerDied","Data":"96e6f1c6a0a62fb99dcbe65cdc1be33097e39a1b62a1d5ef6853eb6ccd06321d"}
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.783906 4811 scope.go:117] "RemoveContainer" containerID="e0ef3f477059e34873e4e33687246e1147f33c8345df1b9414fd846d0f088da6"
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.783958 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-d99cb9764-wqfmf"
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.822674 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6cdb6b9b54-78h4k"]
Jan 28 15:51:55 crc kubenswrapper[4811]: E0128 15:51:55.822966 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d23adda9-57ba-4cbf-be0a-6cec03fbdc1d" containerName="registry-server"
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.822986 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d23adda9-57ba-4cbf-be0a-6cec03fbdc1d" containerName="registry-server"
Jan 28 15:51:55 crc kubenswrapper[4811]: E0128 15:51:55.823003 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8099c49-c944-461f-b39b-5584c75e5f3c" containerName="route-controller-manager"
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.823012 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8099c49-c944-461f-b39b-5584c75e5f3c" containerName="route-controller-manager"
Jan 28 15:51:55 crc kubenswrapper[4811]: E0128 15:51:55.823027 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d23adda9-57ba-4cbf-be0a-6cec03fbdc1d" containerName="extract-content"
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.823037 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d23adda9-57ba-4cbf-be0a-6cec03fbdc1d" containerName="extract-content"
Jan 28 15:51:55 crc kubenswrapper[4811]: E0128 15:51:55.823113 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d23adda9-57ba-4cbf-be0a-6cec03fbdc1d" containerName="extract-utilities"
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.823124 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d23adda9-57ba-4cbf-be0a-6cec03fbdc1d" containerName="extract-utilities"
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.823242 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8099c49-c944-461f-b39b-5584c75e5f3c" containerName="route-controller-manager"
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.823267 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d23adda9-57ba-4cbf-be0a-6cec03fbdc1d" containerName="registry-server"
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.823744 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6cdb6b9b54-78h4k"
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.826413 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.826842 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.826932 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.827159 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.827501 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.827543 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.842293 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6cdb6b9b54-78h4k"]
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.846271 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d99cb9764-wqfmf"]
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.849479 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d99cb9764-wqfmf"]
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.913381 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8268851b-a2db-4a86-8179-288108e4af21-client-ca\") pod \"route-controller-manager-6cdb6b9b54-78h4k\" (UID: \"8268851b-a2db-4a86-8179-288108e4af21\") " pod="openshift-route-controller-manager/route-controller-manager-6cdb6b9b54-78h4k"
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.913484 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8268851b-a2db-4a86-8179-288108e4af21-serving-cert\") pod \"route-controller-manager-6cdb6b9b54-78h4k\" (UID: \"8268851b-a2db-4a86-8179-288108e4af21\") " pod="openshift-route-controller-manager/route-controller-manager-6cdb6b9b54-78h4k"
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.913538 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqx4n\" (UniqueName: \"kubernetes.io/projected/8268851b-a2db-4a86-8179-288108e4af21-kube-api-access-lqx4n\") pod \"route-controller-manager-6cdb6b9b54-78h4k\" (UID: \"8268851b-a2db-4a86-8179-288108e4af21\") " pod="openshift-route-controller-manager/route-controller-manager-6cdb6b9b54-78h4k"
Jan 28 15:51:55 crc kubenswrapper[4811]: I0128 15:51:55.913566 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8268851b-a2db-4a86-8179-288108e4af21-config\") pod \"route-controller-manager-6cdb6b9b54-78h4k\" (UID: \"8268851b-a2db-4a86-8179-288108e4af21\") " pod="openshift-route-controller-manager/route-controller-manager-6cdb6b9b54-78h4k"
Jan 28 15:51:56 crc kubenswrapper[4811]: I0128 15:51:56.014911 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8268851b-a2db-4a86-8179-288108e4af21-serving-cert\") pod \"route-controller-manager-6cdb6b9b54-78h4k\" (UID: \"8268851b-a2db-4a86-8179-288108e4af21\") " pod="openshift-route-controller-manager/route-controller-manager-6cdb6b9b54-78h4k"
Jan 28 15:51:56 crc kubenswrapper[4811]: I0128 15:51:56.014960 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqx4n\" (UniqueName: \"kubernetes.io/projected/8268851b-a2db-4a86-8179-288108e4af21-kube-api-access-lqx4n\") pod \"route-controller-manager-6cdb6b9b54-78h4k\" (UID: \"8268851b-a2db-4a86-8179-288108e4af21\") " pod="openshift-route-controller-manager/route-controller-manager-6cdb6b9b54-78h4k"
Jan 28 15:51:56 crc kubenswrapper[4811]: I0128 15:51:56.014984 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8268851b-a2db-4a86-8179-288108e4af21-config\") pod \"route-controller-manager-6cdb6b9b54-78h4k\" (UID: \"8268851b-a2db-4a86-8179-288108e4af21\") " pod="openshift-route-controller-manager/route-controller-manager-6cdb6b9b54-78h4k"
Jan 28 15:51:56 crc kubenswrapper[4811]: I0128 15:51:56.015037 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8268851b-a2db-4a86-8179-288108e4af21-client-ca\") pod \"route-controller-manager-6cdb6b9b54-78h4k\" (UID: \"8268851b-a2db-4a86-8179-288108e4af21\") " pod="openshift-route-controller-manager/route-controller-manager-6cdb6b9b54-78h4k"
Jan 28 15:51:56 crc kubenswrapper[4811]: I0128 15:51:56.016082 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8268851b-a2db-4a86-8179-288108e4af21-client-ca\") pod \"route-controller-manager-6cdb6b9b54-78h4k\" (UID: \"8268851b-a2db-4a86-8179-288108e4af21\") " pod="openshift-route-controller-manager/route-controller-manager-6cdb6b9b54-78h4k"
Jan 28 15:51:56 crc kubenswrapper[4811]: I0128 15:51:56.016224 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8268851b-a2db-4a86-8179-288108e4af21-config\") pod \"route-controller-manager-6cdb6b9b54-78h4k\" (UID: \"8268851b-a2db-4a86-8179-288108e4af21\") " pod="openshift-route-controller-manager/route-controller-manager-6cdb6b9b54-78h4k"
Jan 28 15:51:56 crc kubenswrapper[4811]: I0128 15:51:56.020335 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8268851b-a2db-4a86-8179-288108e4af21-serving-cert\") pod \"route-controller-manager-6cdb6b9b54-78h4k\" (UID: \"8268851b-a2db-4a86-8179-288108e4af21\") " pod="openshift-route-controller-manager/route-controller-manager-6cdb6b9b54-78h4k"
Jan 28 15:51:56 crc kubenswrapper[4811]: I0128 15:51:56.029001 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqx4n\" (UniqueName: \"kubernetes.io/projected/8268851b-a2db-4a86-8179-288108e4af21-kube-api-access-lqx4n\") pod \"route-controller-manager-6cdb6b9b54-78h4k\" (UID: \"8268851b-a2db-4a86-8179-288108e4af21\") " pod="openshift-route-controller-manager/route-controller-manager-6cdb6b9b54-78h4k"
Jan 28 15:51:56 crc kubenswrapper[4811]: I0128 15:51:56.142652 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6cdb6b9b54-78h4k"
Jan 28 15:51:56 crc kubenswrapper[4811]: I0128 15:51:56.346047 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8099c49-c944-461f-b39b-5584c75e5f3c" path="/var/lib/kubelet/pods/b8099c49-c944-461f-b39b-5584c75e5f3c/volumes"
Jan 28 15:51:56 crc kubenswrapper[4811]: I0128 15:51:56.541401 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6cdb6b9b54-78h4k"]
Jan 28 15:51:56 crc kubenswrapper[4811]: I0128 15:51:56.789103 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6cdb6b9b54-78h4k" event={"ID":"8268851b-a2db-4a86-8179-288108e4af21","Type":"ContainerStarted","Data":"3dd334d4384e29bb595b22fd72aa07b78a27a78d2e3e5ec39c530b806d02e1c7"}
Jan 28 15:51:56 crc kubenswrapper[4811]: I0128 15:51:56.789146 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6cdb6b9b54-78h4k" event={"ID":"8268851b-a2db-4a86-8179-288108e4af21","Type":"ContainerStarted","Data":"a5e71d4431f0c4ea553c453bbbb65e03cd4eef5c4c403a0c7c2cebc1ab126d50"}
Jan 28 15:51:56 crc kubenswrapper[4811]: I0128 15:51:56.789453 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6cdb6b9b54-78h4k"
Jan 28 15:51:57 crc kubenswrapper[4811]: I0128 15:51:57.117899 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6cdb6b9b54-78h4k"
Jan 28 15:51:57 crc kubenswrapper[4811]: I0128 15:51:57.140297 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6cdb6b9b54-78h4k" podStartSLOduration=3.140276193 podStartE2EDuration="3.140276193s" podCreationTimestamp="2026-01-28 15:51:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:51:56.807746497 +0000 UTC m=+409.562110070" watchObservedRunningTime="2026-01-28 15:51:57.140276193 +0000 UTC m=+409.894639776"
containerName="registry-server" containerID="cri-o://04980a755a0f820730eae8de3394b50e556c335c612950a84e022c533f17667a" gracePeriod=30 Jan 28 15:52:02 crc kubenswrapper[4811]: I0128 15:52:02.655154 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-n2nwr"] Jan 28 15:52:02 crc kubenswrapper[4811]: I0128 15:52:02.656001 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-n2nwr" podUID="fcb7ac6d-c7f1-4c34-9805-e3333249b868" containerName="registry-server" containerID="cri-o://a71c600f431b6d81cc6dae14e1588c1b9991c8fc77eaf5c09738ae201faaf088" gracePeriod=30 Jan 28 15:52:02 crc kubenswrapper[4811]: I0128 15:52:02.663573 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-jvb2s"] Jan 28 15:52:02 crc kubenswrapper[4811]: I0128 15:52:02.663836 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-jvb2s" podUID="7b0b6c1f-a9c1-41dc-8495-7646a81b616b" containerName="marketplace-operator" containerID="cri-o://6653d9c87aa03acc2e877c7b624742f3a764f1d61816d0f5450145e5b1b9f880" gracePeriod=30 Jan 28 15:52:02 crc kubenswrapper[4811]: I0128 15:52:02.677476 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-zcd7b"] Jan 28 15:52:02 crc kubenswrapper[4811]: I0128 15:52:02.678405 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-zcd7b" Jan 28 15:52:02 crc kubenswrapper[4811]: I0128 15:52:02.687136 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mql9l"] Jan 28 15:52:02 crc kubenswrapper[4811]: I0128 15:52:02.687476 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mql9l" podUID="4bc51efa-36a8-4548-8497-5bc394e0de82" containerName="registry-server" containerID="cri-o://0dcdd77dc06325cbcc7ef92bc452d42e9889122acd892228819e1f17ac59d812" gracePeriod=30 Jan 28 15:52:02 crc kubenswrapper[4811]: I0128 15:52:02.691834 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-zcd7b"] Jan 28 15:52:02 crc kubenswrapper[4811]: I0128 15:52:02.699026 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-v759z"] Jan 28 15:52:02 crc kubenswrapper[4811]: I0128 15:52:02.699292 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-v759z" podUID="2195bb89-ac75-41a6-8e39-77506c50c101" containerName="registry-server" containerID="cri-o://72218ec5609a7f64e6796942b08af565a06acd4abfef5235c620cffef6f2ea9a" gracePeriod=30 Jan 28 15:52:02 crc kubenswrapper[4811]: I0128 15:52:02.813981 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/35fbe6f3-3900-4d40-9525-b171b73ea6b1-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-zcd7b\" (UID: \"35fbe6f3-3900-4d40-9525-b171b73ea6b1\") " pod="openshift-marketplace/marketplace-operator-79b997595-zcd7b" Jan 28 15:52:02 crc kubenswrapper[4811]: I0128 15:52:02.814480 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" 
(UniqueName: \"kubernetes.io/secret/35fbe6f3-3900-4d40-9525-b171b73ea6b1-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-zcd7b\" (UID: \"35fbe6f3-3900-4d40-9525-b171b73ea6b1\") " pod="openshift-marketplace/marketplace-operator-79b997595-zcd7b" Jan 28 15:52:02 crc kubenswrapper[4811]: I0128 15:52:02.814509 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p986q\" (UniqueName: \"kubernetes.io/projected/35fbe6f3-3900-4d40-9525-b171b73ea6b1-kube-api-access-p986q\") pod \"marketplace-operator-79b997595-zcd7b\" (UID: \"35fbe6f3-3900-4d40-9525-b171b73ea6b1\") " pod="openshift-marketplace/marketplace-operator-79b997595-zcd7b" Jan 28 15:52:02 crc kubenswrapper[4811]: I0128 15:52:02.840895 4811 generic.go:334] "Generic (PLEG): container finished" podID="87f8aef8-757b-4a73-9ee5-09751c3b7e92" containerID="04980a755a0f820730eae8de3394b50e556c335c612950a84e022c533f17667a" exitCode=0 Jan 28 15:52:02 crc kubenswrapper[4811]: I0128 15:52:02.840973 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-znvwk" event={"ID":"87f8aef8-757b-4a73-9ee5-09751c3b7e92","Type":"ContainerDied","Data":"04980a755a0f820730eae8de3394b50e556c335c612950a84e022c533f17667a"} Jan 28 15:52:02 crc kubenswrapper[4811]: I0128 15:52:02.842906 4811 generic.go:334] "Generic (PLEG): container finished" podID="7b0b6c1f-a9c1-41dc-8495-7646a81b616b" containerID="6653d9c87aa03acc2e877c7b624742f3a764f1d61816d0f5450145e5b1b9f880" exitCode=0 Jan 28 15:52:02 crc kubenswrapper[4811]: I0128 15:52:02.842970 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-jvb2s" event={"ID":"7b0b6c1f-a9c1-41dc-8495-7646a81b616b","Type":"ContainerDied","Data":"6653d9c87aa03acc2e877c7b624742f3a764f1d61816d0f5450145e5b1b9f880"} Jan 28 15:52:02 crc kubenswrapper[4811]: I0128 15:52:02.858916 4811 generic.go:334] "Generic (PLEG): container finished" podID="2195bb89-ac75-41a6-8e39-77506c50c101" containerID="72218ec5609a7f64e6796942b08af565a06acd4abfef5235c620cffef6f2ea9a" exitCode=0 Jan 28 15:52:02 crc kubenswrapper[4811]: I0128 15:52:02.858982 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v759z" event={"ID":"2195bb89-ac75-41a6-8e39-77506c50c101","Type":"ContainerDied","Data":"72218ec5609a7f64e6796942b08af565a06acd4abfef5235c620cffef6f2ea9a"} Jan 28 15:52:02 crc kubenswrapper[4811]: I0128 15:52:02.862877 4811 generic.go:334] "Generic (PLEG): container finished" podID="fcb7ac6d-c7f1-4c34-9805-e3333249b868" containerID="a71c600f431b6d81cc6dae14e1588c1b9991c8fc77eaf5c09738ae201faaf088" exitCode=0 Jan 28 15:52:02 crc kubenswrapper[4811]: I0128 15:52:02.862966 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n2nwr" event={"ID":"fcb7ac6d-c7f1-4c34-9805-e3333249b868","Type":"ContainerDied","Data":"a71c600f431b6d81cc6dae14e1588c1b9991c8fc77eaf5c09738ae201faaf088"} Jan 28 15:52:02 crc kubenswrapper[4811]: I0128 15:52:02.870850 4811 generic.go:334] "Generic (PLEG): container finished" podID="4bc51efa-36a8-4548-8497-5bc394e0de82" containerID="0dcdd77dc06325cbcc7ef92bc452d42e9889122acd892228819e1f17ac59d812" exitCode=0 Jan 28 15:52:02 crc kubenswrapper[4811]: I0128 15:52:02.870934 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mql9l" 
event={"ID":"4bc51efa-36a8-4548-8497-5bc394e0de82","Type":"ContainerDied","Data":"0dcdd77dc06325cbcc7ef92bc452d42e9889122acd892228819e1f17ac59d812"} Jan 28 15:52:02 crc kubenswrapper[4811]: I0128 15:52:02.916148 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/35fbe6f3-3900-4d40-9525-b171b73ea6b1-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-zcd7b\" (UID: \"35fbe6f3-3900-4d40-9525-b171b73ea6b1\") " pod="openshift-marketplace/marketplace-operator-79b997595-zcd7b" Jan 28 15:52:02 crc kubenswrapper[4811]: I0128 15:52:02.916223 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/35fbe6f3-3900-4d40-9525-b171b73ea6b1-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-zcd7b\" (UID: \"35fbe6f3-3900-4d40-9525-b171b73ea6b1\") " pod="openshift-marketplace/marketplace-operator-79b997595-zcd7b" Jan 28 15:52:02 crc kubenswrapper[4811]: I0128 15:52:02.916251 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p986q\" (UniqueName: \"kubernetes.io/projected/35fbe6f3-3900-4d40-9525-b171b73ea6b1-kube-api-access-p986q\") pod \"marketplace-operator-79b997595-zcd7b\" (UID: \"35fbe6f3-3900-4d40-9525-b171b73ea6b1\") " pod="openshift-marketplace/marketplace-operator-79b997595-zcd7b" Jan 28 15:52:02 crc kubenswrapper[4811]: I0128 15:52:02.918061 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/35fbe6f3-3900-4d40-9525-b171b73ea6b1-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-zcd7b\" (UID: \"35fbe6f3-3900-4d40-9525-b171b73ea6b1\") " pod="openshift-marketplace/marketplace-operator-79b997595-zcd7b" Jan 28 15:52:02 crc kubenswrapper[4811]: I0128 15:52:02.921737 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/35fbe6f3-3900-4d40-9525-b171b73ea6b1-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-zcd7b\" (UID: \"35fbe6f3-3900-4d40-9525-b171b73ea6b1\") " pod="openshift-marketplace/marketplace-operator-79b997595-zcd7b" Jan 28 15:52:02 crc kubenswrapper[4811]: I0128 15:52:02.936715 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p986q\" (UniqueName: \"kubernetes.io/projected/35fbe6f3-3900-4d40-9525-b171b73ea6b1-kube-api-access-p986q\") pod \"marketplace-operator-79b997595-zcd7b\" (UID: \"35fbe6f3-3900-4d40-9525-b171b73ea6b1\") " pod="openshift-marketplace/marketplace-operator-79b997595-zcd7b" Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.000186 4811 util.go:30] "No sandbox for pod can be found. 
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.000186 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-zcd7b"
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.086886 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.086937 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.086989 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6"
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.087554 4811 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4eb1075bef300aa6f4b9a078a39752fc2bb693e12743f902b96be95bfa041b2e"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.087605 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://4eb1075bef300aa6f4b9a078a39752fc2bb693e12743f902b96be95bfa041b2e" gracePeriod=600
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.135712 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-znvwk"
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.172071 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mql9l"
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.177657 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n2nwr"
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.182069 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-jvb2s"
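Note on "Killing container with a grace period ... gracePeriod=600" above: the failed liveness probe triggers a restart, which first asks the container to stop, waits up to the grace period for it to exit, and only then force-kills. A generic Go sketch of that flow; the stop/exited/forceKill hooks are hypothetical stand-ins, not real CRI calls:

package main

import (
	"context"
	"fmt"
	"time"
)

// killWithGracePeriod requests a polite stop, waits up to grace for the
// container to exit, and force-kills if the grace period expires first.
func killWithGracePeriod(stop func(), exited <-chan struct{}, forceKill func(), grace time.Duration) {
	stop() // SIGTERM-equivalent stop request
	ctx, cancel := context.WithTimeout(context.Background(), grace)
	defer cancel()
	select {
	case <-exited:
		fmt.Println("container exited within grace period")
	case <-ctx.Done():
		forceKill() // SIGKILL-equivalent after the grace period elapses
		fmt.Println("grace period expired; container force-killed")
	}
}

func main() {
	exited := make(chan struct{})
	stop := func() { close(exited) } // this fake container exits immediately on stop
	killWithGracePeriod(stop, exited, func() {}, 600*time.Second)
}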
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.195043 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v759z"
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.234848 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87f8aef8-757b-4a73-9ee5-09751c3b7e92-utilities\") pod \"87f8aef8-757b-4a73-9ee5-09751c3b7e92\" (UID: \"87f8aef8-757b-4a73-9ee5-09751c3b7e92\") "
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.235328 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5lwj7\" (UniqueName: \"kubernetes.io/projected/87f8aef8-757b-4a73-9ee5-09751c3b7e92-kube-api-access-5lwj7\") pod \"87f8aef8-757b-4a73-9ee5-09751c3b7e92\" (UID: \"87f8aef8-757b-4a73-9ee5-09751c3b7e92\") "
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.235372 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87f8aef8-757b-4a73-9ee5-09751c3b7e92-catalog-content\") pod \"87f8aef8-757b-4a73-9ee5-09751c3b7e92\" (UID: \"87f8aef8-757b-4a73-9ee5-09751c3b7e92\") "
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.237961 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/87f8aef8-757b-4a73-9ee5-09751c3b7e92-utilities" (OuterVolumeSpecName: "utilities") pod "87f8aef8-757b-4a73-9ee5-09751c3b7e92" (UID: "87f8aef8-757b-4a73-9ee5-09751c3b7e92"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.245708 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87f8aef8-757b-4a73-9ee5-09751c3b7e92-kube-api-access-5lwj7" (OuterVolumeSpecName: "kube-api-access-5lwj7") pod "87f8aef8-757b-4a73-9ee5-09751c3b7e92" (UID: "87f8aef8-757b-4a73-9ee5-09751c3b7e92"). InnerVolumeSpecName "kube-api-access-5lwj7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.300857 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/87f8aef8-757b-4a73-9ee5-09751c3b7e92-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "87f8aef8-757b-4a73-9ee5-09751c3b7e92" (UID: "87f8aef8-757b-4a73-9ee5-09751c3b7e92"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.345383 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2195bb89-ac75-41a6-8e39-77506c50c101-utilities\") pod \"2195bb89-ac75-41a6-8e39-77506c50c101\" (UID: \"2195bb89-ac75-41a6-8e39-77506c50c101\") "
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.346221 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2195bb89-ac75-41a6-8e39-77506c50c101-utilities" (OuterVolumeSpecName: "utilities") pod "2195bb89-ac75-41a6-8e39-77506c50c101" (UID: "2195bb89-ac75-41a6-8e39-77506c50c101"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.346917 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-zcd7b"]
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.347001 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dfd4c\" (UniqueName: \"kubernetes.io/projected/4bc51efa-36a8-4548-8497-5bc394e0de82-kube-api-access-dfd4c\") pod \"4bc51efa-36a8-4548-8497-5bc394e0de82\" (UID: \"4bc51efa-36a8-4548-8497-5bc394e0de82\") "
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.347675 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7b0b6c1f-a9c1-41dc-8495-7646a81b616b-marketplace-operator-metrics\") pod \"7b0b6c1f-a9c1-41dc-8495-7646a81b616b\" (UID: \"7b0b6c1f-a9c1-41dc-8495-7646a81b616b\") "
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.347881 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g8nl5\" (UniqueName: \"kubernetes.io/projected/2195bb89-ac75-41a6-8e39-77506c50c101-kube-api-access-g8nl5\") pod \"2195bb89-ac75-41a6-8e39-77506c50c101\" (UID: \"2195bb89-ac75-41a6-8e39-77506c50c101\") "
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.347938 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcb7ac6d-c7f1-4c34-9805-e3333249b868-catalog-content\") pod \"fcb7ac6d-c7f1-4c34-9805-e3333249b868\" (UID: \"fcb7ac6d-c7f1-4c34-9805-e3333249b868\") "
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.347973 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bc51efa-36a8-4548-8497-5bc394e0de82-utilities\") pod \"4bc51efa-36a8-4548-8497-5bc394e0de82\" (UID: \"4bc51efa-36a8-4548-8497-5bc394e0de82\") "
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.348019 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2195bb89-ac75-41a6-8e39-77506c50c101-catalog-content\") pod \"2195bb89-ac75-41a6-8e39-77506c50c101\" (UID: \"2195bb89-ac75-41a6-8e39-77506c50c101\") "
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.348044 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bc51efa-36a8-4548-8497-5bc394e0de82-catalog-content\") pod \"4bc51efa-36a8-4548-8497-5bc394e0de82\" (UID: \"4bc51efa-36a8-4548-8497-5bc394e0de82\") "
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.348071 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74wxb\" (UniqueName: \"kubernetes.io/projected/fcb7ac6d-c7f1-4c34-9805-e3333249b868-kube-api-access-74wxb\") pod \"fcb7ac6d-c7f1-4c34-9805-e3333249b868\" (UID: \"fcb7ac6d-c7f1-4c34-9805-e3333249b868\") "
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.348103 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7b0b6c1f-a9c1-41dc-8495-7646a81b616b-marketplace-trusted-ca\") pod \"7b0b6c1f-a9c1-41dc-8495-7646a81b616b\" (UID: \"7b0b6c1f-a9c1-41dc-8495-7646a81b616b\") "
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.348140 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcb7ac6d-c7f1-4c34-9805-e3333249b868-utilities\") pod \"fcb7ac6d-c7f1-4c34-9805-e3333249b868\" (UID: \"fcb7ac6d-c7f1-4c34-9805-e3333249b868\") "
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.348159 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f25hd\" (UniqueName: \"kubernetes.io/projected/7b0b6c1f-a9c1-41dc-8495-7646a81b616b-kube-api-access-f25hd\") pod \"7b0b6c1f-a9c1-41dc-8495-7646a81b616b\" (UID: \"7b0b6c1f-a9c1-41dc-8495-7646a81b616b\") "
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.348510 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2195bb89-ac75-41a6-8e39-77506c50c101-utilities\") on node \"crc\" DevicePath \"\""
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.348527 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5lwj7\" (UniqueName: \"kubernetes.io/projected/87f8aef8-757b-4a73-9ee5-09751c3b7e92-kube-api-access-5lwj7\") on node \"crc\" DevicePath \"\""
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.348541 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87f8aef8-757b-4a73-9ee5-09751c3b7e92-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.348551 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87f8aef8-757b-4a73-9ee5-09751c3b7e92-utilities\") on node \"crc\" DevicePath \"\""
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.351659 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4bc51efa-36a8-4548-8497-5bc394e0de82-utilities" (OuterVolumeSpecName: "utilities") pod "4bc51efa-36a8-4548-8497-5bc394e0de82" (UID: "4bc51efa-36a8-4548-8497-5bc394e0de82"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.352176 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fcb7ac6d-c7f1-4c34-9805-e3333249b868-utilities" (OuterVolumeSpecName: "utilities") pod "fcb7ac6d-c7f1-4c34-9805-e3333249b868" (UID: "fcb7ac6d-c7f1-4c34-9805-e3333249b868"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.352601 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b0b6c1f-a9c1-41dc-8495-7646a81b616b-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "7b0b6c1f-a9c1-41dc-8495-7646a81b616b" (UID: "7b0b6c1f-a9c1-41dc-8495-7646a81b616b"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.353482 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b0b6c1f-a9c1-41dc-8495-7646a81b616b-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "7b0b6c1f-a9c1-41dc-8495-7646a81b616b" (UID: "7b0b6c1f-a9c1-41dc-8495-7646a81b616b"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.354668 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bc51efa-36a8-4548-8497-5bc394e0de82-kube-api-access-dfd4c" (OuterVolumeSpecName: "kube-api-access-dfd4c") pod "4bc51efa-36a8-4548-8497-5bc394e0de82" (UID: "4bc51efa-36a8-4548-8497-5bc394e0de82"). InnerVolumeSpecName "kube-api-access-dfd4c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.355247 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b0b6c1f-a9c1-41dc-8495-7646a81b616b-kube-api-access-f25hd" (OuterVolumeSpecName: "kube-api-access-f25hd") pod "7b0b6c1f-a9c1-41dc-8495-7646a81b616b" (UID: "7b0b6c1f-a9c1-41dc-8495-7646a81b616b"). InnerVolumeSpecName "kube-api-access-f25hd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.356108 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcb7ac6d-c7f1-4c34-9805-e3333249b868-kube-api-access-74wxb" (OuterVolumeSpecName: "kube-api-access-74wxb") pod "fcb7ac6d-c7f1-4c34-9805-e3333249b868" (UID: "fcb7ac6d-c7f1-4c34-9805-e3333249b868"). InnerVolumeSpecName "kube-api-access-74wxb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.356705 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2195bb89-ac75-41a6-8e39-77506c50c101-kube-api-access-g8nl5" (OuterVolumeSpecName: "kube-api-access-g8nl5") pod "2195bb89-ac75-41a6-8e39-77506c50c101" (UID: "2195bb89-ac75-41a6-8e39-77506c50c101"). InnerVolumeSpecName "kube-api-access-g8nl5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.379135 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4bc51efa-36a8-4548-8497-5bc394e0de82-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4bc51efa-36a8-4548-8497-5bc394e0de82" (UID: "4bc51efa-36a8-4548-8497-5bc394e0de82"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.405252 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fcb7ac6d-c7f1-4c34-9805-e3333249b868-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fcb7ac6d-c7f1-4c34-9805-e3333249b868" (UID: "fcb7ac6d-c7f1-4c34-9805-e3333249b868"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.449650 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bc51efa-36a8-4548-8497-5bc394e0de82-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.450290 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74wxb\" (UniqueName: \"kubernetes.io/projected/fcb7ac6d-c7f1-4c34-9805-e3333249b868-kube-api-access-74wxb\") on node \"crc\" DevicePath \"\""
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.450311 4811 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7b0b6c1f-a9c1-41dc-8495-7646a81b616b-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\""
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.450326 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcb7ac6d-c7f1-4c34-9805-e3333249b868-utilities\") on node \"crc\" DevicePath \"\""
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.450339 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f25hd\" (UniqueName: \"kubernetes.io/projected/7b0b6c1f-a9c1-41dc-8495-7646a81b616b-kube-api-access-f25hd\") on node \"crc\" DevicePath \"\""
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.450350 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dfd4c\" (UniqueName: \"kubernetes.io/projected/4bc51efa-36a8-4548-8497-5bc394e0de82-kube-api-access-dfd4c\") on node \"crc\" DevicePath \"\""
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.450360 4811 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7b0b6c1f-a9c1-41dc-8495-7646a81b616b-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\""
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.450371 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g8nl5\" (UniqueName: \"kubernetes.io/projected/2195bb89-ac75-41a6-8e39-77506c50c101-kube-api-access-g8nl5\") on node \"crc\" DevicePath \"\""
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.450383 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcb7ac6d-c7f1-4c34-9805-e3333249b868-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.450392 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bc51efa-36a8-4548-8497-5bc394e0de82-utilities\") on node \"crc\" DevicePath \"\""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.551577 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2195bb89-ac75-41a6-8e39-77506c50c101-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.877387 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-znvwk" event={"ID":"87f8aef8-757b-4a73-9ee5-09751c3b7e92","Type":"ContainerDied","Data":"5c6fb20fd4486237ddee39f536f173a7671451e7d5bdf115f23441a80ae06c45"} Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.877751 4811 scope.go:117] "RemoveContainer" containerID="04980a755a0f820730eae8de3394b50e556c335c612950a84e022c533f17667a" Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.877654 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-znvwk" Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.879411 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-zcd7b" event={"ID":"35fbe6f3-3900-4d40-9525-b171b73ea6b1","Type":"ContainerStarted","Data":"b6d9329b8e47516515cf04bb7f4ed2ab452a7b981a6324f82b5ce8f3daa31a8b"} Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.879458 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-zcd7b" event={"ID":"35fbe6f3-3900-4d40-9525-b171b73ea6b1","Type":"ContainerStarted","Data":"a7ef726c2a771afa2100f20746762c5b6601c6191404f6260c1f6839f6e95332"} Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.879833 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-zcd7b" Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.882173 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-jvb2s" event={"ID":"7b0b6c1f-a9c1-41dc-8495-7646a81b616b","Type":"ContainerDied","Data":"e6e63e170befe90a817ef759d8c0d030c60277a4bc872ffc07f2e54e5c4c6fde"} Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.882185 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-jvb2s" Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.884812 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-zcd7b" Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.885367 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="4eb1075bef300aa6f4b9a078a39752fc2bb693e12743f902b96be95bfa041b2e" exitCode=0 Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.885389 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"4eb1075bef300aa6f4b9a078a39752fc2bb693e12743f902b96be95bfa041b2e"} Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.885418 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"2f52ccc0fd9df1dd9ba999c3d6271df950a17bf565b0226b513fdf0e88b0f8fc"} Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.892311 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v759z" event={"ID":"2195bb89-ac75-41a6-8e39-77506c50c101","Type":"ContainerDied","Data":"3fddb4cb5d7dcfdf2bf89ff773a45ed240a826a4bdae9b0783eab49a47c7c6cf"} Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.892501 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v759z" Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.895883 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n2nwr" event={"ID":"fcb7ac6d-c7f1-4c34-9805-e3333249b868","Type":"ContainerDied","Data":"77676292929d2e8e75157235a6425b9fd1b69a7252298d785863cb1ec9b5e0b0"} Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.896029 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n2nwr" Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.899355 4811 scope.go:117] "RemoveContainer" containerID="19b9e50cff77e69ab40bb3e8e544d607c3b8ea26583c95ba6a1856d1b5c100c1" Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.899892 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mql9l" event={"ID":"4bc51efa-36a8-4548-8497-5bc394e0de82","Type":"ContainerDied","Data":"e6e8b43d233cf622c3c44f305c6cab9f322f2f10e4951c2c7247179eef5140e1"} Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.905447 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mql9l" Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.913257 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-zcd7b" podStartSLOduration=1.913238998 podStartE2EDuration="1.913238998s" podCreationTimestamp="2026-01-28 15:52:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:52:03.907989664 +0000 UTC m=+416.662353267" watchObservedRunningTime="2026-01-28 15:52:03.913238998 +0000 UTC m=+416.667602571" Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.918670 4811 scope.go:117] "RemoveContainer" containerID="a8aa3039e298be459caf9356028ad13f97f7f1d5712eec3b61c2cec06187e3c7" Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.927025 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-znvwk"] Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.931447 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-znvwk"] Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.947515 4811 scope.go:117] "RemoveContainer" containerID="6653d9c87aa03acc2e877c7b624742f3a764f1d61816d0f5450145e5b1b9f880" Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.987903 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-n2nwr"] Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.992166 4811 scope.go:117] "RemoveContainer" containerID="86a9f66e055e8e065f35cc51f5630c08b2399a4ee293ba71c846f4928d292db7" Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.993387 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-n2nwr"] Jan 28 15:52:03 crc kubenswrapper[4811]: I0128 15:52:03.999026 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mql9l"] Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.002311 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mql9l"] Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.005652 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-jvb2s"] Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.008861 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-jvb2s"] Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.012731 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-v759z"] Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.015514 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-v759z"] Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.019896 4811 scope.go:117] "RemoveContainer" containerID="72218ec5609a7f64e6796942b08af565a06acd4abfef5235c620cffef6f2ea9a" Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.043487 4811 scope.go:117] "RemoveContainer" containerID="ffcdcc3e4fedf8b3b4b6ff6008cac4e474be2d223df97c8cfc7e0f4664c6f447" Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.062722 4811 scope.go:117] "RemoveContainer" containerID="3ec267fb51f00ed9e056053ed2175509de5db7e2491aa0fb80809e5c848a9baa" Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.079715 4811 
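The PLEG lines above ("Generic (PLEG): container finished", followed by ContainerDied/ContainerStarted events, e.g. for machine-config-daemon-4vcr6) are produced by comparing successive snapshots of runtime state and emitting one event per observed transition. A toy relist diff showing how a single snapshot comparison yields both event types; schematic only, not kubelet's PLEG:

package main

import "fmt"

// state maps containerID to a coarse lifecycle phase.
type state map[string]string // "running" | "exited"

// diff compares two snapshots and emits one event per transition.
func diff(old, cur state) []string {
	var events []string
	for id, s := range cur {
		prev, seen := old[id]
		switch {
		case !seen && s == "running":
			events = append(events, "ContainerStarted "+id)
		case seen && prev == "running" && s == "exited":
			events = append(events, "ContainerDied "+id)
		}
	}
	return events
}

func main() {
	// Mirrors the machine-config-daemon restart above: the old container
	// exits and its replacement starts between two relists.
	old := state{"4eb1075b": "running"}
	cur := state{"4eb1075b": "exited", "2f52ccc0": "running"}
	for _, e := range diff(old, cur) {
		fmt.Println(e) // ContainerDied 4eb1075b and ContainerStarted 2f52ccc0, in map order
	}
}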
Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.099416 4811 scope.go:117] "RemoveContainer" containerID="87bb222eb4a487252106f56f9f33659aa1666ca5be60e4d76b17f36363c9b9e5"
Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.112185 4811 scope.go:117] "RemoveContainer" containerID="bcbac9d59106e06fdfb0529cf9fad9752eac012c0ddd4b0bd12ea4324c9d2641"
Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.124700 4811 scope.go:117] "RemoveContainer" containerID="0dcdd77dc06325cbcc7ef92bc452d42e9889122acd892228819e1f17ac59d812"
Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.136835 4811 scope.go:117] "RemoveContainer" containerID="e2729603d1592c3ad45d98c17e97e316b669e821e1bd642303f23239c14837ef"
Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.149265 4811 scope.go:117] "RemoveContainer" containerID="31a8e614278a68693577f9947ed387f5461fc2275375a8e9c9f56799ca085a51"
Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.352154 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2195bb89-ac75-41a6-8e39-77506c50c101" path="/var/lib/kubelet/pods/2195bb89-ac75-41a6-8e39-77506c50c101/volumes"
Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.352854 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bc51efa-36a8-4548-8497-5bc394e0de82" path="/var/lib/kubelet/pods/4bc51efa-36a8-4548-8497-5bc394e0de82/volumes"
Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.353755 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b0b6c1f-a9c1-41dc-8495-7646a81b616b" path="/var/lib/kubelet/pods/7b0b6c1f-a9c1-41dc-8495-7646a81b616b/volumes"
Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.354853 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87f8aef8-757b-4a73-9ee5-09751c3b7e92" path="/var/lib/kubelet/pods/87f8aef8-757b-4a73-9ee5-09751c3b7e92/volumes"
Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.355770 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fcb7ac6d-c7f1-4c34-9805-e3333249b868" path="/var/lib/kubelet/pods/fcb7ac6d-c7f1-4c34-9805-e3333249b868/volumes"
Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.865719 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tkqqj"]
Jan 28 15:52:04 crc kubenswrapper[4811]: E0128 15:52:04.866468 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bc51efa-36a8-4548-8497-5bc394e0de82" containerName="extract-utilities"
Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.866492 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bc51efa-36a8-4548-8497-5bc394e0de82" containerName="extract-utilities"
Jan 28 15:52:04 crc kubenswrapper[4811]: E0128 15:52:04.866507 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcb7ac6d-c7f1-4c34-9805-e3333249b868" containerName="registry-server"
Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.866516 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcb7ac6d-c7f1-4c34-9805-e3333249b868" containerName="registry-server"
Jan 28 15:52:04 crc kubenswrapper[4811]: E0128 15:52:04.866532 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87f8aef8-757b-4a73-9ee5-09751c3b7e92" containerName="extract-content"
Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.866540 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="87f8aef8-757b-4a73-9ee5-09751c3b7e92" containerName="extract-content"
podUID="87f8aef8-757b-4a73-9ee5-09751c3b7e92" containerName="extract-content" Jan 28 15:52:04 crc kubenswrapper[4811]: E0128 15:52:04.866550 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2195bb89-ac75-41a6-8e39-77506c50c101" containerName="registry-server" Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.866557 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="2195bb89-ac75-41a6-8e39-77506c50c101" containerName="registry-server" Jan 28 15:52:04 crc kubenswrapper[4811]: E0128 15:52:04.866569 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87f8aef8-757b-4a73-9ee5-09751c3b7e92" containerName="extract-utilities" Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.866577 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="87f8aef8-757b-4a73-9ee5-09751c3b7e92" containerName="extract-utilities" Jan 28 15:52:04 crc kubenswrapper[4811]: E0128 15:52:04.866587 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87f8aef8-757b-4a73-9ee5-09751c3b7e92" containerName="registry-server" Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.866594 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="87f8aef8-757b-4a73-9ee5-09751c3b7e92" containerName="registry-server" Jan 28 15:52:04 crc kubenswrapper[4811]: E0128 15:52:04.866608 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2195bb89-ac75-41a6-8e39-77506c50c101" containerName="extract-utilities" Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.866616 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="2195bb89-ac75-41a6-8e39-77506c50c101" containerName="extract-utilities" Jan 28 15:52:04 crc kubenswrapper[4811]: E0128 15:52:04.866625 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bc51efa-36a8-4548-8497-5bc394e0de82" containerName="registry-server" Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.866632 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bc51efa-36a8-4548-8497-5bc394e0de82" containerName="registry-server" Jan 28 15:52:04 crc kubenswrapper[4811]: E0128 15:52:04.866643 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2195bb89-ac75-41a6-8e39-77506c50c101" containerName="extract-content" Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.866651 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="2195bb89-ac75-41a6-8e39-77506c50c101" containerName="extract-content" Jan 28 15:52:04 crc kubenswrapper[4811]: E0128 15:52:04.866661 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcb7ac6d-c7f1-4c34-9805-e3333249b868" containerName="extract-utilities" Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.866668 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcb7ac6d-c7f1-4c34-9805-e3333249b868" containerName="extract-utilities" Jan 28 15:52:04 crc kubenswrapper[4811]: E0128 15:52:04.866679 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b0b6c1f-a9c1-41dc-8495-7646a81b616b" containerName="marketplace-operator" Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.866685 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b0b6c1f-a9c1-41dc-8495-7646a81b616b" containerName="marketplace-operator" Jan 28 15:52:04 crc kubenswrapper[4811]: E0128 15:52:04.866693 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcb7ac6d-c7f1-4c34-9805-e3333249b868" containerName="extract-content" Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.866698 4811 
Jan 28 15:52:04 crc kubenswrapper[4811]: E0128 15:52:04.866704 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bc51efa-36a8-4548-8497-5bc394e0de82" containerName="extract-content"
Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.866709 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bc51efa-36a8-4548-8497-5bc394e0de82" containerName="extract-content"
Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.866790 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="4bc51efa-36a8-4548-8497-5bc394e0de82" containerName="registry-server"
Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.867577 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b0b6c1f-a9c1-41dc-8495-7646a81b616b" containerName="marketplace-operator"
Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.867634 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="87f8aef8-757b-4a73-9ee5-09751c3b7e92" containerName="registry-server"
Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.867657 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcb7ac6d-c7f1-4c34-9805-e3333249b868" containerName="registry-server"
Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.867673 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="2195bb89-ac75-41a6-8e39-77506c50c101" containerName="registry-server"
Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.868775 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tkqqj"
Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.873312 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Jan 28 15:52:04 crc kubenswrapper[4811]: I0128 15:52:04.878043 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tkqqj"]
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.060530 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-spdrn"]
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.061485 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-spdrn"
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.064062 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.069612 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwx7c\" (UniqueName: \"kubernetes.io/projected/38348412-bdb3-4e83-aa59-cb46112a648e-kube-api-access-vwx7c\") pod \"redhat-marketplace-tkqqj\" (UID: \"38348412-bdb3-4e83-aa59-cb46112a648e\") " pod="openshift-marketplace/redhat-marketplace-tkqqj"
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.070216 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38348412-bdb3-4e83-aa59-cb46112a648e-catalog-content\") pod \"redhat-marketplace-tkqqj\" (UID: \"38348412-bdb3-4e83-aa59-cb46112a648e\") " pod="openshift-marketplace/redhat-marketplace-tkqqj"
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.070261 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38348412-bdb3-4e83-aa59-cb46112a648e-utilities\") pod \"redhat-marketplace-tkqqj\" (UID: \"38348412-bdb3-4e83-aa59-cb46112a648e\") " pod="openshift-marketplace/redhat-marketplace-tkqqj"
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.073130 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-spdrn"]
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.171292 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwx7c\" (UniqueName: \"kubernetes.io/projected/38348412-bdb3-4e83-aa59-cb46112a648e-kube-api-access-vwx7c\") pod \"redhat-marketplace-tkqqj\" (UID: \"38348412-bdb3-4e83-aa59-cb46112a648e\") " pod="openshift-marketplace/redhat-marketplace-tkqqj"
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.171348 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70389e06-5bb1-421b-a403-7f1c5147bedb-utilities\") pod \"certified-operators-spdrn\" (UID: \"70389e06-5bb1-421b-a403-7f1c5147bedb\") " pod="openshift-marketplace/certified-operators-spdrn"
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.171379 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38348412-bdb3-4e83-aa59-cb46112a648e-catalog-content\") pod \"redhat-marketplace-tkqqj\" (UID: \"38348412-bdb3-4e83-aa59-cb46112a648e\") " pod="openshift-marketplace/redhat-marketplace-tkqqj"
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.171407 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38348412-bdb3-4e83-aa59-cb46112a648e-utilities\") pod \"redhat-marketplace-tkqqj\" (UID: \"38348412-bdb3-4e83-aa59-cb46112a648e\") " pod="openshift-marketplace/redhat-marketplace-tkqqj"
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.171452 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnz62\" (UniqueName: \"kubernetes.io/projected/70389e06-5bb1-421b-a403-7f1c5147bedb-kube-api-access-hnz62\") pod \"certified-operators-spdrn\" (UID: \"70389e06-5bb1-421b-a403-7f1c5147bedb\") " pod="openshift-marketplace/certified-operators-spdrn"
\"kubernetes.io/projected/70389e06-5bb1-421b-a403-7f1c5147bedb-kube-api-access-hnz62\") pod \"certified-operators-spdrn\" (UID: \"70389e06-5bb1-421b-a403-7f1c5147bedb\") " pod="openshift-marketplace/certified-operators-spdrn" Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.171473 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70389e06-5bb1-421b-a403-7f1c5147bedb-catalog-content\") pod \"certified-operators-spdrn\" (UID: \"70389e06-5bb1-421b-a403-7f1c5147bedb\") " pod="openshift-marketplace/certified-operators-spdrn" Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.171838 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38348412-bdb3-4e83-aa59-cb46112a648e-catalog-content\") pod \"redhat-marketplace-tkqqj\" (UID: \"38348412-bdb3-4e83-aa59-cb46112a648e\") " pod="openshift-marketplace/redhat-marketplace-tkqqj" Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.171897 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38348412-bdb3-4e83-aa59-cb46112a648e-utilities\") pod \"redhat-marketplace-tkqqj\" (UID: \"38348412-bdb3-4e83-aa59-cb46112a648e\") " pod="openshift-marketplace/redhat-marketplace-tkqqj" Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.191247 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwx7c\" (UniqueName: \"kubernetes.io/projected/38348412-bdb3-4e83-aa59-cb46112a648e-kube-api-access-vwx7c\") pod \"redhat-marketplace-tkqqj\" (UID: \"38348412-bdb3-4e83-aa59-cb46112a648e\") " pod="openshift-marketplace/redhat-marketplace-tkqqj" Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.201061 4811 util.go:30] "No sandbox for pod can be found. 
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.272706 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70389e06-5bb1-421b-a403-7f1c5147bedb-utilities\") pod \"certified-operators-spdrn\" (UID: \"70389e06-5bb1-421b-a403-7f1c5147bedb\") " pod="openshift-marketplace/certified-operators-spdrn"
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.272781 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnz62\" (UniqueName: \"kubernetes.io/projected/70389e06-5bb1-421b-a403-7f1c5147bedb-kube-api-access-hnz62\") pod \"certified-operators-spdrn\" (UID: \"70389e06-5bb1-421b-a403-7f1c5147bedb\") " pod="openshift-marketplace/certified-operators-spdrn"
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.272810 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70389e06-5bb1-421b-a403-7f1c5147bedb-catalog-content\") pod \"certified-operators-spdrn\" (UID: \"70389e06-5bb1-421b-a403-7f1c5147bedb\") " pod="openshift-marketplace/certified-operators-spdrn"
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.273726 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70389e06-5bb1-421b-a403-7f1c5147bedb-utilities\") pod \"certified-operators-spdrn\" (UID: \"70389e06-5bb1-421b-a403-7f1c5147bedb\") " pod="openshift-marketplace/certified-operators-spdrn"
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.273762 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70389e06-5bb1-421b-a403-7f1c5147bedb-catalog-content\") pod \"certified-operators-spdrn\" (UID: \"70389e06-5bb1-421b-a403-7f1c5147bedb\") " pod="openshift-marketplace/certified-operators-spdrn"
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.293644 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnz62\" (UniqueName: \"kubernetes.io/projected/70389e06-5bb1-421b-a403-7f1c5147bedb-kube-api-access-hnz62\") pod \"certified-operators-spdrn\" (UID: \"70389e06-5bb1-421b-a403-7f1c5147bedb\") " pod="openshift-marketplace/certified-operators-spdrn"
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.385819 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-spdrn"
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.581549 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tkqqj"]
Jan 28 15:52:05 crc kubenswrapper[4811]: W0128 15:52:05.588513 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod38348412_bdb3_4e83_aa59_cb46112a648e.slice/crio-f34cb0f0e6115b7d04f325722a0e2a102225cbdd4d2cf54147fd56aa22a6da0e WatchSource:0}: Error finding container f34cb0f0e6115b7d04f325722a0e2a102225cbdd4d2cf54147fd56aa22a6da0e: Status 404 returned error can't find the container with id f34cb0f0e6115b7d04f325722a0e2a102225cbdd4d2cf54147fd56aa22a6da0e
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.779682 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-spdrn"]
Jan 28 15:52:05 crc kubenswrapper[4811]: W0128 15:52:05.790318 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod70389e06_5bb1_421b_a403_7f1c5147bedb.slice/crio-5afda4beb0f316d4cbbcf7020000e0f02fe7656fcf87d77d46674e61255cdf02 WatchSource:0}: Error finding container 5afda4beb0f316d4cbbcf7020000e0f02fe7656fcf87d77d46674e61255cdf02: Status 404 returned error can't find the container with id 5afda4beb0f316d4cbbcf7020000e0f02fe7656fcf87d77d46674e61255cdf02
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.816712 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-bflx2"]
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.817490 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-bflx2"
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.835263 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-bflx2"]
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.922372 4811 generic.go:334] "Generic (PLEG): container finished" podID="38348412-bdb3-4e83-aa59-cb46112a648e" containerID="0ef7102029546bf382e912b0f33b9df78a5e3f221a283d53721552bb4b8473e0" exitCode=0
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.922444 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tkqqj" event={"ID":"38348412-bdb3-4e83-aa59-cb46112a648e","Type":"ContainerDied","Data":"0ef7102029546bf382e912b0f33b9df78a5e3f221a283d53721552bb4b8473e0"}
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.922498 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tkqqj" event={"ID":"38348412-bdb3-4e83-aa59-cb46112a648e","Type":"ContainerStarted","Data":"f34cb0f0e6115b7d04f325722a0e2a102225cbdd4d2cf54147fd56aa22a6da0e"}
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.923309 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-spdrn" event={"ID":"70389e06-5bb1-421b-a403-7f1c5147bedb","Type":"ContainerStarted","Data":"5afda4beb0f316d4cbbcf7020000e0f02fe7656fcf87d77d46674e61255cdf02"}
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.987297 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3bab724d-709a-4df7-adb0-10d556185cf4-bound-sa-token\") pod \"image-registry-66df7c8f76-bflx2\" (UID: \"3bab724d-709a-4df7-adb0-10d556185cf4\") " pod="openshift-image-registry/image-registry-66df7c8f76-bflx2"
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.987359 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/3bab724d-709a-4df7-adb0-10d556185cf4-installation-pull-secrets\") pod \"image-registry-66df7c8f76-bflx2\" (UID: \"3bab724d-709a-4df7-adb0-10d556185cf4\") " pod="openshift-image-registry/image-registry-66df7c8f76-bflx2"
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.987384 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/3bab724d-709a-4df7-adb0-10d556185cf4-registry-certificates\") pod \"image-registry-66df7c8f76-bflx2\" (UID: \"3bab724d-709a-4df7-adb0-10d556185cf4\") " pod="openshift-image-registry/image-registry-66df7c8f76-bflx2"
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.987400 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p9zx8\" (UniqueName: \"kubernetes.io/projected/3bab724d-709a-4df7-adb0-10d556185cf4-kube-api-access-p9zx8\") pod \"image-registry-66df7c8f76-bflx2\" (UID: \"3bab724d-709a-4df7-adb0-10d556185cf4\") " pod="openshift-image-registry/image-registry-66df7c8f76-bflx2"
Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.987460 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-bflx2\" (UID: \"3bab724d-709a-4df7-adb0-10d556185cf4\") " pod="openshift-image-registry/image-registry-66df7c8f76-bflx2"
pod \"image-registry-66df7c8f76-bflx2\" (UID: \"3bab724d-709a-4df7-adb0-10d556185cf4\") " pod="openshift-image-registry/image-registry-66df7c8f76-bflx2" Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.987498 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3bab724d-709a-4df7-adb0-10d556185cf4-trusted-ca\") pod \"image-registry-66df7c8f76-bflx2\" (UID: \"3bab724d-709a-4df7-adb0-10d556185cf4\") " pod="openshift-image-registry/image-registry-66df7c8f76-bflx2" Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.987531 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/3bab724d-709a-4df7-adb0-10d556185cf4-registry-tls\") pod \"image-registry-66df7c8f76-bflx2\" (UID: \"3bab724d-709a-4df7-adb0-10d556185cf4\") " pod="openshift-image-registry/image-registry-66df7c8f76-bflx2" Jan 28 15:52:05 crc kubenswrapper[4811]: I0128 15:52:05.987553 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/3bab724d-709a-4df7-adb0-10d556185cf4-ca-trust-extracted\") pod \"image-registry-66df7c8f76-bflx2\" (UID: \"3bab724d-709a-4df7-adb0-10d556185cf4\") " pod="openshift-image-registry/image-registry-66df7c8f76-bflx2" Jan 28 15:52:06 crc kubenswrapper[4811]: I0128 15:52:06.018973 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-bflx2\" (UID: \"3bab724d-709a-4df7-adb0-10d556185cf4\") " pod="openshift-image-registry/image-registry-66df7c8f76-bflx2" Jan 28 15:52:06 crc kubenswrapper[4811]: I0128 15:52:06.089498 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/3bab724d-709a-4df7-adb0-10d556185cf4-registry-certificates\") pod \"image-registry-66df7c8f76-bflx2\" (UID: \"3bab724d-709a-4df7-adb0-10d556185cf4\") " pod="openshift-image-registry/image-registry-66df7c8f76-bflx2" Jan 28 15:52:06 crc kubenswrapper[4811]: I0128 15:52:06.089536 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p9zx8\" (UniqueName: \"kubernetes.io/projected/3bab724d-709a-4df7-adb0-10d556185cf4-kube-api-access-p9zx8\") pod \"image-registry-66df7c8f76-bflx2\" (UID: \"3bab724d-709a-4df7-adb0-10d556185cf4\") " pod="openshift-image-registry/image-registry-66df7c8f76-bflx2" Jan 28 15:52:06 crc kubenswrapper[4811]: I0128 15:52:06.089563 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3bab724d-709a-4df7-adb0-10d556185cf4-trusted-ca\") pod \"image-registry-66df7c8f76-bflx2\" (UID: \"3bab724d-709a-4df7-adb0-10d556185cf4\") " pod="openshift-image-registry/image-registry-66df7c8f76-bflx2" Jan 28 15:52:06 crc kubenswrapper[4811]: I0128 15:52:06.089607 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/3bab724d-709a-4df7-adb0-10d556185cf4-registry-tls\") pod \"image-registry-66df7c8f76-bflx2\" (UID: \"3bab724d-709a-4df7-adb0-10d556185cf4\") " pod="openshift-image-registry/image-registry-66df7c8f76-bflx2" Jan 28 15:52:06 
Jan 28 15:52:06 crc kubenswrapper[4811]: I0128 15:52:06.089625 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/3bab724d-709a-4df7-adb0-10d556185cf4-ca-trust-extracted\") pod \"image-registry-66df7c8f76-bflx2\" (UID: \"3bab724d-709a-4df7-adb0-10d556185cf4\") " pod="openshift-image-registry/image-registry-66df7c8f76-bflx2"
Jan 28 15:52:06 crc kubenswrapper[4811]: I0128 15:52:06.089684 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3bab724d-709a-4df7-adb0-10d556185cf4-bound-sa-token\") pod \"image-registry-66df7c8f76-bflx2\" (UID: \"3bab724d-709a-4df7-adb0-10d556185cf4\") " pod="openshift-image-registry/image-registry-66df7c8f76-bflx2"
Jan 28 15:52:06 crc kubenswrapper[4811]: I0128 15:52:06.089753 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/3bab724d-709a-4df7-adb0-10d556185cf4-installation-pull-secrets\") pod \"image-registry-66df7c8f76-bflx2\" (UID: \"3bab724d-709a-4df7-adb0-10d556185cf4\") " pod="openshift-image-registry/image-registry-66df7c8f76-bflx2"
Jan 28 15:52:06 crc kubenswrapper[4811]: I0128 15:52:06.090816 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3bab724d-709a-4df7-adb0-10d556185cf4-trusted-ca\") pod \"image-registry-66df7c8f76-bflx2\" (UID: \"3bab724d-709a-4df7-adb0-10d556185cf4\") " pod="openshift-image-registry/image-registry-66df7c8f76-bflx2"
Jan 28 15:52:06 crc kubenswrapper[4811]: I0128 15:52:06.090858 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/3bab724d-709a-4df7-adb0-10d556185cf4-ca-trust-extracted\") pod \"image-registry-66df7c8f76-bflx2\" (UID: \"3bab724d-709a-4df7-adb0-10d556185cf4\") " pod="openshift-image-registry/image-registry-66df7c8f76-bflx2"
Jan 28 15:52:06 crc kubenswrapper[4811]: I0128 15:52:06.090947 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/3bab724d-709a-4df7-adb0-10d556185cf4-registry-certificates\") pod \"image-registry-66df7c8f76-bflx2\" (UID: \"3bab724d-709a-4df7-adb0-10d556185cf4\") " pod="openshift-image-registry/image-registry-66df7c8f76-bflx2"
Jan 28 15:52:06 crc kubenswrapper[4811]: I0128 15:52:06.103487 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/3bab724d-709a-4df7-adb0-10d556185cf4-installation-pull-secrets\") pod \"image-registry-66df7c8f76-bflx2\" (UID: \"3bab724d-709a-4df7-adb0-10d556185cf4\") " pod="openshift-image-registry/image-registry-66df7c8f76-bflx2"
Jan 28 15:52:06 crc kubenswrapper[4811]: I0128 15:52:06.103540 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/3bab724d-709a-4df7-adb0-10d556185cf4-registry-tls\") pod \"image-registry-66df7c8f76-bflx2\" (UID: \"3bab724d-709a-4df7-adb0-10d556185cf4\") " pod="openshift-image-registry/image-registry-66df7c8f76-bflx2"
Jan 28 15:52:06 crc kubenswrapper[4811]: I0128 15:52:06.105838 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p9zx8\" (UniqueName: \"kubernetes.io/projected/3bab724d-709a-4df7-adb0-10d556185cf4-kube-api-access-p9zx8\") pod \"image-registry-66df7c8f76-bflx2\" (UID: \"3bab724d-709a-4df7-adb0-10d556185cf4\") " pod="openshift-image-registry/image-registry-66df7c8f76-bflx2"
\"image-registry-66df7c8f76-bflx2\" (UID: \"3bab724d-709a-4df7-adb0-10d556185cf4\") " pod="openshift-image-registry/image-registry-66df7c8f76-bflx2" Jan 28 15:52:06 crc kubenswrapper[4811]: I0128 15:52:06.106610 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3bab724d-709a-4df7-adb0-10d556185cf4-bound-sa-token\") pod \"image-registry-66df7c8f76-bflx2\" (UID: \"3bab724d-709a-4df7-adb0-10d556185cf4\") " pod="openshift-image-registry/image-registry-66df7c8f76-bflx2" Jan 28 15:52:06 crc kubenswrapper[4811]: I0128 15:52:06.251774 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-bflx2" Jan 28 15:52:06 crc kubenswrapper[4811]: I0128 15:52:06.640769 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-bflx2"] Jan 28 15:52:06 crc kubenswrapper[4811]: W0128 15:52:06.653510 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3bab724d_709a_4df7_adb0_10d556185cf4.slice/crio-d1b86b9d660e6464815e012b7ed1535a0ba372ed03da0b07f536e60e88988006 WatchSource:0}: Error finding container d1b86b9d660e6464815e012b7ed1535a0ba372ed03da0b07f536e60e88988006: Status 404 returned error can't find the container with id d1b86b9d660e6464815e012b7ed1535a0ba372ed03da0b07f536e60e88988006 Jan 28 15:52:06 crc kubenswrapper[4811]: I0128 15:52:06.929572 4811 generic.go:334] "Generic (PLEG): container finished" podID="70389e06-5bb1-421b-a403-7f1c5147bedb" containerID="c4b556d9ef99924602a21d701cd4f06277118d31a72fd9873738ec05f3b80fb0" exitCode=0 Jan 28 15:52:06 crc kubenswrapper[4811]: I0128 15:52:06.930351 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-spdrn" event={"ID":"70389e06-5bb1-421b-a403-7f1c5147bedb","Type":"ContainerDied","Data":"c4b556d9ef99924602a21d701cd4f06277118d31a72fd9873738ec05f3b80fb0"} Jan 28 15:52:06 crc kubenswrapper[4811]: I0128 15:52:06.931898 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-bflx2" event={"ID":"3bab724d-709a-4df7-adb0-10d556185cf4","Type":"ContainerStarted","Data":"7200a3d537244a22d8242ffab94149815375088a8845c5dfcb8f9983e165fb1c"} Jan 28 15:52:06 crc kubenswrapper[4811]: I0128 15:52:06.931933 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-bflx2" event={"ID":"3bab724d-709a-4df7-adb0-10d556185cf4","Type":"ContainerStarted","Data":"d1b86b9d660e6464815e012b7ed1535a0ba372ed03da0b07f536e60e88988006"} Jan 28 15:52:06 crc kubenswrapper[4811]: I0128 15:52:06.932044 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-bflx2" Jan 28 15:52:06 crc kubenswrapper[4811]: I0128 15:52:06.971985 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-bflx2" podStartSLOduration=1.9719591250000001 podStartE2EDuration="1.971959125s" podCreationTimestamp="2026-01-28 15:52:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:52:06.965798416 +0000 UTC m=+419.720162009" watchObservedRunningTime="2026-01-28 15:52:06.971959125 +0000 UTC m=+419.726322708" Jan 28 15:52:07 crc kubenswrapper[4811]: I0128 
Jan 28 15:52:07 crc kubenswrapper[4811]: I0128 15:52:07.264928 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n9g6x"
Jan 28 15:52:07 crc kubenswrapper[4811]: I0128 15:52:07.267572 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Jan 28 15:52:07 crc kubenswrapper[4811]: I0128 15:52:07.274110 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-n9g6x"]
Jan 28 15:52:07 crc kubenswrapper[4811]: I0128 15:52:07.406291 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30afaad7-f370-49c2-8813-9f66d50ad6c5-catalog-content\") pod \"community-operators-n9g6x\" (UID: \"30afaad7-f370-49c2-8813-9f66d50ad6c5\") " pod="openshift-marketplace/community-operators-n9g6x"
Jan 28 15:52:07 crc kubenswrapper[4811]: I0128 15:52:07.406380 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gv2vr\" (UniqueName: \"kubernetes.io/projected/30afaad7-f370-49c2-8813-9f66d50ad6c5-kube-api-access-gv2vr\") pod \"community-operators-n9g6x\" (UID: \"30afaad7-f370-49c2-8813-9f66d50ad6c5\") " pod="openshift-marketplace/community-operators-n9g6x"
Jan 28 15:52:07 crc kubenswrapper[4811]: I0128 15:52:07.406418 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30afaad7-f370-49c2-8813-9f66d50ad6c5-utilities\") pod \"community-operators-n9g6x\" (UID: \"30afaad7-f370-49c2-8813-9f66d50ad6c5\") " pod="openshift-marketplace/community-operators-n9g6x"
Jan 28 15:52:07 crc kubenswrapper[4811]: I0128 15:52:07.470758 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mnp8r"]
Jan 28 15:52:07 crc kubenswrapper[4811]: I0128 15:52:07.472561 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mnp8r"
Jan 28 15:52:07 crc kubenswrapper[4811]: I0128 15:52:07.474294 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Jan 28 15:52:07 crc kubenswrapper[4811]: I0128 15:52:07.478113 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mnp8r"]
Jan 28 15:52:07 crc kubenswrapper[4811]: I0128 15:52:07.506991 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gv2vr\" (UniqueName: \"kubernetes.io/projected/30afaad7-f370-49c2-8813-9f66d50ad6c5-kube-api-access-gv2vr\") pod \"community-operators-n9g6x\" (UID: \"30afaad7-f370-49c2-8813-9f66d50ad6c5\") " pod="openshift-marketplace/community-operators-n9g6x"
Jan 28 15:52:07 crc kubenswrapper[4811]: I0128 15:52:07.507037 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30afaad7-f370-49c2-8813-9f66d50ad6c5-utilities\") pod \"community-operators-n9g6x\" (UID: \"30afaad7-f370-49c2-8813-9f66d50ad6c5\") " pod="openshift-marketplace/community-operators-n9g6x"
Jan 28 15:52:07 crc kubenswrapper[4811]: I0128 15:52:07.507128 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30afaad7-f370-49c2-8813-9f66d50ad6c5-catalog-content\") pod \"community-operators-n9g6x\" (UID: \"30afaad7-f370-49c2-8813-9f66d50ad6c5\") " pod="openshift-marketplace/community-operators-n9g6x"
Jan 28 15:52:07 crc kubenswrapper[4811]: I0128 15:52:07.507552 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30afaad7-f370-49c2-8813-9f66d50ad6c5-utilities\") pod \"community-operators-n9g6x\" (UID: \"30afaad7-f370-49c2-8813-9f66d50ad6c5\") " pod="openshift-marketplace/community-operators-n9g6x"
Jan 28 15:52:07 crc kubenswrapper[4811]: I0128 15:52:07.507576 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30afaad7-f370-49c2-8813-9f66d50ad6c5-catalog-content\") pod \"community-operators-n9g6x\" (UID: \"30afaad7-f370-49c2-8813-9f66d50ad6c5\") " pod="openshift-marketplace/community-operators-n9g6x"
Jan 28 15:52:07 crc kubenswrapper[4811]: I0128 15:52:07.528823 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gv2vr\" (UniqueName: \"kubernetes.io/projected/30afaad7-f370-49c2-8813-9f66d50ad6c5-kube-api-access-gv2vr\") pod \"community-operators-n9g6x\" (UID: \"30afaad7-f370-49c2-8813-9f66d50ad6c5\") " pod="openshift-marketplace/community-operators-n9g6x"
Jan 28 15:52:07 crc kubenswrapper[4811]: I0128 15:52:07.581178 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n9g6x"
Jan 28 15:52:07 crc kubenswrapper[4811]: I0128 15:52:07.608466 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a12e131-89ad-4b37-b79e-2ba9e40122a3-utilities\") pod \"redhat-operators-mnp8r\" (UID: \"7a12e131-89ad-4b37-b79e-2ba9e40122a3\") " pod="openshift-marketplace/redhat-operators-mnp8r"
Jan 28 15:52:07 crc kubenswrapper[4811]: I0128 15:52:07.608893 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a12e131-89ad-4b37-b79e-2ba9e40122a3-catalog-content\") pod \"redhat-operators-mnp8r\" (UID: \"7a12e131-89ad-4b37-b79e-2ba9e40122a3\") " pod="openshift-marketplace/redhat-operators-mnp8r"
Jan 28 15:52:07 crc kubenswrapper[4811]: I0128 15:52:07.609067 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dq28s\" (UniqueName: \"kubernetes.io/projected/7a12e131-89ad-4b37-b79e-2ba9e40122a3-kube-api-access-dq28s\") pod \"redhat-operators-mnp8r\" (UID: \"7a12e131-89ad-4b37-b79e-2ba9e40122a3\") " pod="openshift-marketplace/redhat-operators-mnp8r"
Jan 28 15:52:07 crc kubenswrapper[4811]: I0128 15:52:07.710702 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dq28s\" (UniqueName: \"kubernetes.io/projected/7a12e131-89ad-4b37-b79e-2ba9e40122a3-kube-api-access-dq28s\") pod \"redhat-operators-mnp8r\" (UID: \"7a12e131-89ad-4b37-b79e-2ba9e40122a3\") " pod="openshift-marketplace/redhat-operators-mnp8r"
Jan 28 15:52:07 crc kubenswrapper[4811]: I0128 15:52:07.710996 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a12e131-89ad-4b37-b79e-2ba9e40122a3-utilities\") pod \"redhat-operators-mnp8r\" (UID: \"7a12e131-89ad-4b37-b79e-2ba9e40122a3\") " pod="openshift-marketplace/redhat-operators-mnp8r"
Jan 28 15:52:07 crc kubenswrapper[4811]: I0128 15:52:07.711173 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a12e131-89ad-4b37-b79e-2ba9e40122a3-catalog-content\") pod \"redhat-operators-mnp8r\" (UID: \"7a12e131-89ad-4b37-b79e-2ba9e40122a3\") " pod="openshift-marketplace/redhat-operators-mnp8r"
Jan 28 15:52:07 crc kubenswrapper[4811]: I0128 15:52:07.711801 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a12e131-89ad-4b37-b79e-2ba9e40122a3-catalog-content\") pod \"redhat-operators-mnp8r\" (UID: \"7a12e131-89ad-4b37-b79e-2ba9e40122a3\") " pod="openshift-marketplace/redhat-operators-mnp8r"
Jan 28 15:52:07 crc kubenswrapper[4811]: I0128 15:52:07.711818 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a12e131-89ad-4b37-b79e-2ba9e40122a3-utilities\") pod \"redhat-operators-mnp8r\" (UID: \"7a12e131-89ad-4b37-b79e-2ba9e40122a3\") " pod="openshift-marketplace/redhat-operators-mnp8r"
Jan 28 15:52:07 crc kubenswrapper[4811]: I0128 15:52:07.729051 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dq28s\" (UniqueName: \"kubernetes.io/projected/7a12e131-89ad-4b37-b79e-2ba9e40122a3-kube-api-access-dq28s\") pod \"redhat-operators-mnp8r\" (UID: \"7a12e131-89ad-4b37-b79e-2ba9e40122a3\") " pod="openshift-marketplace/redhat-operators-mnp8r"
\"7a12e131-89ad-4b37-b79e-2ba9e40122a3\") " pod="openshift-marketplace/redhat-operators-mnp8r" Jan 28 15:52:07 crc kubenswrapper[4811]: I0128 15:52:07.801262 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mnp8r" Jan 28 15:52:07 crc kubenswrapper[4811]: I0128 15:52:07.959712 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-n9g6x"] Jan 28 15:52:07 crc kubenswrapper[4811]: I0128 15:52:07.960112 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tkqqj" event={"ID":"38348412-bdb3-4e83-aa59-cb46112a648e","Type":"ContainerDied","Data":"ef0366e1ce95f2d0b8144fe9a44d77caa71d1fa1c5bcafa8e15263ca418f1895"} Jan 28 15:52:07 crc kubenswrapper[4811]: I0128 15:52:07.959954 4811 generic.go:334] "Generic (PLEG): container finished" podID="38348412-bdb3-4e83-aa59-cb46112a648e" containerID="ef0366e1ce95f2d0b8144fe9a44d77caa71d1fa1c5bcafa8e15263ca418f1895" exitCode=0 Jan 28 15:52:07 crc kubenswrapper[4811]: I0128 15:52:07.967361 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-spdrn" event={"ID":"70389e06-5bb1-421b-a403-7f1c5147bedb","Type":"ContainerStarted","Data":"e6e88cfde436514c2abb238f07e414f1b8770797f1c979f1af68c1a5d38252f8"} Jan 28 15:52:08 crc kubenswrapper[4811]: I0128 15:52:08.252934 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mnp8r"] Jan 28 15:52:08 crc kubenswrapper[4811]: W0128 15:52:08.260568 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7a12e131_89ad_4b37_b79e_2ba9e40122a3.slice/crio-dc342f1a58be84adfa0bcbe3ad3758b49c2acef0bc909f05b7e0f4aa48b52b9d WatchSource:0}: Error finding container dc342f1a58be84adfa0bcbe3ad3758b49c2acef0bc909f05b7e0f4aa48b52b9d: Status 404 returned error can't find the container with id dc342f1a58be84adfa0bcbe3ad3758b49c2acef0bc909f05b7e0f4aa48b52b9d Jan 28 15:52:08 crc kubenswrapper[4811]: I0128 15:52:08.973708 4811 generic.go:334] "Generic (PLEG): container finished" podID="30afaad7-f370-49c2-8813-9f66d50ad6c5" containerID="1d0912e720a10b86c8e52b41115c372758961b7dcc9e7260098c41d3fca7a015" exitCode=0 Jan 28 15:52:08 crc kubenswrapper[4811]: I0128 15:52:08.973816 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9g6x" event={"ID":"30afaad7-f370-49c2-8813-9f66d50ad6c5","Type":"ContainerDied","Data":"1d0912e720a10b86c8e52b41115c372758961b7dcc9e7260098c41d3fca7a015"} Jan 28 15:52:08 crc kubenswrapper[4811]: I0128 15:52:08.974078 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9g6x" event={"ID":"30afaad7-f370-49c2-8813-9f66d50ad6c5","Type":"ContainerStarted","Data":"a390f24cdb05655642296e0c9485262864b8a2a349c7325a6f9de91e3ea1296d"} Jan 28 15:52:08 crc kubenswrapper[4811]: I0128 15:52:08.975660 4811 generic.go:334] "Generic (PLEG): container finished" podID="7a12e131-89ad-4b37-b79e-2ba9e40122a3" containerID="bb0b63347714ba210bac8ea73337aa1fdbf93513ab2c5ad9a0e71facdbe220dd" exitCode=0 Jan 28 15:52:08 crc kubenswrapper[4811]: I0128 15:52:08.975711 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mnp8r" event={"ID":"7a12e131-89ad-4b37-b79e-2ba9e40122a3","Type":"ContainerDied","Data":"bb0b63347714ba210bac8ea73337aa1fdbf93513ab2c5ad9a0e71facdbe220dd"} Jan 28 
Jan 28 15:52:08 crc kubenswrapper[4811]: I0128 15:52:08.975730 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mnp8r" event={"ID":"7a12e131-89ad-4b37-b79e-2ba9e40122a3","Type":"ContainerStarted","Data":"dc342f1a58be84adfa0bcbe3ad3758b49c2acef0bc909f05b7e0f4aa48b52b9d"}
Jan 28 15:52:08 crc kubenswrapper[4811]: I0128 15:52:08.979953 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tkqqj" event={"ID":"38348412-bdb3-4e83-aa59-cb46112a648e","Type":"ContainerStarted","Data":"7cb669615566d00f2ab25f3afdd8f206bf8ec220ff25e5c6c8ce73e13dff9869"}
Jan 28 15:52:08 crc kubenswrapper[4811]: I0128 15:52:08.982920 4811 generic.go:334] "Generic (PLEG): container finished" podID="70389e06-5bb1-421b-a403-7f1c5147bedb" containerID="e6e88cfde436514c2abb238f07e414f1b8770797f1c979f1af68c1a5d38252f8" exitCode=0
Jan 28 15:52:08 crc kubenswrapper[4811]: I0128 15:52:08.982972 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-spdrn" event={"ID":"70389e06-5bb1-421b-a403-7f1c5147bedb","Type":"ContainerDied","Data":"e6e88cfde436514c2abb238f07e414f1b8770797f1c979f1af68c1a5d38252f8"}
Jan 28 15:52:09 crc kubenswrapper[4811]: I0128 15:52:09.046899 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tkqqj" podStartSLOduration=2.612076825 podStartE2EDuration="5.04687838s" podCreationTimestamp="2026-01-28 15:52:04 +0000 UTC" firstStartedPulling="2026-01-28 15:52:05.923706388 +0000 UTC m=+418.678069971" lastFinishedPulling="2026-01-28 15:52:08.358507943 +0000 UTC m=+421.112871526" observedRunningTime="2026-01-28 15:52:09.044389352 +0000 UTC m=+421.798752935" watchObservedRunningTime="2026-01-28 15:52:09.04687838 +0000 UTC m=+421.801241963"
Jan 28 15:52:09 crc kubenswrapper[4811]: I0128 15:52:09.990607 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mnp8r" event={"ID":"7a12e131-89ad-4b37-b79e-2ba9e40122a3","Type":"ContainerStarted","Data":"1fa523455109695af3d2eeab174a935e607628e19b23d0385ee5a633f57464d4"}
Jan 28 15:52:09 crc kubenswrapper[4811]: I0128 15:52:09.992928 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-spdrn" event={"ID":"70389e06-5bb1-421b-a403-7f1c5147bedb","Type":"ContainerStarted","Data":"92752b2dfbcf1eb0b5e24a2f949a357488be5e66ea0054296ddfdea6d9b2ba55"}
Jan 28 15:52:09 crc kubenswrapper[4811]: I0128 15:52:09.995927 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9g6x" event={"ID":"30afaad7-f370-49c2-8813-9f66d50ad6c5","Type":"ContainerStarted","Data":"c1cde5fe3cce8d09a8669e2c80aadb1b7acded66018bb8391861829e74a4a111"}
Jan 28 15:52:10 crc kubenswrapper[4811]: I0128 15:52:10.031521 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-spdrn" podStartSLOduration=2.569105715 podStartE2EDuration="5.031503583s" podCreationTimestamp="2026-01-28 15:52:05 +0000 UTC" firstStartedPulling="2026-01-28 15:52:06.93178251 +0000 UTC m=+419.686146093" lastFinishedPulling="2026-01-28 15:52:09.394180378 +0000 UTC m=+422.148543961" observedRunningTime="2026-01-28 15:52:10.029289433 +0000 UTC m=+422.783653036" watchObservedRunningTime="2026-01-28 15:52:10.031503583 +0000 UTC m=+422.785867166"
Jan 28 15:52:11 crc kubenswrapper[4811]: I0128 15:52:11.002468 4811 generic.go:334] "Generic (PLEG): container finished" podID="30afaad7-f370-49c2-8813-9f66d50ad6c5" containerID="c1cde5fe3cce8d09a8669e2c80aadb1b7acded66018bb8391861829e74a4a111" exitCode=0
finished" podID="30afaad7-f370-49c2-8813-9f66d50ad6c5" containerID="c1cde5fe3cce8d09a8669e2c80aadb1b7acded66018bb8391861829e74a4a111" exitCode=0 Jan 28 15:52:11 crc kubenswrapper[4811]: I0128 15:52:11.002551 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9g6x" event={"ID":"30afaad7-f370-49c2-8813-9f66d50ad6c5","Type":"ContainerDied","Data":"c1cde5fe3cce8d09a8669e2c80aadb1b7acded66018bb8391861829e74a4a111"} Jan 28 15:52:11 crc kubenswrapper[4811]: I0128 15:52:11.006670 4811 generic.go:334] "Generic (PLEG): container finished" podID="7a12e131-89ad-4b37-b79e-2ba9e40122a3" containerID="1fa523455109695af3d2eeab174a935e607628e19b23d0385ee5a633f57464d4" exitCode=0 Jan 28 15:52:11 crc kubenswrapper[4811]: I0128 15:52:11.007142 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mnp8r" event={"ID":"7a12e131-89ad-4b37-b79e-2ba9e40122a3","Type":"ContainerDied","Data":"1fa523455109695af3d2eeab174a935e607628e19b23d0385ee5a633f57464d4"} Jan 28 15:52:12 crc kubenswrapper[4811]: I0128 15:52:12.016332 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9g6x" event={"ID":"30afaad7-f370-49c2-8813-9f66d50ad6c5","Type":"ContainerStarted","Data":"22b17862f1b05427d66dd3a614214138c961a643739ced7f74604e99733cc1dd"} Jan 28 15:52:12 crc kubenswrapper[4811]: I0128 15:52:12.019334 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mnp8r" event={"ID":"7a12e131-89ad-4b37-b79e-2ba9e40122a3","Type":"ContainerStarted","Data":"49e64ca0f7dc6369936b0b95b390daa0b946fd8187ceb57bf3280161d5690442"} Jan 28 15:52:12 crc kubenswrapper[4811]: I0128 15:52:12.077454 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mnp8r" podStartSLOduration=2.324515129 podStartE2EDuration="5.077409528s" podCreationTimestamp="2026-01-28 15:52:07 +0000 UTC" firstStartedPulling="2026-01-28 15:52:08.977934742 +0000 UTC m=+421.732298325" lastFinishedPulling="2026-01-28 15:52:11.730829151 +0000 UTC m=+424.485192724" observedRunningTime="2026-01-28 15:52:12.074118789 +0000 UTC m=+424.828482382" watchObservedRunningTime="2026-01-28 15:52:12.077409528 +0000 UTC m=+424.831773111" Jan 28 15:52:12 crc kubenswrapper[4811]: I0128 15:52:12.078776 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-n9g6x" podStartSLOduration=2.454420007 podStartE2EDuration="5.078767775s" podCreationTimestamp="2026-01-28 15:52:07 +0000 UTC" firstStartedPulling="2026-01-28 15:52:08.976513253 +0000 UTC m=+421.730876836" lastFinishedPulling="2026-01-28 15:52:11.600861031 +0000 UTC m=+424.355224604" observedRunningTime="2026-01-28 15:52:12.052659594 +0000 UTC m=+424.807023177" watchObservedRunningTime="2026-01-28 15:52:12.078767775 +0000 UTC m=+424.833131358" Jan 28 15:52:14 crc kubenswrapper[4811]: I0128 15:52:14.063003 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-74ffcbf647-hd2sr"] Jan 28 15:52:14 crc kubenswrapper[4811]: I0128 15:52:14.063784 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-74ffcbf647-hd2sr" podUID="39c9ca82-2caf-4dff-9293-a3a281c2705e" containerName="controller-manager" containerID="cri-o://43b0e3d13543a3c8dee79e555afb60dcfff94cce050074bd864ce6f8f4850ec6" gracePeriod=30 Jan 28 15:52:14 crc 
Jan 28 15:52:14 crc kubenswrapper[4811]: I0128 15:52:14.446972 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-74ffcbf647-hd2sr"
Jan 28 15:52:14 crc kubenswrapper[4811]: I0128 15:52:14.609992 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/39c9ca82-2caf-4dff-9293-a3a281c2705e-proxy-ca-bundles\") pod \"39c9ca82-2caf-4dff-9293-a3a281c2705e\" (UID: \"39c9ca82-2caf-4dff-9293-a3a281c2705e\") "
Jan 28 15:52:14 crc kubenswrapper[4811]: I0128 15:52:14.610061 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/39c9ca82-2caf-4dff-9293-a3a281c2705e-serving-cert\") pod \"39c9ca82-2caf-4dff-9293-a3a281c2705e\" (UID: \"39c9ca82-2caf-4dff-9293-a3a281c2705e\") "
Jan 28 15:52:14 crc kubenswrapper[4811]: I0128 15:52:14.610163 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hxhpb\" (UniqueName: \"kubernetes.io/projected/39c9ca82-2caf-4dff-9293-a3a281c2705e-kube-api-access-hxhpb\") pod \"39c9ca82-2caf-4dff-9293-a3a281c2705e\" (UID: \"39c9ca82-2caf-4dff-9293-a3a281c2705e\") "
Jan 28 15:52:14 crc kubenswrapper[4811]: I0128 15:52:14.610220 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/39c9ca82-2caf-4dff-9293-a3a281c2705e-client-ca\") pod \"39c9ca82-2caf-4dff-9293-a3a281c2705e\" (UID: \"39c9ca82-2caf-4dff-9293-a3a281c2705e\") "
Jan 28 15:52:14 crc kubenswrapper[4811]: I0128 15:52:14.610804 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39c9ca82-2caf-4dff-9293-a3a281c2705e-client-ca" (OuterVolumeSpecName: "client-ca") pod "39c9ca82-2caf-4dff-9293-a3a281c2705e" (UID: "39c9ca82-2caf-4dff-9293-a3a281c2705e"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 15:52:14 crc kubenswrapper[4811]: I0128 15:52:14.610969 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39c9ca82-2caf-4dff-9293-a3a281c2705e-config" (OuterVolumeSpecName: "config") pod "39c9ca82-2caf-4dff-9293-a3a281c2705e" (UID: "39c9ca82-2caf-4dff-9293-a3a281c2705e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 15:52:14 crc kubenswrapper[4811]: I0128 15:52:14.610249 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39c9ca82-2caf-4dff-9293-a3a281c2705e-config\") pod \"39c9ca82-2caf-4dff-9293-a3a281c2705e\" (UID: \"39c9ca82-2caf-4dff-9293-a3a281c2705e\") "
Jan 28 15:52:14 crc kubenswrapper[4811]: I0128 15:52:14.611234 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39c9ca82-2caf-4dff-9293-a3a281c2705e-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "39c9ca82-2caf-4dff-9293-a3a281c2705e" (UID: "39c9ca82-2caf-4dff-9293-a3a281c2705e"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:52:14 crc kubenswrapper[4811]: I0128 15:52:14.611460 4811 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/39c9ca82-2caf-4dff-9293-a3a281c2705e-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 28 15:52:14 crc kubenswrapper[4811]: I0128 15:52:14.611481 4811 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/39c9ca82-2caf-4dff-9293-a3a281c2705e-client-ca\") on node \"crc\" DevicePath \"\"" Jan 28 15:52:14 crc kubenswrapper[4811]: I0128 15:52:14.611516 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39c9ca82-2caf-4dff-9293-a3a281c2705e-config\") on node \"crc\" DevicePath \"\"" Jan 28 15:52:14 crc kubenswrapper[4811]: I0128 15:52:14.615049 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39c9ca82-2caf-4dff-9293-a3a281c2705e-kube-api-access-hxhpb" (OuterVolumeSpecName: "kube-api-access-hxhpb") pod "39c9ca82-2caf-4dff-9293-a3a281c2705e" (UID: "39c9ca82-2caf-4dff-9293-a3a281c2705e"). InnerVolumeSpecName "kube-api-access-hxhpb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:52:14 crc kubenswrapper[4811]: I0128 15:52:14.615063 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39c9ca82-2caf-4dff-9293-a3a281c2705e-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "39c9ca82-2caf-4dff-9293-a3a281c2705e" (UID: "39c9ca82-2caf-4dff-9293-a3a281c2705e"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:52:14 crc kubenswrapper[4811]: I0128 15:52:14.713135 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hxhpb\" (UniqueName: \"kubernetes.io/projected/39c9ca82-2caf-4dff-9293-a3a281c2705e-kube-api-access-hxhpb\") on node \"crc\" DevicePath \"\"" Jan 28 15:52:14 crc kubenswrapper[4811]: I0128 15:52:14.713182 4811 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/39c9ca82-2caf-4dff-9293-a3a281c2705e-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:52:15 crc kubenswrapper[4811]: I0128 15:52:15.047391 4811 generic.go:334] "Generic (PLEG): container finished" podID="39c9ca82-2caf-4dff-9293-a3a281c2705e" containerID="43b0e3d13543a3c8dee79e555afb60dcfff94cce050074bd864ce6f8f4850ec6" exitCode=0 Jan 28 15:52:15 crc kubenswrapper[4811]: I0128 15:52:15.047481 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-74ffcbf647-hd2sr" Jan 28 15:52:15 crc kubenswrapper[4811]: I0128 15:52:15.047479 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-74ffcbf647-hd2sr" event={"ID":"39c9ca82-2caf-4dff-9293-a3a281c2705e","Type":"ContainerDied","Data":"43b0e3d13543a3c8dee79e555afb60dcfff94cce050074bd864ce6f8f4850ec6"} Jan 28 15:52:15 crc kubenswrapper[4811]: I0128 15:52:15.047903 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-74ffcbf647-hd2sr" event={"ID":"39c9ca82-2caf-4dff-9293-a3a281c2705e","Type":"ContainerDied","Data":"3921fb1ec0859e60434a7c4935ee898b8ec981ffabe4c31c641372965a2f88c7"} Jan 28 15:52:15 crc kubenswrapper[4811]: I0128 15:52:15.047930 4811 scope.go:117] "RemoveContainer" containerID="43b0e3d13543a3c8dee79e555afb60dcfff94cce050074bd864ce6f8f4850ec6" Jan 28 15:52:15 crc kubenswrapper[4811]: I0128 15:52:15.078505 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-74ffcbf647-hd2sr"] Jan 28 15:52:15 crc kubenswrapper[4811]: I0128 15:52:15.080616 4811 scope.go:117] "RemoveContainer" containerID="43b0e3d13543a3c8dee79e555afb60dcfff94cce050074bd864ce6f8f4850ec6" Jan 28 15:52:15 crc kubenswrapper[4811]: E0128 15:52:15.081387 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43b0e3d13543a3c8dee79e555afb60dcfff94cce050074bd864ce6f8f4850ec6\": container with ID starting with 43b0e3d13543a3c8dee79e555afb60dcfff94cce050074bd864ce6f8f4850ec6 not found: ID does not exist" containerID="43b0e3d13543a3c8dee79e555afb60dcfff94cce050074bd864ce6f8f4850ec6" Jan 28 15:52:15 crc kubenswrapper[4811]: I0128 15:52:15.081521 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43b0e3d13543a3c8dee79e555afb60dcfff94cce050074bd864ce6f8f4850ec6"} err="failed to get container status \"43b0e3d13543a3c8dee79e555afb60dcfff94cce050074bd864ce6f8f4850ec6\": rpc error: code = NotFound desc = could not find container \"43b0e3d13543a3c8dee79e555afb60dcfff94cce050074bd864ce6f8f4850ec6\": container with ID starting with 43b0e3d13543a3c8dee79e555afb60dcfff94cce050074bd864ce6f8f4850ec6 not found: ID does not exist" Jan 28 15:52:15 crc kubenswrapper[4811]: I0128 15:52:15.085765 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-74ffcbf647-hd2sr"] Jan 28 15:52:15 crc kubenswrapper[4811]: I0128 15:52:15.201522 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tkqqj" Jan 28 15:52:15 crc kubenswrapper[4811]: I0128 15:52:15.201776 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tkqqj" Jan 28 15:52:15 crc kubenswrapper[4811]: I0128 15:52:15.242559 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tkqqj" Jan 28 15:52:15 crc kubenswrapper[4811]: I0128 15:52:15.386624 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-spdrn" Jan 28 15:52:15 crc kubenswrapper[4811]: I0128 15:52:15.386691 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-spdrn" Jan 28 15:52:15 crc kubenswrapper[4811]: 
I0128 15:52:15.430194 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-spdrn" Jan 28 15:52:15 crc kubenswrapper[4811]: I0128 15:52:15.837208 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-798f9bf9f7-kq26l"] Jan 28 15:52:15 crc kubenswrapper[4811]: E0128 15:52:15.837572 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39c9ca82-2caf-4dff-9293-a3a281c2705e" containerName="controller-manager" Jan 28 15:52:15 crc kubenswrapper[4811]: I0128 15:52:15.837602 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="39c9ca82-2caf-4dff-9293-a3a281c2705e" containerName="controller-manager" Jan 28 15:52:15 crc kubenswrapper[4811]: I0128 15:52:15.837757 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="39c9ca82-2caf-4dff-9293-a3a281c2705e" containerName="controller-manager" Jan 28 15:52:15 crc kubenswrapper[4811]: I0128 15:52:15.838386 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-798f9bf9f7-kq26l" Jan 28 15:52:15 crc kubenswrapper[4811]: I0128 15:52:15.840275 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 28 15:52:15 crc kubenswrapper[4811]: I0128 15:52:15.841712 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 28 15:52:15 crc kubenswrapper[4811]: I0128 15:52:15.842586 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 28 15:52:15 crc kubenswrapper[4811]: I0128 15:52:15.842857 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 28 15:52:15 crc kubenswrapper[4811]: I0128 15:52:15.843109 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 28 15:52:15 crc kubenswrapper[4811]: I0128 15:52:15.843326 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 28 15:52:15 crc kubenswrapper[4811]: I0128 15:52:15.850484 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 28 15:52:15 crc kubenswrapper[4811]: I0128 15:52:15.851943 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-798f9bf9f7-kq26l"] Jan 28 15:52:15 crc kubenswrapper[4811]: I0128 15:52:15.929692 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a91d0ce5-75b1-4593-ae76-02f2a1d8eed1-proxy-ca-bundles\") pod \"controller-manager-798f9bf9f7-kq26l\" (UID: \"a91d0ce5-75b1-4593-ae76-02f2a1d8eed1\") " pod="openshift-controller-manager/controller-manager-798f9bf9f7-kq26l" Jan 28 15:52:15 crc kubenswrapper[4811]: I0128 15:52:15.929763 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a91d0ce5-75b1-4593-ae76-02f2a1d8eed1-client-ca\") pod \"controller-manager-798f9bf9f7-kq26l\" (UID: \"a91d0ce5-75b1-4593-ae76-02f2a1d8eed1\") " pod="openshift-controller-manager/controller-manager-798f9bf9f7-kq26l" Jan 28 15:52:15 crc 
kubenswrapper[4811]: I0128 15:52:15.929787 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a91d0ce5-75b1-4593-ae76-02f2a1d8eed1-serving-cert\") pod \"controller-manager-798f9bf9f7-kq26l\" (UID: \"a91d0ce5-75b1-4593-ae76-02f2a1d8eed1\") " pod="openshift-controller-manager/controller-manager-798f9bf9f7-kq26l" Jan 28 15:52:15 crc kubenswrapper[4811]: I0128 15:52:15.930009 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2h5s\" (UniqueName: \"kubernetes.io/projected/a91d0ce5-75b1-4593-ae76-02f2a1d8eed1-kube-api-access-w2h5s\") pod \"controller-manager-798f9bf9f7-kq26l\" (UID: \"a91d0ce5-75b1-4593-ae76-02f2a1d8eed1\") " pod="openshift-controller-manager/controller-manager-798f9bf9f7-kq26l" Jan 28 15:52:15 crc kubenswrapper[4811]: I0128 15:52:15.930057 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a91d0ce5-75b1-4593-ae76-02f2a1d8eed1-config\") pod \"controller-manager-798f9bf9f7-kq26l\" (UID: \"a91d0ce5-75b1-4593-ae76-02f2a1d8eed1\") " pod="openshift-controller-manager/controller-manager-798f9bf9f7-kq26l" Jan 28 15:52:16 crc kubenswrapper[4811]: I0128 15:52:16.031121 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2h5s\" (UniqueName: \"kubernetes.io/projected/a91d0ce5-75b1-4593-ae76-02f2a1d8eed1-kube-api-access-w2h5s\") pod \"controller-manager-798f9bf9f7-kq26l\" (UID: \"a91d0ce5-75b1-4593-ae76-02f2a1d8eed1\") " pod="openshift-controller-manager/controller-manager-798f9bf9f7-kq26l" Jan 28 15:52:16 crc kubenswrapper[4811]: I0128 15:52:16.031179 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a91d0ce5-75b1-4593-ae76-02f2a1d8eed1-config\") pod \"controller-manager-798f9bf9f7-kq26l\" (UID: \"a91d0ce5-75b1-4593-ae76-02f2a1d8eed1\") " pod="openshift-controller-manager/controller-manager-798f9bf9f7-kq26l" Jan 28 15:52:16 crc kubenswrapper[4811]: I0128 15:52:16.031214 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a91d0ce5-75b1-4593-ae76-02f2a1d8eed1-proxy-ca-bundles\") pod \"controller-manager-798f9bf9f7-kq26l\" (UID: \"a91d0ce5-75b1-4593-ae76-02f2a1d8eed1\") " pod="openshift-controller-manager/controller-manager-798f9bf9f7-kq26l" Jan 28 15:52:16 crc kubenswrapper[4811]: I0128 15:52:16.031231 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a91d0ce5-75b1-4593-ae76-02f2a1d8eed1-client-ca\") pod \"controller-manager-798f9bf9f7-kq26l\" (UID: \"a91d0ce5-75b1-4593-ae76-02f2a1d8eed1\") " pod="openshift-controller-manager/controller-manager-798f9bf9f7-kq26l" Jan 28 15:52:16 crc kubenswrapper[4811]: I0128 15:52:16.031252 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a91d0ce5-75b1-4593-ae76-02f2a1d8eed1-serving-cert\") pod \"controller-manager-798f9bf9f7-kq26l\" (UID: \"a91d0ce5-75b1-4593-ae76-02f2a1d8eed1\") " pod="openshift-controller-manager/controller-manager-798f9bf9f7-kq26l" Jan 28 15:52:16 crc kubenswrapper[4811]: I0128 15:52:16.032461 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/a91d0ce5-75b1-4593-ae76-02f2a1d8eed1-client-ca\") pod \"controller-manager-798f9bf9f7-kq26l\" (UID: \"a91d0ce5-75b1-4593-ae76-02f2a1d8eed1\") " pod="openshift-controller-manager/controller-manager-798f9bf9f7-kq26l" Jan 28 15:52:16 crc kubenswrapper[4811]: I0128 15:52:16.032568 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a91d0ce5-75b1-4593-ae76-02f2a1d8eed1-proxy-ca-bundles\") pod \"controller-manager-798f9bf9f7-kq26l\" (UID: \"a91d0ce5-75b1-4593-ae76-02f2a1d8eed1\") " pod="openshift-controller-manager/controller-manager-798f9bf9f7-kq26l" Jan 28 15:52:16 crc kubenswrapper[4811]: I0128 15:52:16.032796 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a91d0ce5-75b1-4593-ae76-02f2a1d8eed1-config\") pod \"controller-manager-798f9bf9f7-kq26l\" (UID: \"a91d0ce5-75b1-4593-ae76-02f2a1d8eed1\") " pod="openshift-controller-manager/controller-manager-798f9bf9f7-kq26l" Jan 28 15:52:16 crc kubenswrapper[4811]: I0128 15:52:16.042345 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a91d0ce5-75b1-4593-ae76-02f2a1d8eed1-serving-cert\") pod \"controller-manager-798f9bf9f7-kq26l\" (UID: \"a91d0ce5-75b1-4593-ae76-02f2a1d8eed1\") " pod="openshift-controller-manager/controller-manager-798f9bf9f7-kq26l" Jan 28 15:52:16 crc kubenswrapper[4811]: I0128 15:52:16.055928 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2h5s\" (UniqueName: \"kubernetes.io/projected/a91d0ce5-75b1-4593-ae76-02f2a1d8eed1-kube-api-access-w2h5s\") pod \"controller-manager-798f9bf9f7-kq26l\" (UID: \"a91d0ce5-75b1-4593-ae76-02f2a1d8eed1\") " pod="openshift-controller-manager/controller-manager-798f9bf9f7-kq26l" Jan 28 15:52:16 crc kubenswrapper[4811]: I0128 15:52:16.096587 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-spdrn" Jan 28 15:52:16 crc kubenswrapper[4811]: I0128 15:52:16.097131 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tkqqj" Jan 28 15:52:16 crc kubenswrapper[4811]: I0128 15:52:16.162300 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-798f9bf9f7-kq26l" Jan 28 15:52:16 crc kubenswrapper[4811]: I0128 15:52:16.363381 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39c9ca82-2caf-4dff-9293-a3a281c2705e" path="/var/lib/kubelet/pods/39c9ca82-2caf-4dff-9293-a3a281c2705e/volumes" Jan 28 15:52:16 crc kubenswrapper[4811]: I0128 15:52:16.364307 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-798f9bf9f7-kq26l"] Jan 28 15:52:17 crc kubenswrapper[4811]: I0128 15:52:17.060386 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-798f9bf9f7-kq26l" event={"ID":"a91d0ce5-75b1-4593-ae76-02f2a1d8eed1","Type":"ContainerStarted","Data":"6318f59cb32d7b9c693214b6f9076fd5fcce5ecd01d19f35604b4f4479ffe8aa"} Jan 28 15:52:17 crc kubenswrapper[4811]: I0128 15:52:17.060452 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-798f9bf9f7-kq26l" event={"ID":"a91d0ce5-75b1-4593-ae76-02f2a1d8eed1","Type":"ContainerStarted","Data":"2ccdbb54943300f1e808b44a1e07c88df7c69315ba1b23a769a055997bd14b94"} Jan 28 15:52:17 crc kubenswrapper[4811]: I0128 15:52:17.080870 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-798f9bf9f7-kq26l" podStartSLOduration=3.080852165 podStartE2EDuration="3.080852165s" podCreationTimestamp="2026-01-28 15:52:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:52:17.077960956 +0000 UTC m=+429.832324539" watchObservedRunningTime="2026-01-28 15:52:17.080852165 +0000 UTC m=+429.835215748" Jan 28 15:52:17 crc kubenswrapper[4811]: I0128 15:52:17.581789 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-n9g6x" Jan 28 15:52:17 crc kubenswrapper[4811]: I0128 15:52:17.582122 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-n9g6x" Jan 28 15:52:17 crc kubenswrapper[4811]: I0128 15:52:17.634949 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-n9g6x" Jan 28 15:52:17 crc kubenswrapper[4811]: I0128 15:52:17.801618 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mnp8r" Jan 28 15:52:17 crc kubenswrapper[4811]: I0128 15:52:17.802462 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mnp8r" Jan 28 15:52:17 crc kubenswrapper[4811]: I0128 15:52:17.843257 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mnp8r" Jan 28 15:52:18 crc kubenswrapper[4811]: I0128 15:52:18.065240 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-798f9bf9f7-kq26l" Jan 28 15:52:18 crc kubenswrapper[4811]: I0128 15:52:18.070563 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-798f9bf9f7-kq26l" Jan 28 15:52:18 crc kubenswrapper[4811]: I0128 15:52:18.128337 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mnp8r" Jan 28 
15:52:18 crc kubenswrapper[4811]: I0128 15:52:18.128699 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-n9g6x" Jan 28 15:52:26 crc kubenswrapper[4811]: I0128 15:52:26.257644 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-bflx2" Jan 28 15:52:26 crc kubenswrapper[4811]: I0128 15:52:26.316067 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-hcsxq"] Jan 28 15:52:51 crc kubenswrapper[4811]: I0128 15:52:51.358814 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" podUID="ec729dda-4b2d-458b-8e11-6cc8beacb717" containerName="registry" containerID="cri-o://ae24e0e45acf85226f0dae1480c86d203f8fb183bcc1dce0dadcbde39d97d452" gracePeriod=30 Jan 28 15:52:51 crc kubenswrapper[4811]: I0128 15:52:51.810574 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:52:51 crc kubenswrapper[4811]: I0128 15:52:51.939060 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ec729dda-4b2d-458b-8e11-6cc8beacb717-installation-pull-secrets\") pod \"ec729dda-4b2d-458b-8e11-6cc8beacb717\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " Jan 28 15:52:51 crc kubenswrapper[4811]: I0128 15:52:51.939149 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ec729dda-4b2d-458b-8e11-6cc8beacb717-ca-trust-extracted\") pod \"ec729dda-4b2d-458b-8e11-6cc8beacb717\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " Jan 28 15:52:51 crc kubenswrapper[4811]: I0128 15:52:51.939313 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"ec729dda-4b2d-458b-8e11-6cc8beacb717\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " Jan 28 15:52:51 crc kubenswrapper[4811]: I0128 15:52:51.939387 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z8cm9\" (UniqueName: \"kubernetes.io/projected/ec729dda-4b2d-458b-8e11-6cc8beacb717-kube-api-access-z8cm9\") pod \"ec729dda-4b2d-458b-8e11-6cc8beacb717\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " Jan 28 15:52:51 crc kubenswrapper[4811]: I0128 15:52:51.939416 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ec729dda-4b2d-458b-8e11-6cc8beacb717-registry-certificates\") pod \"ec729dda-4b2d-458b-8e11-6cc8beacb717\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " Jan 28 15:52:51 crc kubenswrapper[4811]: I0128 15:52:51.939464 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ec729dda-4b2d-458b-8e11-6cc8beacb717-trusted-ca\") pod \"ec729dda-4b2d-458b-8e11-6cc8beacb717\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " Jan 28 15:52:51 crc kubenswrapper[4811]: I0128 15:52:51.939484 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/ec729dda-4b2d-458b-8e11-6cc8beacb717-bound-sa-token\") pod \"ec729dda-4b2d-458b-8e11-6cc8beacb717\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " Jan 28 15:52:51 crc kubenswrapper[4811]: I0128 15:52:51.939532 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ec729dda-4b2d-458b-8e11-6cc8beacb717-registry-tls\") pod \"ec729dda-4b2d-458b-8e11-6cc8beacb717\" (UID: \"ec729dda-4b2d-458b-8e11-6cc8beacb717\") " Jan 28 15:52:51 crc kubenswrapper[4811]: I0128 15:52:51.940660 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec729dda-4b2d-458b-8e11-6cc8beacb717-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "ec729dda-4b2d-458b-8e11-6cc8beacb717" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:52:51 crc kubenswrapper[4811]: I0128 15:52:51.940836 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec729dda-4b2d-458b-8e11-6cc8beacb717-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "ec729dda-4b2d-458b-8e11-6cc8beacb717" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:52:51 crc kubenswrapper[4811]: I0128 15:52:51.948784 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec729dda-4b2d-458b-8e11-6cc8beacb717-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "ec729dda-4b2d-458b-8e11-6cc8beacb717" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:52:51 crc kubenswrapper[4811]: I0128 15:52:51.948928 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec729dda-4b2d-458b-8e11-6cc8beacb717-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "ec729dda-4b2d-458b-8e11-6cc8beacb717" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:52:51 crc kubenswrapper[4811]: I0128 15:52:51.949517 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec729dda-4b2d-458b-8e11-6cc8beacb717-kube-api-access-z8cm9" (OuterVolumeSpecName: "kube-api-access-z8cm9") pod "ec729dda-4b2d-458b-8e11-6cc8beacb717" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717"). InnerVolumeSpecName "kube-api-access-z8cm9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:52:51 crc kubenswrapper[4811]: I0128 15:52:51.949605 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec729dda-4b2d-458b-8e11-6cc8beacb717-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "ec729dda-4b2d-458b-8e11-6cc8beacb717" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717"). InnerVolumeSpecName "registry-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:52:51 crc kubenswrapper[4811]: I0128 15:52:51.952221 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "ec729dda-4b2d-458b-8e11-6cc8beacb717" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 28 15:52:51 crc kubenswrapper[4811]: I0128 15:52:51.956852 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec729dda-4b2d-458b-8e11-6cc8beacb717-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "ec729dda-4b2d-458b-8e11-6cc8beacb717" (UID: "ec729dda-4b2d-458b-8e11-6cc8beacb717"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 15:52:52 crc kubenswrapper[4811]: I0128 15:52:52.041334 4811 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ec729dda-4b2d-458b-8e11-6cc8beacb717-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 28 15:52:52 crc kubenswrapper[4811]: I0128 15:52:52.041383 4811 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ec729dda-4b2d-458b-8e11-6cc8beacb717-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 28 15:52:52 crc kubenswrapper[4811]: I0128 15:52:52.041396 4811 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ec729dda-4b2d-458b-8e11-6cc8beacb717-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 28 15:52:52 crc kubenswrapper[4811]: I0128 15:52:52.041404 4811 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ec729dda-4b2d-458b-8e11-6cc8beacb717-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 28 15:52:52 crc kubenswrapper[4811]: I0128 15:52:52.041413 4811 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ec729dda-4b2d-458b-8e11-6cc8beacb717-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 28 15:52:52 crc kubenswrapper[4811]: I0128 15:52:52.041421 4811 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ec729dda-4b2d-458b-8e11-6cc8beacb717-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 28 15:52:52 crc kubenswrapper[4811]: I0128 15:52:52.041449 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z8cm9\" (UniqueName: \"kubernetes.io/projected/ec729dda-4b2d-458b-8e11-6cc8beacb717-kube-api-access-z8cm9\") on node \"crc\" DevicePath \"\"" Jan 28 15:52:52 crc kubenswrapper[4811]: I0128 15:52:52.290664 4811 generic.go:334] "Generic (PLEG): container finished" podID="ec729dda-4b2d-458b-8e11-6cc8beacb717" containerID="ae24e0e45acf85226f0dae1480c86d203f8fb183bcc1dce0dadcbde39d97d452" exitCode=0 Jan 28 15:52:52 crc kubenswrapper[4811]: I0128 15:52:52.290713 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" Jan 28 15:52:52 crc kubenswrapper[4811]: I0128 15:52:52.290728 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" event={"ID":"ec729dda-4b2d-458b-8e11-6cc8beacb717","Type":"ContainerDied","Data":"ae24e0e45acf85226f0dae1480c86d203f8fb183bcc1dce0dadcbde39d97d452"} Jan 28 15:52:52 crc kubenswrapper[4811]: I0128 15:52:52.290770 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-hcsxq" event={"ID":"ec729dda-4b2d-458b-8e11-6cc8beacb717","Type":"ContainerDied","Data":"eb62d835d4313f690614c429c59b7c8d5820ee8272260a0500e58c5effc81126"} Jan 28 15:52:52 crc kubenswrapper[4811]: I0128 15:52:52.290796 4811 scope.go:117] "RemoveContainer" containerID="ae24e0e45acf85226f0dae1480c86d203f8fb183bcc1dce0dadcbde39d97d452" Jan 28 15:52:52 crc kubenswrapper[4811]: I0128 15:52:52.326050 4811 scope.go:117] "RemoveContainer" containerID="ae24e0e45acf85226f0dae1480c86d203f8fb183bcc1dce0dadcbde39d97d452" Jan 28 15:52:52 crc kubenswrapper[4811]: E0128 15:52:52.326782 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae24e0e45acf85226f0dae1480c86d203f8fb183bcc1dce0dadcbde39d97d452\": container with ID starting with ae24e0e45acf85226f0dae1480c86d203f8fb183bcc1dce0dadcbde39d97d452 not found: ID does not exist" containerID="ae24e0e45acf85226f0dae1480c86d203f8fb183bcc1dce0dadcbde39d97d452" Jan 28 15:52:52 crc kubenswrapper[4811]: I0128 15:52:52.326878 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae24e0e45acf85226f0dae1480c86d203f8fb183bcc1dce0dadcbde39d97d452"} err="failed to get container status \"ae24e0e45acf85226f0dae1480c86d203f8fb183bcc1dce0dadcbde39d97d452\": rpc error: code = NotFound desc = could not find container \"ae24e0e45acf85226f0dae1480c86d203f8fb183bcc1dce0dadcbde39d97d452\": container with ID starting with ae24e0e45acf85226f0dae1480c86d203f8fb183bcc1dce0dadcbde39d97d452 not found: ID does not exist" Jan 28 15:52:52 crc kubenswrapper[4811]: I0128 15:52:52.353028 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-hcsxq"] Jan 28 15:52:52 crc kubenswrapper[4811]: I0128 15:52:52.353071 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-hcsxq"] Jan 28 15:52:54 crc kubenswrapper[4811]: I0128 15:52:54.347827 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec729dda-4b2d-458b-8e11-6cc8beacb717" path="/var/lib/kubelet/pods/ec729dda-4b2d-458b-8e11-6cc8beacb717/volumes" Jan 28 15:54:03 crc kubenswrapper[4811]: I0128 15:54:03.087855 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 15:54:03 crc kubenswrapper[4811]: I0128 15:54:03.088795 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 15:54:12 crc 
kubenswrapper[4811]: I0128 15:54:12.465070 4811 scope.go:117] "RemoveContainer" containerID="64acaf8bed1f63aa816dfb7143ffa1aa0443b236548a9aa7c1a65e6ab188de80" Jan 28 15:54:33 crc kubenswrapper[4811]: I0128 15:54:33.087636 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 15:54:33 crc kubenswrapper[4811]: I0128 15:54:33.088506 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 15:55:03 crc kubenswrapper[4811]: I0128 15:55:03.087707 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 15:55:03 crc kubenswrapper[4811]: I0128 15:55:03.088325 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 15:55:03 crc kubenswrapper[4811]: I0128 15:55:03.088382 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" Jan 28 15:55:03 crc kubenswrapper[4811]: I0128 15:55:03.089119 4811 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2f52ccc0fd9df1dd9ba999c3d6271df950a17bf565b0226b513fdf0e88b0f8fc"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 28 15:55:03 crc kubenswrapper[4811]: I0128 15:55:03.089191 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://2f52ccc0fd9df1dd9ba999c3d6271df950a17bf565b0226b513fdf0e88b0f8fc" gracePeriod=600 Jan 28 15:55:04 crc kubenswrapper[4811]: I0128 15:55:04.106265 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="2f52ccc0fd9df1dd9ba999c3d6271df950a17bf565b0226b513fdf0e88b0f8fc" exitCode=0 Jan 28 15:55:04 crc kubenswrapper[4811]: I0128 15:55:04.106409 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"2f52ccc0fd9df1dd9ba999c3d6271df950a17bf565b0226b513fdf0e88b0f8fc"} Jan 28 15:55:04 crc kubenswrapper[4811]: I0128 15:55:04.106828 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" 
event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"27340b9d5e1d8d5e0d3aff5e91d7f338c280cdca6a861ea86d43ab3789100f43"} Jan 28 15:55:04 crc kubenswrapper[4811]: I0128 15:55:04.106870 4811 scope.go:117] "RemoveContainer" containerID="4eb1075bef300aa6f4b9a078a39752fc2bb693e12743f902b96be95bfa041b2e" Jan 28 15:56:12 crc kubenswrapper[4811]: I0128 15:56:12.512347 4811 scope.go:117] "RemoveContainer" containerID="dd410c52f3c3ae0a96a55740e06288933953a4b692c6619ac2863899af1486bf" Jan 28 15:56:12 crc kubenswrapper[4811]: I0128 15:56:12.546299 4811 scope.go:117] "RemoveContainer" containerID="98714443f526a3cf5836d4c45e3be4aac9eff794d76f0ea7499e003fd4f8480f" Jan 28 15:57:03 crc kubenswrapper[4811]: I0128 15:57:03.087570 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 15:57:03 crc kubenswrapper[4811]: I0128 15:57:03.089189 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 15:57:33 crc kubenswrapper[4811]: I0128 15:57:33.087822 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 15:57:33 crc kubenswrapper[4811]: I0128 15:57:33.088538 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 15:57:42 crc kubenswrapper[4811]: I0128 15:57:42.326489 4811 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 28 15:58:03 crc kubenswrapper[4811]: I0128 15:58:03.087842 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 15:58:03 crc kubenswrapper[4811]: I0128 15:58:03.088374 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 15:58:03 crc kubenswrapper[4811]: I0128 15:58:03.088422 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" Jan 28 15:58:03 crc kubenswrapper[4811]: I0128 15:58:03.089043 4811 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"27340b9d5e1d8d5e0d3aff5e91d7f338c280cdca6a861ea86d43ab3789100f43"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 28 15:58:03 crc kubenswrapper[4811]: I0128 15:58:03.089122 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://27340b9d5e1d8d5e0d3aff5e91d7f338c280cdca6a861ea86d43ab3789100f43" gracePeriod=600 Jan 28 15:58:03 crc kubenswrapper[4811]: I0128 15:58:03.237106 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="27340b9d5e1d8d5e0d3aff5e91d7f338c280cdca6a861ea86d43ab3789100f43" exitCode=0 Jan 28 15:58:03 crc kubenswrapper[4811]: I0128 15:58:03.237169 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"27340b9d5e1d8d5e0d3aff5e91d7f338c280cdca6a861ea86d43ab3789100f43"} Jan 28 15:58:03 crc kubenswrapper[4811]: I0128 15:58:03.237206 4811 scope.go:117] "RemoveContainer" containerID="2f52ccc0fd9df1dd9ba999c3d6271df950a17bf565b0226b513fdf0e88b0f8fc" Jan 28 15:58:04 crc kubenswrapper[4811]: I0128 15:58:04.250283 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"eabdb53100645a7db124d74211393e44eb29247556832b21f6981f388ac52c1c"} Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.377101 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-dkzf6"] Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.378037 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="ovn-controller" containerID="cri-o://b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf" gracePeriod=30 Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.378141 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="northd" containerID="cri-o://3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6" gracePeriod=30 Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.378123 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="nbdb" containerID="cri-o://70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265" gracePeriod=30 Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.378175 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77" gracePeriod=30 Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.378204 4811 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="kube-rbac-proxy-node" containerID="cri-o://13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c" gracePeriod=30 Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.378233 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="ovn-acl-logging" containerID="cri-o://575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698" gracePeriod=30 Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.378326 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="sbdb" containerID="cri-o://fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025" gracePeriod=30 Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.419122 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="ovnkube-controller" containerID="cri-o://5f0050e1d3952d6c23ba672537a0f0d69e033e6a7cef3fb4236b9b5f970b4e23" gracePeriod=30 Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.670468 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dkzf6_d3371905-8bb6-433c-b905-43d69e7b382a/ovnkube-controller/3.log" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.673824 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dkzf6_d3371905-8bb6-433c-b905-43d69e7b382a/ovn-acl-logging/0.log" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.674391 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dkzf6_d3371905-8bb6-433c-b905-43d69e7b382a/ovn-controller/0.log" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.674755 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.724871 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-pxnlc"] Jan 28 15:58:40 crc kubenswrapper[4811]: E0128 15:58:40.725048 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="ovn-controller" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.725059 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="ovn-controller" Jan 28 15:58:40 crc kubenswrapper[4811]: E0128 15:58:40.725067 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="northd" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.725072 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="northd" Jan 28 15:58:40 crc kubenswrapper[4811]: E0128 15:58:40.725083 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="ovnkube-controller" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.725090 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="ovnkube-controller" Jan 28 15:58:40 crc kubenswrapper[4811]: E0128 15:58:40.725096 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec729dda-4b2d-458b-8e11-6cc8beacb717" containerName="registry" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.725102 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec729dda-4b2d-458b-8e11-6cc8beacb717" containerName="registry" Jan 28 15:58:40 crc kubenswrapper[4811]: E0128 15:58:40.725109 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="ovnkube-controller" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.725116 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="ovnkube-controller" Jan 28 15:58:40 crc kubenswrapper[4811]: E0128 15:58:40.725123 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="ovnkube-controller" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.725128 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="ovnkube-controller" Jan 28 15:58:40 crc kubenswrapper[4811]: E0128 15:58:40.725135 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="ovnkube-controller" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.725141 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="ovnkube-controller" Jan 28 15:58:40 crc kubenswrapper[4811]: E0128 15:58:40.725150 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="sbdb" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.725155 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="sbdb" Jan 28 15:58:40 crc kubenswrapper[4811]: E0128 15:58:40.725165 4811 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="ovn-acl-logging" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.725171 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="ovn-acl-logging" Jan 28 15:58:40 crc kubenswrapper[4811]: E0128 15:58:40.725181 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="kubecfg-setup" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.725187 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="kubecfg-setup" Jan 28 15:58:40 crc kubenswrapper[4811]: E0128 15:58:40.725194 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="nbdb" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.725200 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="nbdb" Jan 28 15:58:40 crc kubenswrapper[4811]: E0128 15:58:40.725208 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="kube-rbac-proxy-ovn-metrics" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.725213 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="kube-rbac-proxy-ovn-metrics" Jan 28 15:58:40 crc kubenswrapper[4811]: E0128 15:58:40.725220 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="kube-rbac-proxy-node" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.725226 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="kube-rbac-proxy-node" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.725305 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="kube-rbac-proxy-node" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.725315 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="ovnkube-controller" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.725321 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="ovnkube-controller" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.725327 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="ovn-acl-logging" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.725336 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="northd" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.725343 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec729dda-4b2d-458b-8e11-6cc8beacb717" containerName="registry" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.725351 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="ovnkube-controller" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.725359 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="ovnkube-controller" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.725364 
4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="ovn-controller" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.725372 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="nbdb" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.725379 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="sbdb" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.725388 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="kube-rbac-proxy-ovn-metrics" Jan 28 15:58:40 crc kubenswrapper[4811]: E0128 15:58:40.725483 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="ovnkube-controller" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.725490 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="ovnkube-controller" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.725566 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" containerName="ovnkube-controller" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.726899 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.732509 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d3371905-8bb6-433c-b905-43d69e7b382a-ovnkube-config\") pod \"d3371905-8bb6-433c-b905-43d69e7b382a\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.732548 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-cni-bin\") pod \"d3371905-8bb6-433c-b905-43d69e7b382a\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.732570 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j4mzj\" (UniqueName: \"kubernetes.io/projected/d3371905-8bb6-433c-b905-43d69e7b382a-kube-api-access-j4mzj\") pod \"d3371905-8bb6-433c-b905-43d69e7b382a\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.732586 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-run-systemd\") pod \"d3371905-8bb6-433c-b905-43d69e7b382a\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.732600 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-run-ovn\") pod \"d3371905-8bb6-433c-b905-43d69e7b382a\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.732621 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"d3371905-8bb6-433c-b905-43d69e7b382a\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.732640 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-run-openvswitch\") pod \"d3371905-8bb6-433c-b905-43d69e7b382a\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.732663 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-run-ovn-kubernetes\") pod \"d3371905-8bb6-433c-b905-43d69e7b382a\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.732681 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d3371905-8bb6-433c-b905-43d69e7b382a-ovn-node-metrics-cert\") pod \"d3371905-8bb6-433c-b905-43d69e7b382a\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.732695 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-log-socket\") pod \"d3371905-8bb6-433c-b905-43d69e7b382a\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.732711 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d3371905-8bb6-433c-b905-43d69e7b382a-env-overrides\") pod \"d3371905-8bb6-433c-b905-43d69e7b382a\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.732723 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-node-log\") pod \"d3371905-8bb6-433c-b905-43d69e7b382a\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.732740 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-systemd-units\") pod \"d3371905-8bb6-433c-b905-43d69e7b382a\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.732755 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-etc-openvswitch\") pod \"d3371905-8bb6-433c-b905-43d69e7b382a\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.732767 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-cni-netd\") pod \"d3371905-8bb6-433c-b905-43d69e7b382a\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.732781 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-kubelet\") pod \"d3371905-8bb6-433c-b905-43d69e7b382a\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.732799 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d3371905-8bb6-433c-b905-43d69e7b382a-ovnkube-script-lib\") pod \"d3371905-8bb6-433c-b905-43d69e7b382a\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.732814 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-run-netns\") pod \"d3371905-8bb6-433c-b905-43d69e7b382a\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.732830 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-var-lib-openvswitch\") pod \"d3371905-8bb6-433c-b905-43d69e7b382a\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.732842 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-slash\") pod \"d3371905-8bb6-433c-b905-43d69e7b382a\" (UID: \"d3371905-8bb6-433c-b905-43d69e7b382a\") " Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.732906 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/366df858-12b8-4efc-a884-ed709352193a-env-overrides\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.732933 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97cw9\" (UniqueName: \"kubernetes.io/projected/366df858-12b8-4efc-a884-ed709352193a-kube-api-access-97cw9\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.732956 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-host-run-netns\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.732971 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/366df858-12b8-4efc-a884-ed709352193a-ovnkube-config\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.732987 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-host-run-ovn-kubernetes\") pod 
\"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.733007 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-host-slash\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.733024 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-run-systemd\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.733038 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-systemd-units\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.733055 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-var-lib-openvswitch\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.733072 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-run-openvswitch\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.733092 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-host-cni-netd\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.733110 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.733125 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-etc-openvswitch\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.733142 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-node-log\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.733158 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-log-socket\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.733174 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-host-cni-bin\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.733243 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "d3371905-8bb6-433c-b905-43d69e7b382a" (UID: "d3371905-8bb6-433c-b905-43d69e7b382a"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.733595 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3371905-8bb6-433c-b905-43d69e7b382a-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "d3371905-8bb6-433c-b905-43d69e7b382a" (UID: "d3371905-8bb6-433c-b905-43d69e7b382a"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.733627 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "d3371905-8bb6-433c-b905-43d69e7b382a" (UID: "d3371905-8bb6-433c-b905-43d69e7b382a"). InnerVolumeSpecName "host-cni-bin". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.734328 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/366df858-12b8-4efc-a884-ed709352193a-ovn-node-metrics-cert\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.734356 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-host-kubelet\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.734380 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/366df858-12b8-4efc-a884-ed709352193a-ovnkube-script-lib\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.734399 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-run-ovn\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.734454 4811 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-run-netns\") on node \"crc\" DevicePath \"\"" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.734468 4811 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d3371905-8bb6-433c-b905-43d69e7b382a-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.734478 4811 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-cni-bin\") on node \"crc\" DevicePath \"\"" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.734516 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "d3371905-8bb6-433c-b905-43d69e7b382a" (UID: "d3371905-8bb6-433c-b905-43d69e7b382a"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.734538 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-slash" (OuterVolumeSpecName: "host-slash") pod "d3371905-8bb6-433c-b905-43d69e7b382a" (UID: "d3371905-8bb6-433c-b905-43d69e7b382a"). InnerVolumeSpecName "host-slash". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.734649 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-log-socket" (OuterVolumeSpecName: "log-socket") pod "d3371905-8bb6-433c-b905-43d69e7b382a" (UID: "d3371905-8bb6-433c-b905-43d69e7b382a"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.735517 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "d3371905-8bb6-433c-b905-43d69e7b382a" (UID: "d3371905-8bb6-433c-b905-43d69e7b382a"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.736516 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-node-log" (OuterVolumeSpecName: "node-log") pod "d3371905-8bb6-433c-b905-43d69e7b382a" (UID: "d3371905-8bb6-433c-b905-43d69e7b382a"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.736794 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "d3371905-8bb6-433c-b905-43d69e7b382a" (UID: "d3371905-8bb6-433c-b905-43d69e7b382a"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.736575 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "d3371905-8bb6-433c-b905-43d69e7b382a" (UID: "d3371905-8bb6-433c-b905-43d69e7b382a"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.736600 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "d3371905-8bb6-433c-b905-43d69e7b382a" (UID: "d3371905-8bb6-433c-b905-43d69e7b382a"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.736849 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3371905-8bb6-433c-b905-43d69e7b382a-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "d3371905-8bb6-433c-b905-43d69e7b382a" (UID: "d3371905-8bb6-433c-b905-43d69e7b382a"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.736624 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "d3371905-8bb6-433c-b905-43d69e7b382a" (UID: "d3371905-8bb6-433c-b905-43d69e7b382a"). InnerVolumeSpecName "host-cni-netd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.736191 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3371905-8bb6-433c-b905-43d69e7b382a-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "d3371905-8bb6-433c-b905-43d69e7b382a" (UID: "d3371905-8bb6-433c-b905-43d69e7b382a"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.736665 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "d3371905-8bb6-433c-b905-43d69e7b382a" (UID: "d3371905-8bb6-433c-b905-43d69e7b382a"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.736646 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "d3371905-8bb6-433c-b905-43d69e7b382a" (UID: "d3371905-8bb6-433c-b905-43d69e7b382a"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.736704 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "d3371905-8bb6-433c-b905-43d69e7b382a" (UID: "d3371905-8bb6-433c-b905-43d69e7b382a"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.741860 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3371905-8bb6-433c-b905-43d69e7b382a-kube-api-access-j4mzj" (OuterVolumeSpecName: "kube-api-access-j4mzj") pod "d3371905-8bb6-433c-b905-43d69e7b382a" (UID: "d3371905-8bb6-433c-b905-43d69e7b382a"). InnerVolumeSpecName "kube-api-access-j4mzj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.744344 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3371905-8bb6-433c-b905-43d69e7b382a-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "d3371905-8bb6-433c-b905-43d69e7b382a" (UID: "d3371905-8bb6-433c-b905-43d69e7b382a"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.757182 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "d3371905-8bb6-433c-b905-43d69e7b382a" (UID: "d3371905-8bb6-433c-b905-43d69e7b382a"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.834710 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-run-systemd\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.834751 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-systemd-units\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.834768 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-var-lib-openvswitch\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.834784 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-run-openvswitch\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.834802 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-host-cni-netd\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.834820 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.834834 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-etc-openvswitch\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.834850 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-node-log\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.834865 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-log-socket\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 
15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.834880 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-host-cni-bin\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.834895 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/366df858-12b8-4efc-a884-ed709352193a-ovn-node-metrics-cert\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.834911 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-host-kubelet\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.834925 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/366df858-12b8-4efc-a884-ed709352193a-ovnkube-script-lib\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.834944 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-run-ovn\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.834958 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/366df858-12b8-4efc-a884-ed709352193a-env-overrides\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.834981 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97cw9\" (UniqueName: \"kubernetes.io/projected/366df858-12b8-4efc-a884-ed709352193a-kube-api-access-97cw9\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.835001 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-host-run-netns\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.835016 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/366df858-12b8-4efc-a884-ed709352193a-ovnkube-config\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.835032 4811 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-host-run-ovn-kubernetes\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.835049 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-host-slash\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.835087 4811 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.835098 4811 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-slash\") on node \"crc\" DevicePath \"\"" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.835107 4811 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-run-systemd\") on node \"crc\" DevicePath \"\"" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.835134 4811 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.835143 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j4mzj\" (UniqueName: \"kubernetes.io/projected/d3371905-8bb6-433c-b905-43d69e7b382a-kube-api-access-j4mzj\") on node \"crc\" DevicePath \"\"" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.835153 4811 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.835162 4811 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-run-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.835172 4811 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.835180 4811 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d3371905-8bb6-433c-b905-43d69e7b382a-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.835189 4811 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-log-socket\") on node \"crc\" DevicePath \"\"" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.835197 4811 reconciler_common.go:293] "Volume detached for 
volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d3371905-8bb6-433c-b905-43d69e7b382a-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.835204 4811 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-node-log\") on node \"crc\" DevicePath \"\"" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.835213 4811 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-systemd-units\") on node \"crc\" DevicePath \"\"" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.835221 4811 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.835229 4811 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-kubelet\") on node \"crc\" DevicePath \"\"" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.835237 4811 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d3371905-8bb6-433c-b905-43d69e7b382a-host-cni-netd\") on node \"crc\" DevicePath \"\"" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.835245 4811 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d3371905-8bb6-433c-b905-43d69e7b382a-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.835281 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-host-slash\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.835313 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-run-systemd\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.835335 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-systemd-units\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.835354 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-var-lib-openvswitch\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.835374 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-run-openvswitch\") pod \"ovnkube-node-pxnlc\" (UID: 
\"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.835393 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-host-cni-netd\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.835412 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.835456 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-etc-openvswitch\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.835486 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-node-log\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.835512 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-log-socket\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.835600 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-host-cni-bin\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.836539 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/366df858-12b8-4efc-a884-ed709352193a-env-overrides\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.836576 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-host-kubelet\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.836598 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-run-ovn\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.836680 4811 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-host-run-netns\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.836894 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/366df858-12b8-4efc-a884-ed709352193a-host-run-ovn-kubernetes\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.837568 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/366df858-12b8-4efc-a884-ed709352193a-ovnkube-script-lib\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.838089 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/366df858-12b8-4efc-a884-ed709352193a-ovnkube-config\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.842578 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/366df858-12b8-4efc-a884-ed709352193a-ovn-node-metrics-cert\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:40 crc kubenswrapper[4811]: I0128 15:58:40.855975 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97cw9\" (UniqueName: \"kubernetes.io/projected/366df858-12b8-4efc-a884-ed709352193a-kube-api-access-97cw9\") pod \"ovnkube-node-pxnlc\" (UID: \"366df858-12b8-4efc-a884-ed709352193a\") " pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.056827 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.462522 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-mw4th_dc61e84d-a003-46db-924b-7f9ba7460f13/kube-multus/2.log" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.464275 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-mw4th_dc61e84d-a003-46db-924b-7f9ba7460f13/kube-multus/1.log" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.464524 4811 generic.go:334] "Generic (PLEG): container finished" podID="dc61e84d-a003-46db-924b-7f9ba7460f13" containerID="b9e6fde8714d52f3ab851df784d7d4129ecbcd7b592df17623bd72ae460ef5d9" exitCode=2 Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.464601 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-mw4th" event={"ID":"dc61e84d-a003-46db-924b-7f9ba7460f13","Type":"ContainerDied","Data":"b9e6fde8714d52f3ab851df784d7d4129ecbcd7b592df17623bd72ae460ef5d9"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.464986 4811 scope.go:117] "RemoveContainer" containerID="204fd678c7f2d914319df1c9cc694ffc516812b85a27536428e0e91fecb39c33" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.465789 4811 scope.go:117] "RemoveContainer" containerID="b9e6fde8714d52f3ab851df784d7d4129ecbcd7b592df17623bd72ae460ef5d9" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.467209 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dkzf6_d3371905-8bb6-433c-b905-43d69e7b382a/ovnkube-controller/3.log" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.472378 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dkzf6_d3371905-8bb6-433c-b905-43d69e7b382a/ovn-acl-logging/0.log" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.473002 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dkzf6_d3371905-8bb6-433c-b905-43d69e7b382a/ovn-controller/0.log" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.473558 4811 generic.go:334] "Generic (PLEG): container finished" podID="d3371905-8bb6-433c-b905-43d69e7b382a" containerID="5f0050e1d3952d6c23ba672537a0f0d69e033e6a7cef3fb4236b9b5f970b4e23" exitCode=0 Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.473585 4811 generic.go:334] "Generic (PLEG): container finished" podID="d3371905-8bb6-433c-b905-43d69e7b382a" containerID="fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025" exitCode=0 Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.473598 4811 generic.go:334] "Generic (PLEG): container finished" podID="d3371905-8bb6-433c-b905-43d69e7b382a" containerID="70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265" exitCode=0 Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.473609 4811 generic.go:334] "Generic (PLEG): container finished" podID="d3371905-8bb6-433c-b905-43d69e7b382a" containerID="3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6" exitCode=0 Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.473620 4811 generic.go:334] "Generic (PLEG): container finished" podID="d3371905-8bb6-433c-b905-43d69e7b382a" containerID="360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77" exitCode=0 Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.473632 4811 generic.go:334] "Generic (PLEG): container finished" 
podID="d3371905-8bb6-433c-b905-43d69e7b382a" containerID="13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c" exitCode=0 Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.473642 4811 generic.go:334] "Generic (PLEG): container finished" podID="d3371905-8bb6-433c-b905-43d69e7b382a" containerID="575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698" exitCode=143 Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.473655 4811 generic.go:334] "Generic (PLEG): container finished" podID="d3371905-8bb6-433c-b905-43d69e7b382a" containerID="b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf" exitCode=143 Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.473715 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" event={"ID":"d3371905-8bb6-433c-b905-43d69e7b382a","Type":"ContainerDied","Data":"5f0050e1d3952d6c23ba672537a0f0d69e033e6a7cef3fb4236b9b5f970b4e23"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.473751 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" event={"ID":"d3371905-8bb6-433c-b905-43d69e7b382a","Type":"ContainerDied","Data":"fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.473773 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" event={"ID":"d3371905-8bb6-433c-b905-43d69e7b382a","Type":"ContainerDied","Data":"70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.473789 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" event={"ID":"d3371905-8bb6-433c-b905-43d69e7b382a","Type":"ContainerDied","Data":"3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.473805 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" event={"ID":"d3371905-8bb6-433c-b905-43d69e7b382a","Type":"ContainerDied","Data":"360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.473824 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" event={"ID":"d3371905-8bb6-433c-b905-43d69e7b382a","Type":"ContainerDied","Data":"13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.473841 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5f0050e1d3952d6c23ba672537a0f0d69e033e6a7cef3fb4236b9b5f970b4e23"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.473856 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5a7c911b8901e1963fe372b87999b0e5961f4fe4a184c52d659bee9aece40d16"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.473867 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.473877 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265"} Jan 
28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.473887 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.473896 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.473905 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.473915 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.473925 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.473959 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.473973 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" event={"ID":"d3371905-8bb6-433c-b905-43d69e7b382a","Type":"ContainerDied","Data":"575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.473990 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5f0050e1d3952d6c23ba672537a0f0d69e033e6a7cef3fb4236b9b5f970b4e23"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.474005 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5a7c911b8901e1963fe372b87999b0e5961f4fe4a184c52d659bee9aece40d16"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.474015 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.474025 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.474035 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.474045 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.474055 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c"} Jan 
28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.474065 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.474074 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.474083 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.474097 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" event={"ID":"d3371905-8bb6-433c-b905-43d69e7b382a","Type":"ContainerDied","Data":"b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.474115 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5f0050e1d3952d6c23ba672537a0f0d69e033e6a7cef3fb4236b9b5f970b4e23"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.474127 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5a7c911b8901e1963fe372b87999b0e5961f4fe4a184c52d659bee9aece40d16"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.474138 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.474149 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.474158 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.474168 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.474178 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.474189 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.474200 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.474210 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c"} Jan 
28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.474223 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" event={"ID":"d3371905-8bb6-433c-b905-43d69e7b382a","Type":"ContainerDied","Data":"26b4e82fe90c5d4c9d3413006a47ff41874d0e2d6bf1c08ea4204ff1ee39de15"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.474239 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5f0050e1d3952d6c23ba672537a0f0d69e033e6a7cef3fb4236b9b5f970b4e23"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.474251 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5a7c911b8901e1963fe372b87999b0e5961f4fe4a184c52d659bee9aece40d16"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.474261 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.474271 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.474281 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.474290 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.474299 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.474309 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.474319 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.474328 4811 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.474482 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-dkzf6" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.487268 4811 generic.go:334] "Generic (PLEG): container finished" podID="366df858-12b8-4efc-a884-ed709352193a" containerID="6917ba7c66d8bf3a493056a8331610da3e0f8de7cf1e376d1286ab1f9f00b55b" exitCode=0 Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.487302 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" event={"ID":"366df858-12b8-4efc-a884-ed709352193a","Type":"ContainerDied","Data":"6917ba7c66d8bf3a493056a8331610da3e0f8de7cf1e376d1286ab1f9f00b55b"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.487323 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" event={"ID":"366df858-12b8-4efc-a884-ed709352193a","Type":"ContainerStarted","Data":"7abef24bd4a8343ca2291b41571ca311d1eb75b567420c266f299239dac9da2c"} Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.498227 4811 scope.go:117] "RemoveContainer" containerID="5f0050e1d3952d6c23ba672537a0f0d69e033e6a7cef3fb4236b9b5f970b4e23" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.556509 4811 scope.go:117] "RemoveContainer" containerID="5a7c911b8901e1963fe372b87999b0e5961f4fe4a184c52d659bee9aece40d16" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.593107 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-dkzf6"] Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.600983 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-dkzf6"] Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.601629 4811 scope.go:117] "RemoveContainer" containerID="fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.646330 4811 scope.go:117] "RemoveContainer" containerID="70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.675214 4811 scope.go:117] "RemoveContainer" containerID="3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.689137 4811 scope.go:117] "RemoveContainer" containerID="360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.704601 4811 scope.go:117] "RemoveContainer" containerID="13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.727178 4811 scope.go:117] "RemoveContainer" containerID="575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.766006 4811 scope.go:117] "RemoveContainer" containerID="b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.780655 4811 scope.go:117] "RemoveContainer" containerID="b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.795383 4811 scope.go:117] "RemoveContainer" containerID="5f0050e1d3952d6c23ba672537a0f0d69e033e6a7cef3fb4236b9b5f970b4e23" Jan 28 15:58:41 crc kubenswrapper[4811]: E0128 15:58:41.795741 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f0050e1d3952d6c23ba672537a0f0d69e033e6a7cef3fb4236b9b5f970b4e23\": container with ID 
starting with 5f0050e1d3952d6c23ba672537a0f0d69e033e6a7cef3fb4236b9b5f970b4e23 not found: ID does not exist" containerID="5f0050e1d3952d6c23ba672537a0f0d69e033e6a7cef3fb4236b9b5f970b4e23" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.795790 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f0050e1d3952d6c23ba672537a0f0d69e033e6a7cef3fb4236b9b5f970b4e23"} err="failed to get container status \"5f0050e1d3952d6c23ba672537a0f0d69e033e6a7cef3fb4236b9b5f970b4e23\": rpc error: code = NotFound desc = could not find container \"5f0050e1d3952d6c23ba672537a0f0d69e033e6a7cef3fb4236b9b5f970b4e23\": container with ID starting with 5f0050e1d3952d6c23ba672537a0f0d69e033e6a7cef3fb4236b9b5f970b4e23 not found: ID does not exist" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.795811 4811 scope.go:117] "RemoveContainer" containerID="5a7c911b8901e1963fe372b87999b0e5961f4fe4a184c52d659bee9aece40d16" Jan 28 15:58:41 crc kubenswrapper[4811]: E0128 15:58:41.796087 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a7c911b8901e1963fe372b87999b0e5961f4fe4a184c52d659bee9aece40d16\": container with ID starting with 5a7c911b8901e1963fe372b87999b0e5961f4fe4a184c52d659bee9aece40d16 not found: ID does not exist" containerID="5a7c911b8901e1963fe372b87999b0e5961f4fe4a184c52d659bee9aece40d16" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.796110 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a7c911b8901e1963fe372b87999b0e5961f4fe4a184c52d659bee9aece40d16"} err="failed to get container status \"5a7c911b8901e1963fe372b87999b0e5961f4fe4a184c52d659bee9aece40d16\": rpc error: code = NotFound desc = could not find container \"5a7c911b8901e1963fe372b87999b0e5961f4fe4a184c52d659bee9aece40d16\": container with ID starting with 5a7c911b8901e1963fe372b87999b0e5961f4fe4a184c52d659bee9aece40d16 not found: ID does not exist" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.796121 4811 scope.go:117] "RemoveContainer" containerID="fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025" Jan 28 15:58:41 crc kubenswrapper[4811]: E0128 15:58:41.796403 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025\": container with ID starting with fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025 not found: ID does not exist" containerID="fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.796448 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025"} err="failed to get container status \"fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025\": rpc error: code = NotFound desc = could not find container \"fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025\": container with ID starting with fd79575010a78553a5af2fc10b4272799c2a09d903b241e5ab51945d13c4a025 not found: ID does not exist" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.796469 4811 scope.go:117] "RemoveContainer" containerID="70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265" Jan 28 15:58:41 crc kubenswrapper[4811]: E0128 15:58:41.796685 4811 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265\": container with ID starting with 70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265 not found: ID does not exist" containerID="70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.796707 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265"} err="failed to get container status \"70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265\": rpc error: code = NotFound desc = could not find container \"70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265\": container with ID starting with 70c3fda762fe236d14fa4dc0eb6c33fbf1c958d805f3f4fc470e0f822ecf1265 not found: ID does not exist" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.796742 4811 scope.go:117] "RemoveContainer" containerID="3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6" Jan 28 15:58:41 crc kubenswrapper[4811]: E0128 15:58:41.796950 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6\": container with ID starting with 3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6 not found: ID does not exist" containerID="3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.796992 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6"} err="failed to get container status \"3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6\": rpc error: code = NotFound desc = could not find container \"3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6\": container with ID starting with 3dc69f5b5a641c4af16865277bf9a20967fb5d3074f48337012dd6fb9c2fcac6 not found: ID does not exist" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.797013 4811 scope.go:117] "RemoveContainer" containerID="360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77" Jan 28 15:58:41 crc kubenswrapper[4811]: E0128 15:58:41.797278 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77\": container with ID starting with 360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77 not found: ID does not exist" containerID="360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.797331 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77"} err="failed to get container status \"360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77\": rpc error: code = NotFound desc = could not find container \"360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77\": container with ID starting with 360229d9a54a5f329512ace914016983eb5c50b00284cfe69dec02db4c794f77 not found: ID does not exist" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.797390 4811 scope.go:117] "RemoveContainer" 
containerID="13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c" Jan 28 15:58:41 crc kubenswrapper[4811]: E0128 15:58:41.797814 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c\": container with ID starting with 13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c not found: ID does not exist" containerID="13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.797861 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c"} err="failed to get container status \"13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c\": rpc error: code = NotFound desc = could not find container \"13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c\": container with ID starting with 13ea8b778641f60f1be3cd706a8db6350d0cc64100cc207323931240900f5e4c not found: ID does not exist" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.797882 4811 scope.go:117] "RemoveContainer" containerID="575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698" Jan 28 15:58:41 crc kubenswrapper[4811]: E0128 15:58:41.798138 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698\": container with ID starting with 575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698 not found: ID does not exist" containerID="575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.798169 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698"} err="failed to get container status \"575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698\": rpc error: code = NotFound desc = could not find container \"575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698\": container with ID starting with 575abad4b33b5057b66dc08501318b2b6ae57680617ba85279ef142425ef1698 not found: ID does not exist" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.798189 4811 scope.go:117] "RemoveContainer" containerID="b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf" Jan 28 15:58:41 crc kubenswrapper[4811]: E0128 15:58:41.799288 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf\": container with ID starting with b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf not found: ID does not exist" containerID="b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf" Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.799352 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf"} err="failed to get container status \"b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf\": rpc error: code = NotFound desc = could not find container \"b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf\": container with ID starting with 
b90bbb7031db1771e5b6f0d735f37ece30c85c852346a121757be2619a37ccaf not found: ID does not exist"
Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.799372 4811 scope.go:117] "RemoveContainer" containerID="b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c"
Jan 28 15:58:41 crc kubenswrapper[4811]: E0128 15:58:41.799716 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\": container with ID starting with b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c not found: ID does not exist" containerID="b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c"
Jan 28 15:58:41 crc kubenswrapper[4811]: I0128 15:58:41.799752 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c"} err="failed to get container status \"b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\": rpc error: code = NotFound desc = could not find container \"b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c\": container with ID starting with b9711116d05af459680fb793230c9998331b87559a38f31fe7cfc7c4fde6945c not found: ID does not exist"

[15:58:41.799775 through 15:58:41.809020: the same "RemoveContainer" / "DeleteContainer returned error" (NotFound) pair repeats three more times for each of the same ten container IDs (5f0050e1…, 5a7c911b…, fd795750…, 70c3fda7…, 3dc69f5b…, 360229d9…, 13ea8b77…, 575abad4…, b90bbb70…, b9711116…); the duplicate cycles are omitted.]

Jan 28 15:58:42 crc kubenswrapper[4811]: I0128 15:58:42.346853 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3371905-8bb6-433c-b905-43d69e7b382a" path="/var/lib/kubelet/pods/d3371905-8bb6-433c-b905-43d69e7b382a/volumes"
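The NotFound churn above is the kubelet re-checking container status over CRI for IDs the runtime has already pruned. Purely as an illustration (not part of this log, and not the kubelet's actual code), a minimal Go sketch of the same ContainerStatus query against CRI-O's socket could look like the following; the socket path and the reuse of a container ID from the log are assumptions:

package main

import (
	"context"
	"fmt"
	"time"

	"google.golang.org/grpc"
	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/credentials/insecure"
	"google.golang.org/grpc/status"
	runtimeapi "k8s.io/cri-api/pkg/apis/runtime/v1"
)

func main() {
	// Assumed CRI-O socket path on this node; other runtimes differ.
	conn, err := grpc.Dial("unix:///var/run/crio/crio.sock",
		grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		panic(err)
	}
	defer conn.Close()

	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	// Container ID reused from the log entries above, for illustration only.
	id := "5f0050e1d3952d6c23ba672537a0f0d69e033e6a7cef3fb4236b9b5f970b4e23"
	_, err = runtimeapi.NewRuntimeServiceClient(conn).ContainerStatus(ctx,
		&runtimeapi.ContainerStatusRequest{ContainerId: id})
	if status.Code(err) == codes.NotFound {
		// The runtime has already pruned this container; deleting it again
		// is a no-op, which is what the log's NotFound errors amount to.
		fmt.Println("container already gone:", err)
	}
}

codes.NotFound here is the same "rpc error: code = NotFound" surfaced in the entries above; the sketch stops at detecting it and does not mimic the kubelet's retry policy.

Jan 28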
15:58:42 crc kubenswrapper[4811]: I0128 15:58:42.503039 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-mw4th_dc61e84d-a003-46db-924b-7f9ba7460f13/kube-multus/2.log" Jan 28 15:58:42 crc kubenswrapper[4811]: I0128 15:58:42.503131 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-mw4th" event={"ID":"dc61e84d-a003-46db-924b-7f9ba7460f13","Type":"ContainerStarted","Data":"ca4ae7660c8cc6c725953cafbd04e346c0ff5862148c3668aeea5e2aea79e799"} Jan 28 15:58:42 crc kubenswrapper[4811]: I0128 15:58:42.509632 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" event={"ID":"366df858-12b8-4efc-a884-ed709352193a","Type":"ContainerStarted","Data":"19d8845b2bcd5b826c474a77d028a751b86a7df2e7967429867471e878dd131b"} Jan 28 15:58:42 crc kubenswrapper[4811]: I0128 15:58:42.509696 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" event={"ID":"366df858-12b8-4efc-a884-ed709352193a","Type":"ContainerStarted","Data":"c46c398bc318ed98307859103621f529c2dadd61eaca82ab908366d51b9d30bc"} Jan 28 15:58:42 crc kubenswrapper[4811]: I0128 15:58:42.509741 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" event={"ID":"366df858-12b8-4efc-a884-ed709352193a","Type":"ContainerStarted","Data":"4d1bb2de93346014e4047b45780a4353a8ee61ce41418ec1e0b1d9d203133a70"} Jan 28 15:58:42 crc kubenswrapper[4811]: I0128 15:58:42.509769 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" event={"ID":"366df858-12b8-4efc-a884-ed709352193a","Type":"ContainerStarted","Data":"06880b43da22ebb372424698e85e069fe891887ad7e9c60d0ce962dc8c2f4254"} Jan 28 15:58:42 crc kubenswrapper[4811]: I0128 15:58:42.509802 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" event={"ID":"366df858-12b8-4efc-a884-ed709352193a","Type":"ContainerStarted","Data":"beaeda9a6ebadebd353eabe98ccf83b6d70c18df05f9d1c299b27696fa0e62cb"} Jan 28 15:58:42 crc kubenswrapper[4811]: I0128 15:58:42.509828 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" event={"ID":"366df858-12b8-4efc-a884-ed709352193a","Type":"ContainerStarted","Data":"f9b5b69e03da861c1a889541954d9d44d7f44931ce463a354de682426e47fe00"} Jan 28 15:58:44 crc kubenswrapper[4811]: I0128 15:58:44.525579 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" event={"ID":"366df858-12b8-4efc-a884-ed709352193a","Type":"ContainerStarted","Data":"8426f05a60f2b6968cc3687ce03110f56d4cc4f6910f38b34da177ba5f10a073"} Jan 28 15:58:45 crc kubenswrapper[4811]: I0128 15:58:45.433743 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-tx9bl"] Jan 28 15:58:45 crc kubenswrapper[4811]: I0128 15:58:45.434682 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-tx9bl" Jan 28 15:58:45 crc kubenswrapper[4811]: I0128 15:58:45.439841 4811 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-jbh6j" Jan 28 15:58:45 crc kubenswrapper[4811]: I0128 15:58:45.439865 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 28 15:58:45 crc kubenswrapper[4811]: I0128 15:58:45.439972 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 28 15:58:45 crc kubenswrapper[4811]: I0128 15:58:45.440226 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 28 15:58:45 crc kubenswrapper[4811]: I0128 15:58:45.492533 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/325e4634-8f82-469a-a1d6-0b9f0364f1dc-node-mnt\") pod \"crc-storage-crc-tx9bl\" (UID: \"325e4634-8f82-469a-a1d6-0b9f0364f1dc\") " pod="crc-storage/crc-storage-crc-tx9bl" Jan 28 15:58:45 crc kubenswrapper[4811]: I0128 15:58:45.492602 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/325e4634-8f82-469a-a1d6-0b9f0364f1dc-crc-storage\") pod \"crc-storage-crc-tx9bl\" (UID: \"325e4634-8f82-469a-a1d6-0b9f0364f1dc\") " pod="crc-storage/crc-storage-crc-tx9bl" Jan 28 15:58:45 crc kubenswrapper[4811]: I0128 15:58:45.492709 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sr2mk\" (UniqueName: \"kubernetes.io/projected/325e4634-8f82-469a-a1d6-0b9f0364f1dc-kube-api-access-sr2mk\") pod \"crc-storage-crc-tx9bl\" (UID: \"325e4634-8f82-469a-a1d6-0b9f0364f1dc\") " pod="crc-storage/crc-storage-crc-tx9bl" Jan 28 15:58:45 crc kubenswrapper[4811]: I0128 15:58:45.594659 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sr2mk\" (UniqueName: \"kubernetes.io/projected/325e4634-8f82-469a-a1d6-0b9f0364f1dc-kube-api-access-sr2mk\") pod \"crc-storage-crc-tx9bl\" (UID: \"325e4634-8f82-469a-a1d6-0b9f0364f1dc\") " pod="crc-storage/crc-storage-crc-tx9bl" Jan 28 15:58:45 crc kubenswrapper[4811]: I0128 15:58:45.594788 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/325e4634-8f82-469a-a1d6-0b9f0364f1dc-node-mnt\") pod \"crc-storage-crc-tx9bl\" (UID: \"325e4634-8f82-469a-a1d6-0b9f0364f1dc\") " pod="crc-storage/crc-storage-crc-tx9bl" Jan 28 15:58:45 crc kubenswrapper[4811]: I0128 15:58:45.594892 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/325e4634-8f82-469a-a1d6-0b9f0364f1dc-crc-storage\") pod \"crc-storage-crc-tx9bl\" (UID: \"325e4634-8f82-469a-a1d6-0b9f0364f1dc\") " pod="crc-storage/crc-storage-crc-tx9bl" Jan 28 15:58:45 crc kubenswrapper[4811]: I0128 15:58:45.595089 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/325e4634-8f82-469a-a1d6-0b9f0364f1dc-node-mnt\") pod \"crc-storage-crc-tx9bl\" (UID: \"325e4634-8f82-469a-a1d6-0b9f0364f1dc\") " pod="crc-storage/crc-storage-crc-tx9bl" Jan 28 15:58:45 crc kubenswrapper[4811]: I0128 15:58:45.596177 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"crc-storage\" (UniqueName: \"kubernetes.io/configmap/325e4634-8f82-469a-a1d6-0b9f0364f1dc-crc-storage\") pod \"crc-storage-crc-tx9bl\" (UID: \"325e4634-8f82-469a-a1d6-0b9f0364f1dc\") " pod="crc-storage/crc-storage-crc-tx9bl" Jan 28 15:58:45 crc kubenswrapper[4811]: I0128 15:58:45.629749 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sr2mk\" (UniqueName: \"kubernetes.io/projected/325e4634-8f82-469a-a1d6-0b9f0364f1dc-kube-api-access-sr2mk\") pod \"crc-storage-crc-tx9bl\" (UID: \"325e4634-8f82-469a-a1d6-0b9f0364f1dc\") " pod="crc-storage/crc-storage-crc-tx9bl" Jan 28 15:58:45 crc kubenswrapper[4811]: I0128 15:58:45.763060 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-tx9bl" Jan 28 15:58:45 crc kubenswrapper[4811]: E0128 15:58:45.806410 4811 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-tx9bl_crc-storage_325e4634-8f82-469a-a1d6-0b9f0364f1dc_0(c91df5958946e3c9a354ccd92fcd482121aee8316bc5c6547ced09256e425e20): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 28 15:58:45 crc kubenswrapper[4811]: E0128 15:58:45.806500 4811 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-tx9bl_crc-storage_325e4634-8f82-469a-a1d6-0b9f0364f1dc_0(c91df5958946e3c9a354ccd92fcd482121aee8316bc5c6547ced09256e425e20): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-tx9bl" Jan 28 15:58:45 crc kubenswrapper[4811]: E0128 15:58:45.806521 4811 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-tx9bl_crc-storage_325e4634-8f82-469a-a1d6-0b9f0364f1dc_0(c91df5958946e3c9a354ccd92fcd482121aee8316bc5c6547ced09256e425e20): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-tx9bl" Jan 28 15:58:45 crc kubenswrapper[4811]: E0128 15:58:45.806571 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-tx9bl_crc-storage(325e4634-8f82-469a-a1d6-0b9f0364f1dc)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-tx9bl_crc-storage(325e4634-8f82-469a-a1d6-0b9f0364f1dc)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-tx9bl_crc-storage_325e4634-8f82-469a-a1d6-0b9f0364f1dc_0(c91df5958946e3c9a354ccd92fcd482121aee8316bc5c6547ced09256e425e20): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-tx9bl" podUID="325e4634-8f82-469a-a1d6-0b9f0364f1dc" Jan 28 15:58:47 crc kubenswrapper[4811]: I0128 15:58:47.461797 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-tx9bl"] Jan 28 15:58:47 crc kubenswrapper[4811]: I0128 15:58:47.462559 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-tx9bl" Jan 28 15:58:47 crc kubenswrapper[4811]: I0128 15:58:47.463465 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-tx9bl"
Jan 28 15:58:47 crc kubenswrapper[4811]: E0128 15:58:47.498135 4811 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-tx9bl_crc-storage_325e4634-8f82-469a-a1d6-0b9f0364f1dc_0(d38848b470abe3542a26849343f092ac35f60650ce4c7e688491eb3d7a550a3e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Jan 28 15:58:47 crc kubenswrapper[4811]: E0128 15:58:47.498199 4811 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-tx9bl_crc-storage_325e4634-8f82-469a-a1d6-0b9f0364f1dc_0(d38848b470abe3542a26849343f092ac35f60650ce4c7e688491eb3d7a550a3e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-tx9bl"
Jan 28 15:58:47 crc kubenswrapper[4811]: E0128 15:58:47.498226 4811 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-tx9bl_crc-storage_325e4634-8f82-469a-a1d6-0b9f0364f1dc_0(d38848b470abe3542a26849343f092ac35f60650ce4c7e688491eb3d7a550a3e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-tx9bl"
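The error lines around this point all reduce to one condition: CRI-O found no CNI network configuration under /etc/kubernetes/cni/net.d/, because the just-restarted network-provider pods (multus, ovnkube-node) had not yet written one. As a stand-alone illustration (not CRI-O's actual code), a small Go sketch of that directory check might look like the following; the extension list mirrors common CNI config-loader conventions and is an assumption:

package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

func main() {
	// Directory named in the error; this cluster points CRI-O here instead
	// of the more common /etc/cni/net.d.
	dir := "/etc/kubernetes/cni/net.d"

	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Println("cannot read CNI config dir:", err)
		return
	}
	var configs []string
	for _, e := range entries {
		// Assumed extension set, mirroring typical CNI config loaders.
		switch strings.ToLower(filepath.Ext(e.Name())) {
		case ".conf", ".conflist", ".json":
			configs = append(configs, e.Name())
		}
	}
	if len(configs) == 0 {
		// The state the kubelet is reporting: no network config yet, so
		// every RunPodSandbox call fails until the provider writes one.
		fmt.Println("no CNI configuration file found; has the network provider started?")
		return
	}
	fmt.Println("CNI configs:", configs)
}

Consistent with that reading, the sandbox for crc-storage-crc-tx9bl is created successfully at 15:59:01 below, once ovnkube-node-pxnlc has passed its readiness probes.

Jan 28 15:58:47 crc kubenswrapper[4811]: E0128 15:58:47.498272 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-tx9bl_crc-storage(325e4634-8f82-469a-a1d6-0b9f0364f1dc)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-tx9bl_crc-storage(325e4634-8f82-469a-a1d6-0b9f0364f1dc)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-tx9bl_crc-storage_325e4634-8f82-469a-a1d6-0b9f0364f1dc_0(d38848b470abe3542a26849343f092ac35f60650ce4c7e688491eb3d7a550a3e): no CNI configuration file in /etc/kubernetes/cni/net.d/. 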
Has your network provider started?\"" pod="crc-storage/crc-storage-crc-tx9bl" podUID="325e4634-8f82-469a-a1d6-0b9f0364f1dc" Jan 28 15:58:47 crc kubenswrapper[4811]: I0128 15:58:47.545619 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" event={"ID":"366df858-12b8-4efc-a884-ed709352193a","Type":"ContainerStarted","Data":"784bd76dc8fa6bd3fbb664c7a48ac8f96d2c342b9ef2fea0238b01795ab45efa"} Jan 28 15:58:47 crc kubenswrapper[4811]: I0128 15:58:47.545893 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:47 crc kubenswrapper[4811]: I0128 15:58:47.545940 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:47 crc kubenswrapper[4811]: I0128 15:58:47.580471 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" podStartSLOduration=7.580452753 podStartE2EDuration="7.580452753s" podCreationTimestamp="2026-01-28 15:58:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:58:47.576856868 +0000 UTC m=+820.331220451" watchObservedRunningTime="2026-01-28 15:58:47.580452753 +0000 UTC m=+820.334816336" Jan 28 15:58:47 crc kubenswrapper[4811]: I0128 15:58:47.583040 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:48 crc kubenswrapper[4811]: I0128 15:58:48.550791 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:58:48 crc kubenswrapper[4811]: I0128 15:58:48.589288 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:59:01 crc kubenswrapper[4811]: I0128 15:59:01.338875 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-tx9bl" Jan 28 15:59:01 crc kubenswrapper[4811]: I0128 15:59:01.339455 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-tx9bl" Jan 28 15:59:01 crc kubenswrapper[4811]: I0128 15:59:01.749264 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-tx9bl"] Jan 28 15:59:01 crc kubenswrapper[4811]: W0128 15:59:01.756109 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod325e4634_8f82_469a_a1d6_0b9f0364f1dc.slice/crio-e54251f57c866add747d138abeff26be0ab1ce4e4058d18dc6c6a0f1bc551a6d WatchSource:0}: Error finding container e54251f57c866add747d138abeff26be0ab1ce4e4058d18dc6c6a0f1bc551a6d: Status 404 returned error can't find the container with id e54251f57c866add747d138abeff26be0ab1ce4e4058d18dc6c6a0f1bc551a6d Jan 28 15:59:01 crc kubenswrapper[4811]: I0128 15:59:01.758237 4811 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 28 15:59:02 crc kubenswrapper[4811]: I0128 15:59:02.637055 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-tx9bl" event={"ID":"325e4634-8f82-469a-a1d6-0b9f0364f1dc","Type":"ContainerStarted","Data":"e54251f57c866add747d138abeff26be0ab1ce4e4058d18dc6c6a0f1bc551a6d"} Jan 28 15:59:03 crc kubenswrapper[4811]: I0128 15:59:03.644055 4811 generic.go:334] "Generic (PLEG): container finished" podID="325e4634-8f82-469a-a1d6-0b9f0364f1dc" containerID="1baae8a95ec118fb48ee83d472f5a4ced1ac9a74692d4755eb2f8907a281a9ac" exitCode=0 Jan 28 15:59:03 crc kubenswrapper[4811]: I0128 15:59:03.644110 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-tx9bl" event={"ID":"325e4634-8f82-469a-a1d6-0b9f0364f1dc","Type":"ContainerDied","Data":"1baae8a95ec118fb48ee83d472f5a4ced1ac9a74692d4755eb2f8907a281a9ac"} Jan 28 15:59:04 crc kubenswrapper[4811]: I0128 15:59:04.916671 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-tx9bl" Jan 28 15:59:04 crc kubenswrapper[4811]: I0128 15:59:04.949378 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/325e4634-8f82-469a-a1d6-0b9f0364f1dc-node-mnt\") pod \"325e4634-8f82-469a-a1d6-0b9f0364f1dc\" (UID: \"325e4634-8f82-469a-a1d6-0b9f0364f1dc\") " Jan 28 15:59:04 crc kubenswrapper[4811]: I0128 15:59:04.949457 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/325e4634-8f82-469a-a1d6-0b9f0364f1dc-crc-storage\") pod \"325e4634-8f82-469a-a1d6-0b9f0364f1dc\" (UID: \"325e4634-8f82-469a-a1d6-0b9f0364f1dc\") " Jan 28 15:59:04 crc kubenswrapper[4811]: I0128 15:59:04.949498 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/325e4634-8f82-469a-a1d6-0b9f0364f1dc-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "325e4634-8f82-469a-a1d6-0b9f0364f1dc" (UID: "325e4634-8f82-469a-a1d6-0b9f0364f1dc"). InnerVolumeSpecName "node-mnt". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 15:59:04 crc kubenswrapper[4811]: I0128 15:59:04.949502 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sr2mk\" (UniqueName: \"kubernetes.io/projected/325e4634-8f82-469a-a1d6-0b9f0364f1dc-kube-api-access-sr2mk\") pod \"325e4634-8f82-469a-a1d6-0b9f0364f1dc\" (UID: \"325e4634-8f82-469a-a1d6-0b9f0364f1dc\") " Jan 28 15:59:04 crc kubenswrapper[4811]: I0128 15:59:04.949669 4811 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/325e4634-8f82-469a-a1d6-0b9f0364f1dc-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 28 15:59:04 crc kubenswrapper[4811]: I0128 15:59:04.954572 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/325e4634-8f82-469a-a1d6-0b9f0364f1dc-kube-api-access-sr2mk" (OuterVolumeSpecName: "kube-api-access-sr2mk") pod "325e4634-8f82-469a-a1d6-0b9f0364f1dc" (UID: "325e4634-8f82-469a-a1d6-0b9f0364f1dc"). InnerVolumeSpecName "kube-api-access-sr2mk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:59:04 crc kubenswrapper[4811]: I0128 15:59:04.974784 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/325e4634-8f82-469a-a1d6-0b9f0364f1dc-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "325e4634-8f82-469a-a1d6-0b9f0364f1dc" (UID: "325e4634-8f82-469a-a1d6-0b9f0364f1dc"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 15:59:05 crc kubenswrapper[4811]: I0128 15:59:05.050452 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sr2mk\" (UniqueName: \"kubernetes.io/projected/325e4634-8f82-469a-a1d6-0b9f0364f1dc-kube-api-access-sr2mk\") on node \"crc\" DevicePath \"\"" Jan 28 15:59:05 crc kubenswrapper[4811]: I0128 15:59:05.050495 4811 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/325e4634-8f82-469a-a1d6-0b9f0364f1dc-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 28 15:59:05 crc kubenswrapper[4811]: I0128 15:59:05.659741 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-tx9bl" event={"ID":"325e4634-8f82-469a-a1d6-0b9f0364f1dc","Type":"ContainerDied","Data":"e54251f57c866add747d138abeff26be0ab1ce4e4058d18dc6c6a0f1bc551a6d"} Jan 28 15:59:05 crc kubenswrapper[4811]: I0128 15:59:05.659809 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e54251f57c866add747d138abeff26be0ab1ce4e4058d18dc6c6a0f1bc551a6d" Jan 28 15:59:05 crc kubenswrapper[4811]: I0128 15:59:05.659871 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-tx9bl" Jan 28 15:59:11 crc kubenswrapper[4811]: I0128 15:59:11.084710 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-pxnlc" Jan 28 15:59:12 crc kubenswrapper[4811]: I0128 15:59:12.078585 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c"] Jan 28 15:59:12 crc kubenswrapper[4811]: E0128 15:59:12.078799 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="325e4634-8f82-469a-a1d6-0b9f0364f1dc" containerName="storage" Jan 28 15:59:12 crc kubenswrapper[4811]: I0128 15:59:12.078811 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="325e4634-8f82-469a-a1d6-0b9f0364f1dc" containerName="storage" Jan 28 15:59:12 crc kubenswrapper[4811]: I0128 15:59:12.078917 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="325e4634-8f82-469a-a1d6-0b9f0364f1dc" containerName="storage" Jan 28 15:59:12 crc kubenswrapper[4811]: I0128 15:59:12.079599 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c" Jan 28 15:59:12 crc kubenswrapper[4811]: I0128 15:59:12.083638 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 28 15:59:12 crc kubenswrapper[4811]: I0128 15:59:12.092963 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c"] Jan 28 15:59:12 crc kubenswrapper[4811]: I0128 15:59:12.249850 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4a2037fd-b35c-4712-9b1f-8cc7586fcabe-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c\" (UID: \"4a2037fd-b35c-4712-9b1f-8cc7586fcabe\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c" Jan 28 15:59:12 crc kubenswrapper[4811]: I0128 15:59:12.250233 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvts6\" (UniqueName: \"kubernetes.io/projected/4a2037fd-b35c-4712-9b1f-8cc7586fcabe-kube-api-access-jvts6\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c\" (UID: \"4a2037fd-b35c-4712-9b1f-8cc7586fcabe\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c" Jan 28 15:59:12 crc kubenswrapper[4811]: I0128 15:59:12.250258 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4a2037fd-b35c-4712-9b1f-8cc7586fcabe-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c\" (UID: \"4a2037fd-b35c-4712-9b1f-8cc7586fcabe\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c" Jan 28 15:59:12 crc kubenswrapper[4811]: I0128 15:59:12.351843 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4a2037fd-b35c-4712-9b1f-8cc7586fcabe-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c\" (UID: \"4a2037fd-b35c-4712-9b1f-8cc7586fcabe\") " 
pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c" Jan 28 15:59:12 crc kubenswrapper[4811]: I0128 15:59:12.351928 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4a2037fd-b35c-4712-9b1f-8cc7586fcabe-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c\" (UID: \"4a2037fd-b35c-4712-9b1f-8cc7586fcabe\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c" Jan 28 15:59:12 crc kubenswrapper[4811]: I0128 15:59:12.352079 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvts6\" (UniqueName: \"kubernetes.io/projected/4a2037fd-b35c-4712-9b1f-8cc7586fcabe-kube-api-access-jvts6\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c\" (UID: \"4a2037fd-b35c-4712-9b1f-8cc7586fcabe\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c" Jan 28 15:59:12 crc kubenswrapper[4811]: I0128 15:59:12.353409 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4a2037fd-b35c-4712-9b1f-8cc7586fcabe-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c\" (UID: \"4a2037fd-b35c-4712-9b1f-8cc7586fcabe\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c" Jan 28 15:59:12 crc kubenswrapper[4811]: I0128 15:59:12.353411 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4a2037fd-b35c-4712-9b1f-8cc7586fcabe-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c\" (UID: \"4a2037fd-b35c-4712-9b1f-8cc7586fcabe\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c" Jan 28 15:59:12 crc kubenswrapper[4811]: I0128 15:59:12.390225 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvts6\" (UniqueName: \"kubernetes.io/projected/4a2037fd-b35c-4712-9b1f-8cc7586fcabe-kube-api-access-jvts6\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c\" (UID: \"4a2037fd-b35c-4712-9b1f-8cc7586fcabe\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c" Jan 28 15:59:12 crc kubenswrapper[4811]: I0128 15:59:12.431887 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c" Jan 28 15:59:12 crc kubenswrapper[4811]: I0128 15:59:12.696471 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c"] Jan 28 15:59:12 crc kubenswrapper[4811]: I0128 15:59:12.714159 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c" event={"ID":"4a2037fd-b35c-4712-9b1f-8cc7586fcabe","Type":"ContainerStarted","Data":"a4c7b2e159b548ecec787919841f0a094e6ced6b63b6a7e399341ed738f04360"} Jan 28 15:59:13 crc kubenswrapper[4811]: I0128 15:59:13.722892 4811 generic.go:334] "Generic (PLEG): container finished" podID="4a2037fd-b35c-4712-9b1f-8cc7586fcabe" containerID="76eae0a7833bacc103726516aa1533a3779fbd6bd26444de94fa59cc2b069ca9" exitCode=0 Jan 28 15:59:13 crc kubenswrapper[4811]: I0128 15:59:13.723173 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c" event={"ID":"4a2037fd-b35c-4712-9b1f-8cc7586fcabe","Type":"ContainerDied","Data":"76eae0a7833bacc103726516aa1533a3779fbd6bd26444de94fa59cc2b069ca9"} Jan 28 15:59:14 crc kubenswrapper[4811]: I0128 15:59:14.452889 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-gw62s"] Jan 28 15:59:14 crc kubenswrapper[4811]: I0128 15:59:14.455937 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gw62s" Jan 28 15:59:14 crc kubenswrapper[4811]: I0128 15:59:14.469413 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gw62s"] Jan 28 15:59:14 crc kubenswrapper[4811]: I0128 15:59:14.583271 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/adc13bee-20cc-4d9d-974e-80ff00a64a6a-catalog-content\") pod \"redhat-operators-gw62s\" (UID: \"adc13bee-20cc-4d9d-974e-80ff00a64a6a\") " pod="openshift-marketplace/redhat-operators-gw62s" Jan 28 15:59:14 crc kubenswrapper[4811]: I0128 15:59:14.583324 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/adc13bee-20cc-4d9d-974e-80ff00a64a6a-utilities\") pod \"redhat-operators-gw62s\" (UID: \"adc13bee-20cc-4d9d-974e-80ff00a64a6a\") " pod="openshift-marketplace/redhat-operators-gw62s" Jan 28 15:59:14 crc kubenswrapper[4811]: I0128 15:59:14.583402 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84ldl\" (UniqueName: \"kubernetes.io/projected/adc13bee-20cc-4d9d-974e-80ff00a64a6a-kube-api-access-84ldl\") pod \"redhat-operators-gw62s\" (UID: \"adc13bee-20cc-4d9d-974e-80ff00a64a6a\") " pod="openshift-marketplace/redhat-operators-gw62s" Jan 28 15:59:14 crc kubenswrapper[4811]: I0128 15:59:14.685016 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/adc13bee-20cc-4d9d-974e-80ff00a64a6a-catalog-content\") pod \"redhat-operators-gw62s\" (UID: \"adc13bee-20cc-4d9d-974e-80ff00a64a6a\") " pod="openshift-marketplace/redhat-operators-gw62s" Jan 28 15:59:14 crc kubenswrapper[4811]: I0128 15:59:14.685071 4811 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/adc13bee-20cc-4d9d-974e-80ff00a64a6a-utilities\") pod \"redhat-operators-gw62s\" (UID: \"adc13bee-20cc-4d9d-974e-80ff00a64a6a\") " pod="openshift-marketplace/redhat-operators-gw62s" Jan 28 15:59:14 crc kubenswrapper[4811]: I0128 15:59:14.685161 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84ldl\" (UniqueName: \"kubernetes.io/projected/adc13bee-20cc-4d9d-974e-80ff00a64a6a-kube-api-access-84ldl\") pod \"redhat-operators-gw62s\" (UID: \"adc13bee-20cc-4d9d-974e-80ff00a64a6a\") " pod="openshift-marketplace/redhat-operators-gw62s" Jan 28 15:59:14 crc kubenswrapper[4811]: I0128 15:59:14.685553 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/adc13bee-20cc-4d9d-974e-80ff00a64a6a-catalog-content\") pod \"redhat-operators-gw62s\" (UID: \"adc13bee-20cc-4d9d-974e-80ff00a64a6a\") " pod="openshift-marketplace/redhat-operators-gw62s" Jan 28 15:59:14 crc kubenswrapper[4811]: I0128 15:59:14.685696 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/adc13bee-20cc-4d9d-974e-80ff00a64a6a-utilities\") pod \"redhat-operators-gw62s\" (UID: \"adc13bee-20cc-4d9d-974e-80ff00a64a6a\") " pod="openshift-marketplace/redhat-operators-gw62s" Jan 28 15:59:14 crc kubenswrapper[4811]: I0128 15:59:14.710850 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84ldl\" (UniqueName: \"kubernetes.io/projected/adc13bee-20cc-4d9d-974e-80ff00a64a6a-kube-api-access-84ldl\") pod \"redhat-operators-gw62s\" (UID: \"adc13bee-20cc-4d9d-974e-80ff00a64a6a\") " pod="openshift-marketplace/redhat-operators-gw62s" Jan 28 15:59:14 crc kubenswrapper[4811]: I0128 15:59:14.787380 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gw62s" Jan 28 15:59:15 crc kubenswrapper[4811]: I0128 15:59:15.024258 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gw62s"] Jan 28 15:59:15 crc kubenswrapper[4811]: W0128 15:59:15.050014 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podadc13bee_20cc_4d9d_974e_80ff00a64a6a.slice/crio-fbe125b700236d784eafe3e92534a51903c9b76f019fa0fb5c4ca32ba8764f49 WatchSource:0}: Error finding container fbe125b700236d784eafe3e92534a51903c9b76f019fa0fb5c4ca32ba8764f49: Status 404 returned error can't find the container with id fbe125b700236d784eafe3e92534a51903c9b76f019fa0fb5c4ca32ba8764f49 Jan 28 15:59:15 crc kubenswrapper[4811]: I0128 15:59:15.734162 4811 generic.go:334] "Generic (PLEG): container finished" podID="adc13bee-20cc-4d9d-974e-80ff00a64a6a" containerID="09ddb5748325443c6808d1d07b245947fc0964508e7b32b8a02d64ef5324c2ab" exitCode=0 Jan 28 15:59:15 crc kubenswrapper[4811]: I0128 15:59:15.734250 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gw62s" event={"ID":"adc13bee-20cc-4d9d-974e-80ff00a64a6a","Type":"ContainerDied","Data":"09ddb5748325443c6808d1d07b245947fc0964508e7b32b8a02d64ef5324c2ab"} Jan 28 15:59:15 crc kubenswrapper[4811]: I0128 15:59:15.734772 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gw62s" event={"ID":"adc13bee-20cc-4d9d-974e-80ff00a64a6a","Type":"ContainerStarted","Data":"fbe125b700236d784eafe3e92534a51903c9b76f019fa0fb5c4ca32ba8764f49"} Jan 28 15:59:15 crc kubenswrapper[4811]: I0128 15:59:15.738109 4811 generic.go:334] "Generic (PLEG): container finished" podID="4a2037fd-b35c-4712-9b1f-8cc7586fcabe" containerID="375f50c7c209ca58776d458e91e789584feec031b2b65dee3d7e717d529e659d" exitCode=0 Jan 28 15:59:15 crc kubenswrapper[4811]: I0128 15:59:15.738140 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c" event={"ID":"4a2037fd-b35c-4712-9b1f-8cc7586fcabe","Type":"ContainerDied","Data":"375f50c7c209ca58776d458e91e789584feec031b2b65dee3d7e717d529e659d"} Jan 28 15:59:16 crc kubenswrapper[4811]: I0128 15:59:16.750784 4811 generic.go:334] "Generic (PLEG): container finished" podID="4a2037fd-b35c-4712-9b1f-8cc7586fcabe" containerID="8a769e94bc8412da5f84ff2d2d6aaf84548ffa70a637d27e48ea0c18391c217b" exitCode=0 Jan 28 15:59:16 crc kubenswrapper[4811]: I0128 15:59:16.750899 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c" event={"ID":"4a2037fd-b35c-4712-9b1f-8cc7586fcabe","Type":"ContainerDied","Data":"8a769e94bc8412da5f84ff2d2d6aaf84548ffa70a637d27e48ea0c18391c217b"} Jan 28 15:59:16 crc kubenswrapper[4811]: I0128 15:59:16.753979 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gw62s" event={"ID":"adc13bee-20cc-4d9d-974e-80ff00a64a6a","Type":"ContainerStarted","Data":"ff75d75497908366704f335d29154a9705ff5773c3cc380a03823f2b58ec3b47"} Jan 28 15:59:17 crc kubenswrapper[4811]: I0128 15:59:17.761481 4811 generic.go:334] "Generic (PLEG): container finished" podID="adc13bee-20cc-4d9d-974e-80ff00a64a6a" containerID="ff75d75497908366704f335d29154a9705ff5773c3cc380a03823f2b58ec3b47" exitCode=0 Jan 28 15:59:17 crc kubenswrapper[4811]: I0128 15:59:17.761499 4811 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gw62s" event={"ID":"adc13bee-20cc-4d9d-974e-80ff00a64a6a","Type":"ContainerDied","Data":"ff75d75497908366704f335d29154a9705ff5773c3cc380a03823f2b58ec3b47"} Jan 28 15:59:18 crc kubenswrapper[4811]: I0128 15:59:18.047398 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c" Jan 28 15:59:18 crc kubenswrapper[4811]: I0128 15:59:18.231414 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4a2037fd-b35c-4712-9b1f-8cc7586fcabe-bundle\") pod \"4a2037fd-b35c-4712-9b1f-8cc7586fcabe\" (UID: \"4a2037fd-b35c-4712-9b1f-8cc7586fcabe\") " Jan 28 15:59:18 crc kubenswrapper[4811]: I0128 15:59:18.232005 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4a2037fd-b35c-4712-9b1f-8cc7586fcabe-util\") pod \"4a2037fd-b35c-4712-9b1f-8cc7586fcabe\" (UID: \"4a2037fd-b35c-4712-9b1f-8cc7586fcabe\") " Jan 28 15:59:18 crc kubenswrapper[4811]: I0128 15:59:18.232193 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvts6\" (UniqueName: \"kubernetes.io/projected/4a2037fd-b35c-4712-9b1f-8cc7586fcabe-kube-api-access-jvts6\") pod \"4a2037fd-b35c-4712-9b1f-8cc7586fcabe\" (UID: \"4a2037fd-b35c-4712-9b1f-8cc7586fcabe\") " Jan 28 15:59:18 crc kubenswrapper[4811]: I0128 15:59:18.233129 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a2037fd-b35c-4712-9b1f-8cc7586fcabe-bundle" (OuterVolumeSpecName: "bundle") pod "4a2037fd-b35c-4712-9b1f-8cc7586fcabe" (UID: "4a2037fd-b35c-4712-9b1f-8cc7586fcabe"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 15:59:18 crc kubenswrapper[4811]: I0128 15:59:18.239162 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a2037fd-b35c-4712-9b1f-8cc7586fcabe-kube-api-access-jvts6" (OuterVolumeSpecName: "kube-api-access-jvts6") pod "4a2037fd-b35c-4712-9b1f-8cc7586fcabe" (UID: "4a2037fd-b35c-4712-9b1f-8cc7586fcabe"). InnerVolumeSpecName "kube-api-access-jvts6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:59:18 crc kubenswrapper[4811]: I0128 15:59:18.334069 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvts6\" (UniqueName: \"kubernetes.io/projected/4a2037fd-b35c-4712-9b1f-8cc7586fcabe-kube-api-access-jvts6\") on node \"crc\" DevicePath \"\"" Jan 28 15:59:18 crc kubenswrapper[4811]: I0128 15:59:18.334108 4811 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4a2037fd-b35c-4712-9b1f-8cc7586fcabe-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 15:59:18 crc kubenswrapper[4811]: I0128 15:59:18.531203 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a2037fd-b35c-4712-9b1f-8cc7586fcabe-util" (OuterVolumeSpecName: "util") pod "4a2037fd-b35c-4712-9b1f-8cc7586fcabe" (UID: "4a2037fd-b35c-4712-9b1f-8cc7586fcabe"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 15:59:18 crc kubenswrapper[4811]: I0128 15:59:18.536291 4811 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4a2037fd-b35c-4712-9b1f-8cc7586fcabe-util\") on node \"crc\" DevicePath \"\"" Jan 28 15:59:18 crc kubenswrapper[4811]: I0128 15:59:18.770876 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gw62s" event={"ID":"adc13bee-20cc-4d9d-974e-80ff00a64a6a","Type":"ContainerStarted","Data":"3335c98ab9197f5d2b76c7162763cf45a71975202d75890ff064286ed87a7c38"} Jan 28 15:59:18 crc kubenswrapper[4811]: I0128 15:59:18.778916 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c" event={"ID":"4a2037fd-b35c-4712-9b1f-8cc7586fcabe","Type":"ContainerDied","Data":"a4c7b2e159b548ecec787919841f0a094e6ced6b63b6a7e399341ed738f04360"} Jan 28 15:59:18 crc kubenswrapper[4811]: I0128 15:59:18.778997 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c" Jan 28 15:59:18 crc kubenswrapper[4811]: I0128 15:59:18.779004 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a4c7b2e159b548ecec787919841f0a094e6ced6b63b6a7e399341ed738f04360" Jan 28 15:59:18 crc kubenswrapper[4811]: I0128 15:59:18.806287 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gw62s" podStartSLOduration=2.33105281 podStartE2EDuration="4.806268109s" podCreationTimestamp="2026-01-28 15:59:14 +0000 UTC" firstStartedPulling="2026-01-28 15:59:15.735948319 +0000 UTC m=+848.490311922" lastFinishedPulling="2026-01-28 15:59:18.211163638 +0000 UTC m=+850.965527221" observedRunningTime="2026-01-28 15:59:18.801191704 +0000 UTC m=+851.555555297" watchObservedRunningTime="2026-01-28 15:59:18.806268109 +0000 UTC m=+851.560631712" Jan 28 15:59:22 crc kubenswrapper[4811]: I0128 15:59:22.427889 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-wt5f7"] Jan 28 15:59:22 crc kubenswrapper[4811]: E0128 15:59:22.428965 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a2037fd-b35c-4712-9b1f-8cc7586fcabe" containerName="pull" Jan 28 15:59:22 crc kubenswrapper[4811]: I0128 15:59:22.429073 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a2037fd-b35c-4712-9b1f-8cc7586fcabe" containerName="pull" Jan 28 15:59:22 crc kubenswrapper[4811]: E0128 15:59:22.429152 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a2037fd-b35c-4712-9b1f-8cc7586fcabe" containerName="extract" Jan 28 15:59:22 crc kubenswrapper[4811]: I0128 15:59:22.429217 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a2037fd-b35c-4712-9b1f-8cc7586fcabe" containerName="extract" Jan 28 15:59:22 crc kubenswrapper[4811]: E0128 15:59:22.429299 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a2037fd-b35c-4712-9b1f-8cc7586fcabe" containerName="util" Jan 28 15:59:22 crc kubenswrapper[4811]: I0128 15:59:22.429387 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a2037fd-b35c-4712-9b1f-8cc7586fcabe" containerName="util" Jan 28 15:59:22 crc kubenswrapper[4811]: I0128 15:59:22.429593 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a2037fd-b35c-4712-9b1f-8cc7586fcabe" 
containerName="extract" Jan 28 15:59:22 crc kubenswrapper[4811]: I0128 15:59:22.430098 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-wt5f7" Jan 28 15:59:22 crc kubenswrapper[4811]: I0128 15:59:22.432304 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Jan 28 15:59:22 crc kubenswrapper[4811]: I0128 15:59:22.436148 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-255xs" Jan 28 15:59:22 crc kubenswrapper[4811]: I0128 15:59:22.436377 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Jan 28 15:59:22 crc kubenswrapper[4811]: I0128 15:59:22.439110 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-wt5f7"] Jan 28 15:59:22 crc kubenswrapper[4811]: I0128 15:59:22.590262 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4cn4f\" (UniqueName: \"kubernetes.io/projected/130754b8-3e75-4b70-b0a9-92f912c250f0-kube-api-access-4cn4f\") pod \"nmstate-operator-646758c888-wt5f7\" (UID: \"130754b8-3e75-4b70-b0a9-92f912c250f0\") " pod="openshift-nmstate/nmstate-operator-646758c888-wt5f7" Jan 28 15:59:22 crc kubenswrapper[4811]: I0128 15:59:22.691788 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4cn4f\" (UniqueName: \"kubernetes.io/projected/130754b8-3e75-4b70-b0a9-92f912c250f0-kube-api-access-4cn4f\") pod \"nmstate-operator-646758c888-wt5f7\" (UID: \"130754b8-3e75-4b70-b0a9-92f912c250f0\") " pod="openshift-nmstate/nmstate-operator-646758c888-wt5f7" Jan 28 15:59:22 crc kubenswrapper[4811]: I0128 15:59:22.715149 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4cn4f\" (UniqueName: \"kubernetes.io/projected/130754b8-3e75-4b70-b0a9-92f912c250f0-kube-api-access-4cn4f\") pod \"nmstate-operator-646758c888-wt5f7\" (UID: \"130754b8-3e75-4b70-b0a9-92f912c250f0\") " pod="openshift-nmstate/nmstate-operator-646758c888-wt5f7" Jan 28 15:59:22 crc kubenswrapper[4811]: I0128 15:59:22.744778 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-wt5f7" Jan 28 15:59:23 crc kubenswrapper[4811]: I0128 15:59:23.025248 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-wt5f7"] Jan 28 15:59:23 crc kubenswrapper[4811]: W0128 15:59:23.050728 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod130754b8_3e75_4b70_b0a9_92f912c250f0.slice/crio-c964902784aea77a060fafaf6a17407b5827d9ec9b47a898fc2c8d0ba0ef737e WatchSource:0}: Error finding container c964902784aea77a060fafaf6a17407b5827d9ec9b47a898fc2c8d0ba0ef737e: Status 404 returned error can't find the container with id c964902784aea77a060fafaf6a17407b5827d9ec9b47a898fc2c8d0ba0ef737e Jan 28 15:59:23 crc kubenswrapper[4811]: I0128 15:59:23.805635 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-wt5f7" event={"ID":"130754b8-3e75-4b70-b0a9-92f912c250f0","Type":"ContainerStarted","Data":"c964902784aea77a060fafaf6a17407b5827d9ec9b47a898fc2c8d0ba0ef737e"} Jan 28 15:59:24 crc kubenswrapper[4811]: I0128 15:59:24.787657 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-gw62s" Jan 28 15:59:24 crc kubenswrapper[4811]: I0128 15:59:24.787739 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gw62s" Jan 28 15:59:24 crc kubenswrapper[4811]: I0128 15:59:24.834030 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-gw62s" Jan 28 15:59:24 crc kubenswrapper[4811]: I0128 15:59:24.887451 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gw62s" Jan 28 15:59:25 crc kubenswrapper[4811]: I0128 15:59:25.822239 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-wt5f7" event={"ID":"130754b8-3e75-4b70-b0a9-92f912c250f0","Type":"ContainerStarted","Data":"0796108b07c600f0f0f65f9411f4ca4b0a56be9e2dbf31205884746c5eb602e3"} Jan 28 15:59:25 crc kubenswrapper[4811]: I0128 15:59:25.845389 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-646758c888-wt5f7" podStartSLOduration=1.5946239100000001 podStartE2EDuration="3.845360963s" podCreationTimestamp="2026-01-28 15:59:22 +0000 UTC" firstStartedPulling="2026-01-28 15:59:23.052842351 +0000 UTC m=+855.807205944" lastFinishedPulling="2026-01-28 15:59:25.303579414 +0000 UTC m=+858.057942997" observedRunningTime="2026-01-28 15:59:25.839250322 +0000 UTC m=+858.593613905" watchObservedRunningTime="2026-01-28 15:59:25.845360963 +0000 UTC m=+858.599724586" Jan 28 15:59:27 crc kubenswrapper[4811]: I0128 15:59:27.437968 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gw62s"] Jan 28 15:59:27 crc kubenswrapper[4811]: I0128 15:59:27.438207 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-gw62s" podUID="adc13bee-20cc-4d9d-974e-80ff00a64a6a" containerName="registry-server" containerID="cri-o://3335c98ab9197f5d2b76c7162763cf45a71975202d75890ff064286ed87a7c38" gracePeriod=2 Jan 28 15:59:27 crc kubenswrapper[4811]: I0128 15:59:27.800695 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gw62s" Jan 28 15:59:27 crc kubenswrapper[4811]: I0128 15:59:27.834767 4811 generic.go:334] "Generic (PLEG): container finished" podID="adc13bee-20cc-4d9d-974e-80ff00a64a6a" containerID="3335c98ab9197f5d2b76c7162763cf45a71975202d75890ff064286ed87a7c38" exitCode=0 Jan 28 15:59:27 crc kubenswrapper[4811]: I0128 15:59:27.834817 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gw62s" event={"ID":"adc13bee-20cc-4d9d-974e-80ff00a64a6a","Type":"ContainerDied","Data":"3335c98ab9197f5d2b76c7162763cf45a71975202d75890ff064286ed87a7c38"} Jan 28 15:59:27 crc kubenswrapper[4811]: I0128 15:59:27.834832 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gw62s" Jan 28 15:59:27 crc kubenswrapper[4811]: I0128 15:59:27.834856 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gw62s" event={"ID":"adc13bee-20cc-4d9d-974e-80ff00a64a6a","Type":"ContainerDied","Data":"fbe125b700236d784eafe3e92534a51903c9b76f019fa0fb5c4ca32ba8764f49"} Jan 28 15:59:27 crc kubenswrapper[4811]: I0128 15:59:27.834882 4811 scope.go:117] "RemoveContainer" containerID="3335c98ab9197f5d2b76c7162763cf45a71975202d75890ff064286ed87a7c38" Jan 28 15:59:27 crc kubenswrapper[4811]: I0128 15:59:27.871800 4811 scope.go:117] "RemoveContainer" containerID="ff75d75497908366704f335d29154a9705ff5773c3cc380a03823f2b58ec3b47" Jan 28 15:59:27 crc kubenswrapper[4811]: I0128 15:59:27.900170 4811 scope.go:117] "RemoveContainer" containerID="09ddb5748325443c6808d1d07b245947fc0964508e7b32b8a02d64ef5324c2ab" Jan 28 15:59:27 crc kubenswrapper[4811]: I0128 15:59:27.915225 4811 scope.go:117] "RemoveContainer" containerID="3335c98ab9197f5d2b76c7162763cf45a71975202d75890ff064286ed87a7c38" Jan 28 15:59:27 crc kubenswrapper[4811]: E0128 15:59:27.915674 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3335c98ab9197f5d2b76c7162763cf45a71975202d75890ff064286ed87a7c38\": container with ID starting with 3335c98ab9197f5d2b76c7162763cf45a71975202d75890ff064286ed87a7c38 not found: ID does not exist" containerID="3335c98ab9197f5d2b76c7162763cf45a71975202d75890ff064286ed87a7c38" Jan 28 15:59:27 crc kubenswrapper[4811]: I0128 15:59:27.915723 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3335c98ab9197f5d2b76c7162763cf45a71975202d75890ff064286ed87a7c38"} err="failed to get container status \"3335c98ab9197f5d2b76c7162763cf45a71975202d75890ff064286ed87a7c38\": rpc error: code = NotFound desc = could not find container \"3335c98ab9197f5d2b76c7162763cf45a71975202d75890ff064286ed87a7c38\": container with ID starting with 3335c98ab9197f5d2b76c7162763cf45a71975202d75890ff064286ed87a7c38 not found: ID does not exist" Jan 28 15:59:27 crc kubenswrapper[4811]: I0128 15:59:27.915746 4811 scope.go:117] "RemoveContainer" containerID="ff75d75497908366704f335d29154a9705ff5773c3cc380a03823f2b58ec3b47" Jan 28 15:59:27 crc kubenswrapper[4811]: E0128 15:59:27.916364 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff75d75497908366704f335d29154a9705ff5773c3cc380a03823f2b58ec3b47\": container with ID starting with ff75d75497908366704f335d29154a9705ff5773c3cc380a03823f2b58ec3b47 not found: ID does not exist" 
containerID="ff75d75497908366704f335d29154a9705ff5773c3cc380a03823f2b58ec3b47" Jan 28 15:59:27 crc kubenswrapper[4811]: I0128 15:59:27.916404 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff75d75497908366704f335d29154a9705ff5773c3cc380a03823f2b58ec3b47"} err="failed to get container status \"ff75d75497908366704f335d29154a9705ff5773c3cc380a03823f2b58ec3b47\": rpc error: code = NotFound desc = could not find container \"ff75d75497908366704f335d29154a9705ff5773c3cc380a03823f2b58ec3b47\": container with ID starting with ff75d75497908366704f335d29154a9705ff5773c3cc380a03823f2b58ec3b47 not found: ID does not exist" Jan 28 15:59:27 crc kubenswrapper[4811]: I0128 15:59:27.916417 4811 scope.go:117] "RemoveContainer" containerID="09ddb5748325443c6808d1d07b245947fc0964508e7b32b8a02d64ef5324c2ab" Jan 28 15:59:27 crc kubenswrapper[4811]: E0128 15:59:27.916899 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"09ddb5748325443c6808d1d07b245947fc0964508e7b32b8a02d64ef5324c2ab\": container with ID starting with 09ddb5748325443c6808d1d07b245947fc0964508e7b32b8a02d64ef5324c2ab not found: ID does not exist" containerID="09ddb5748325443c6808d1d07b245947fc0964508e7b32b8a02d64ef5324c2ab" Jan 28 15:59:27 crc kubenswrapper[4811]: I0128 15:59:27.916929 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09ddb5748325443c6808d1d07b245947fc0964508e7b32b8a02d64ef5324c2ab"} err="failed to get container status \"09ddb5748325443c6808d1d07b245947fc0964508e7b32b8a02d64ef5324c2ab\": rpc error: code = NotFound desc = could not find container \"09ddb5748325443c6808d1d07b245947fc0964508e7b32b8a02d64ef5324c2ab\": container with ID starting with 09ddb5748325443c6808d1d07b245947fc0964508e7b32b8a02d64ef5324c2ab not found: ID does not exist" Jan 28 15:59:27 crc kubenswrapper[4811]: I0128 15:59:27.962451 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/adc13bee-20cc-4d9d-974e-80ff00a64a6a-catalog-content\") pod \"adc13bee-20cc-4d9d-974e-80ff00a64a6a\" (UID: \"adc13bee-20cc-4d9d-974e-80ff00a64a6a\") " Jan 28 15:59:27 crc kubenswrapper[4811]: I0128 15:59:27.962502 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-84ldl\" (UniqueName: \"kubernetes.io/projected/adc13bee-20cc-4d9d-974e-80ff00a64a6a-kube-api-access-84ldl\") pod \"adc13bee-20cc-4d9d-974e-80ff00a64a6a\" (UID: \"adc13bee-20cc-4d9d-974e-80ff00a64a6a\") " Jan 28 15:59:27 crc kubenswrapper[4811]: I0128 15:59:27.962564 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/adc13bee-20cc-4d9d-974e-80ff00a64a6a-utilities\") pod \"adc13bee-20cc-4d9d-974e-80ff00a64a6a\" (UID: \"adc13bee-20cc-4d9d-974e-80ff00a64a6a\") " Jan 28 15:59:27 crc kubenswrapper[4811]: I0128 15:59:27.963548 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/adc13bee-20cc-4d9d-974e-80ff00a64a6a-utilities" (OuterVolumeSpecName: "utilities") pod "adc13bee-20cc-4d9d-974e-80ff00a64a6a" (UID: "adc13bee-20cc-4d9d-974e-80ff00a64a6a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 15:59:27 crc kubenswrapper[4811]: I0128 15:59:27.970134 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/adc13bee-20cc-4d9d-974e-80ff00a64a6a-kube-api-access-84ldl" (OuterVolumeSpecName: "kube-api-access-84ldl") pod "adc13bee-20cc-4d9d-974e-80ff00a64a6a" (UID: "adc13bee-20cc-4d9d-974e-80ff00a64a6a"). InnerVolumeSpecName "kube-api-access-84ldl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 15:59:28 crc kubenswrapper[4811]: I0128 15:59:28.064250 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-84ldl\" (UniqueName: \"kubernetes.io/projected/adc13bee-20cc-4d9d-974e-80ff00a64a6a-kube-api-access-84ldl\") on node \"crc\" DevicePath \"\"" Jan 28 15:59:28 crc kubenswrapper[4811]: I0128 15:59:28.064297 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/adc13bee-20cc-4d9d-974e-80ff00a64a6a-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 15:59:28 crc kubenswrapper[4811]: I0128 15:59:28.093021 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/adc13bee-20cc-4d9d-974e-80ff00a64a6a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "adc13bee-20cc-4d9d-974e-80ff00a64a6a" (UID: "adc13bee-20cc-4d9d-974e-80ff00a64a6a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 15:59:28 crc kubenswrapper[4811]: I0128 15:59:28.165779 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/adc13bee-20cc-4d9d-974e-80ff00a64a6a-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 15:59:28 crc kubenswrapper[4811]: I0128 15:59:28.168475 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gw62s"] Jan 28 15:59:28 crc kubenswrapper[4811]: I0128 15:59:28.173002 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-gw62s"] Jan 28 15:59:28 crc kubenswrapper[4811]: I0128 15:59:28.346983 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="adc13bee-20cc-4d9d-974e-80ff00a64a6a" path="/var/lib/kubelet/pods/adc13bee-20cc-4d9d-974e-80ff00a64a6a/volumes" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.146207 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-959qf"] Jan 28 15:59:32 crc kubenswrapper[4811]: E0128 15:59:32.146680 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adc13bee-20cc-4d9d-974e-80ff00a64a6a" containerName="registry-server" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.146691 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="adc13bee-20cc-4d9d-974e-80ff00a64a6a" containerName="registry-server" Jan 28 15:59:32 crc kubenswrapper[4811]: E0128 15:59:32.146703 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adc13bee-20cc-4d9d-974e-80ff00a64a6a" containerName="extract-content" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.146709 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="adc13bee-20cc-4d9d-974e-80ff00a64a6a" containerName="extract-content" Jan 28 15:59:32 crc kubenswrapper[4811]: E0128 15:59:32.146722 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adc13bee-20cc-4d9d-974e-80ff00a64a6a" containerName="extract-utilities" Jan 28 15:59:32 
crc kubenswrapper[4811]: I0128 15:59:32.146729 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="adc13bee-20cc-4d9d-974e-80ff00a64a6a" containerName="extract-utilities" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.146820 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="adc13bee-20cc-4d9d-974e-80ff00a64a6a" containerName="registry-server" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.147320 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-959qf" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.149221 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-82gd7" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.155367 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-fqkmg"] Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.156072 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-fqkmg" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.160673 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.160776 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-959qf"] Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.167208 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-p8pwn"] Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.167862 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-p8pwn" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.193346 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-fqkmg"] Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.283265 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-ncskp"] Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.284105 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-ncskp" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.286579 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.286715 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-hcvrl" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.286838 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.298425 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-ncskp"] Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.323016 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/51327a90-d59d-4d64-899c-5ba26ad301c3-ovs-socket\") pod \"nmstate-handler-p8pwn\" (UID: \"51327a90-d59d-4d64-899c-5ba26ad301c3\") " pod="openshift-nmstate/nmstate-handler-p8pwn" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.323073 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rxjp\" (UniqueName: \"kubernetes.io/projected/51327a90-d59d-4d64-899c-5ba26ad301c3-kube-api-access-2rxjp\") pod \"nmstate-handler-p8pwn\" (UID: \"51327a90-d59d-4d64-899c-5ba26ad301c3\") " pod="openshift-nmstate/nmstate-handler-p8pwn" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.323101 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htqxs\" (UniqueName: \"kubernetes.io/projected/0f288174-12e8-4627-9106-3f96f5368b58-kube-api-access-htqxs\") pod \"nmstate-webhook-8474b5b9d8-fqkmg\" (UID: \"0f288174-12e8-4627-9106-3f96f5368b58\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-fqkmg" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.323219 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/0f288174-12e8-4627-9106-3f96f5368b58-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-fqkmg\" (UID: \"0f288174-12e8-4627-9106-3f96f5368b58\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-fqkmg" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.323285 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/51327a90-d59d-4d64-899c-5ba26ad301c3-nmstate-lock\") pod \"nmstate-handler-p8pwn\" (UID: \"51327a90-d59d-4d64-899c-5ba26ad301c3\") " pod="openshift-nmstate/nmstate-handler-p8pwn" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.323364 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbw6q\" (UniqueName: \"kubernetes.io/projected/a1a9218c-ccf7-4ded-812b-6d3fe2db137c-kube-api-access-vbw6q\") pod \"nmstate-metrics-54757c584b-959qf\" (UID: \"a1a9218c-ccf7-4ded-812b-6d3fe2db137c\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-959qf" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.323390 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: 
\"kubernetes.io/host-path/51327a90-d59d-4d64-899c-5ba26ad301c3-dbus-socket\") pod \"nmstate-handler-p8pwn\" (UID: \"51327a90-d59d-4d64-899c-5ba26ad301c3\") " pod="openshift-nmstate/nmstate-handler-p8pwn" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.424962 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/51327a90-d59d-4d64-899c-5ba26ad301c3-ovs-socket\") pod \"nmstate-handler-p8pwn\" (UID: \"51327a90-d59d-4d64-899c-5ba26ad301c3\") " pod="openshift-nmstate/nmstate-handler-p8pwn" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.425026 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/742554e2-f5ea-4701-b8cf-c81f6dd0ae1b-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-ncskp\" (UID: \"742554e2-f5ea-4701-b8cf-c81f6dd0ae1b\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-ncskp" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.425053 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rxjp\" (UniqueName: \"kubernetes.io/projected/51327a90-d59d-4d64-899c-5ba26ad301c3-kube-api-access-2rxjp\") pod \"nmstate-handler-p8pwn\" (UID: \"51327a90-d59d-4d64-899c-5ba26ad301c3\") " pod="openshift-nmstate/nmstate-handler-p8pwn" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.425069 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/742554e2-f5ea-4701-b8cf-c81f6dd0ae1b-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-ncskp\" (UID: \"742554e2-f5ea-4701-b8cf-c81f6dd0ae1b\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-ncskp" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.425094 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htqxs\" (UniqueName: \"kubernetes.io/projected/0f288174-12e8-4627-9106-3f96f5368b58-kube-api-access-htqxs\") pod \"nmstate-webhook-8474b5b9d8-fqkmg\" (UID: \"0f288174-12e8-4627-9106-3f96f5368b58\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-fqkmg" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.425113 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/0f288174-12e8-4627-9106-3f96f5368b58-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-fqkmg\" (UID: \"0f288174-12e8-4627-9106-3f96f5368b58\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-fqkmg" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.425107 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/51327a90-d59d-4d64-899c-5ba26ad301c3-ovs-socket\") pod \"nmstate-handler-p8pwn\" (UID: \"51327a90-d59d-4d64-899c-5ba26ad301c3\") " pod="openshift-nmstate/nmstate-handler-p8pwn" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.425139 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/51327a90-d59d-4d64-899c-5ba26ad301c3-nmstate-lock\") pod \"nmstate-handler-p8pwn\" (UID: \"51327a90-d59d-4d64-899c-5ba26ad301c3\") " pod="openshift-nmstate/nmstate-handler-p8pwn" Jan 28 15:59:32 crc kubenswrapper[4811]: E0128 15:59:32.425270 4811 secret.go:188] Couldn't get secret 
openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.425283 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbw6q\" (UniqueName: \"kubernetes.io/projected/a1a9218c-ccf7-4ded-812b-6d3fe2db137c-kube-api-access-vbw6q\") pod \"nmstate-metrics-54757c584b-959qf\" (UID: \"a1a9218c-ccf7-4ded-812b-6d3fe2db137c\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-959qf" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.425307 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/51327a90-d59d-4d64-899c-5ba26ad301c3-nmstate-lock\") pod \"nmstate-handler-p8pwn\" (UID: \"51327a90-d59d-4d64-899c-5ba26ad301c3\") " pod="openshift-nmstate/nmstate-handler-p8pwn" Jan 28 15:59:32 crc kubenswrapper[4811]: E0128 15:59:32.425317 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0f288174-12e8-4627-9106-3f96f5368b58-tls-key-pair podName:0f288174-12e8-4627-9106-3f96f5368b58 nodeName:}" failed. No retries permitted until 2026-01-28 15:59:32.925299014 +0000 UTC m=+865.679662597 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/0f288174-12e8-4627-9106-3f96f5368b58-tls-key-pair") pod "nmstate-webhook-8474b5b9d8-fqkmg" (UID: "0f288174-12e8-4627-9106-3f96f5368b58") : secret "openshift-nmstate-webhook" not found Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.425374 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/51327a90-d59d-4d64-899c-5ba26ad301c3-dbus-socket\") pod \"nmstate-handler-p8pwn\" (UID: \"51327a90-d59d-4d64-899c-5ba26ad301c3\") " pod="openshift-nmstate/nmstate-handler-p8pwn" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.425456 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4jtb\" (UniqueName: \"kubernetes.io/projected/742554e2-f5ea-4701-b8cf-c81f6dd0ae1b-kube-api-access-k4jtb\") pod \"nmstate-console-plugin-7754f76f8b-ncskp\" (UID: \"742554e2-f5ea-4701-b8cf-c81f6dd0ae1b\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-ncskp" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.425668 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/51327a90-d59d-4d64-899c-5ba26ad301c3-dbus-socket\") pod \"nmstate-handler-p8pwn\" (UID: \"51327a90-d59d-4d64-899c-5ba26ad301c3\") " pod="openshift-nmstate/nmstate-handler-p8pwn" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.443287 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htqxs\" (UniqueName: \"kubernetes.io/projected/0f288174-12e8-4627-9106-3f96f5368b58-kube-api-access-htqxs\") pod \"nmstate-webhook-8474b5b9d8-fqkmg\" (UID: \"0f288174-12e8-4627-9106-3f96f5368b58\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-fqkmg" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.449166 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rxjp\" (UniqueName: \"kubernetes.io/projected/51327a90-d59d-4d64-899c-5ba26ad301c3-kube-api-access-2rxjp\") pod \"nmstate-handler-p8pwn\" (UID: \"51327a90-d59d-4d64-899c-5ba26ad301c3\") " pod="openshift-nmstate/nmstate-handler-p8pwn" Jan 28 15:59:32 crc 
kubenswrapper[4811]: I0128 15:59:32.449427 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vbw6q\" (UniqueName: \"kubernetes.io/projected/a1a9218c-ccf7-4ded-812b-6d3fe2db137c-kube-api-access-vbw6q\") pod \"nmstate-metrics-54757c584b-959qf\" (UID: \"a1a9218c-ccf7-4ded-812b-6d3fe2db137c\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-959qf" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.462409 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-969d5cc7f-m65vt"] Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.463295 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-969d5cc7f-m65vt" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.470204 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-959qf" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.494697 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-969d5cc7f-m65vt"] Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.522682 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-p8pwn" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.526580 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4jtb\" (UniqueName: \"kubernetes.io/projected/742554e2-f5ea-4701-b8cf-c81f6dd0ae1b-kube-api-access-k4jtb\") pod \"nmstate-console-plugin-7754f76f8b-ncskp\" (UID: \"742554e2-f5ea-4701-b8cf-c81f6dd0ae1b\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-ncskp" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.526661 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/742554e2-f5ea-4701-b8cf-c81f6dd0ae1b-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-ncskp\" (UID: \"742554e2-f5ea-4701-b8cf-c81f6dd0ae1b\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-ncskp" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.526692 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/742554e2-f5ea-4701-b8cf-c81f6dd0ae1b-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-ncskp\" (UID: \"742554e2-f5ea-4701-b8cf-c81f6dd0ae1b\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-ncskp" Jan 28 15:59:32 crc kubenswrapper[4811]: E0128 15:59:32.527169 4811 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Jan 28 15:59:32 crc kubenswrapper[4811]: E0128 15:59:32.527232 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/742554e2-f5ea-4701-b8cf-c81f6dd0ae1b-plugin-serving-cert podName:742554e2-f5ea-4701-b8cf-c81f6dd0ae1b nodeName:}" failed. No retries permitted until 2026-01-28 15:59:33.027214369 +0000 UTC m=+865.781577952 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/742554e2-f5ea-4701-b8cf-c81f6dd0ae1b-plugin-serving-cert") pod "nmstate-console-plugin-7754f76f8b-ncskp" (UID: "742554e2-f5ea-4701-b8cf-c81f6dd0ae1b") : secret "plugin-serving-cert" not found Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.527614 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/742554e2-f5ea-4701-b8cf-c81f6dd0ae1b-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-ncskp\" (UID: \"742554e2-f5ea-4701-b8cf-c81f6dd0ae1b\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-ncskp" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.551094 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4jtb\" (UniqueName: \"kubernetes.io/projected/742554e2-f5ea-4701-b8cf-c81f6dd0ae1b-kube-api-access-k4jtb\") pod \"nmstate-console-plugin-7754f76f8b-ncskp\" (UID: \"742554e2-f5ea-4701-b8cf-c81f6dd0ae1b\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-ncskp" Jan 28 15:59:32 crc kubenswrapper[4811]: W0128 15:59:32.568999 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod51327a90_d59d_4d64_899c_5ba26ad301c3.slice/crio-a79739a7dcb20648408d2ccb3ebc17c3f8cc15b93923c2e25ac757b89d5204fc WatchSource:0}: Error finding container a79739a7dcb20648408d2ccb3ebc17c3f8cc15b93923c2e25ac757b89d5204fc: Status 404 returned error can't find the container with id a79739a7dcb20648408d2ccb3ebc17c3f8cc15b93923c2e25ac757b89d5204fc Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.628112 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1a4fa8eb-643b-41af-9448-b229278bfbd5-console-oauth-config\") pod \"console-969d5cc7f-m65vt\" (UID: \"1a4fa8eb-643b-41af-9448-b229278bfbd5\") " pod="openshift-console/console-969d5cc7f-m65vt" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.628165 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1a4fa8eb-643b-41af-9448-b229278bfbd5-console-config\") pod \"console-969d5cc7f-m65vt\" (UID: \"1a4fa8eb-643b-41af-9448-b229278bfbd5\") " pod="openshift-console/console-969d5cc7f-m65vt" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.628193 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1a4fa8eb-643b-41af-9448-b229278bfbd5-service-ca\") pod \"console-969d5cc7f-m65vt\" (UID: \"1a4fa8eb-643b-41af-9448-b229278bfbd5\") " pod="openshift-console/console-969d5cc7f-m65vt" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.628217 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1a4fa8eb-643b-41af-9448-b229278bfbd5-console-serving-cert\") pod \"console-969d5cc7f-m65vt\" (UID: \"1a4fa8eb-643b-41af-9448-b229278bfbd5\") " pod="openshift-console/console-969d5cc7f-m65vt" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.628593 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxfn4\" (UniqueName: 
\"kubernetes.io/projected/1a4fa8eb-643b-41af-9448-b229278bfbd5-kube-api-access-gxfn4\") pod \"console-969d5cc7f-m65vt\" (UID: \"1a4fa8eb-643b-41af-9448-b229278bfbd5\") " pod="openshift-console/console-969d5cc7f-m65vt" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.628769 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1a4fa8eb-643b-41af-9448-b229278bfbd5-trusted-ca-bundle\") pod \"console-969d5cc7f-m65vt\" (UID: \"1a4fa8eb-643b-41af-9448-b229278bfbd5\") " pod="openshift-console/console-969d5cc7f-m65vt" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.628801 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1a4fa8eb-643b-41af-9448-b229278bfbd5-oauth-serving-cert\") pod \"console-969d5cc7f-m65vt\" (UID: \"1a4fa8eb-643b-41af-9448-b229278bfbd5\") " pod="openshift-console/console-969d5cc7f-m65vt" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.707667 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-959qf"] Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.730532 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxfn4\" (UniqueName: \"kubernetes.io/projected/1a4fa8eb-643b-41af-9448-b229278bfbd5-kube-api-access-gxfn4\") pod \"console-969d5cc7f-m65vt\" (UID: \"1a4fa8eb-643b-41af-9448-b229278bfbd5\") " pod="openshift-console/console-969d5cc7f-m65vt" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.730638 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1a4fa8eb-643b-41af-9448-b229278bfbd5-trusted-ca-bundle\") pod \"console-969d5cc7f-m65vt\" (UID: \"1a4fa8eb-643b-41af-9448-b229278bfbd5\") " pod="openshift-console/console-969d5cc7f-m65vt" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.730686 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1a4fa8eb-643b-41af-9448-b229278bfbd5-oauth-serving-cert\") pod \"console-969d5cc7f-m65vt\" (UID: \"1a4fa8eb-643b-41af-9448-b229278bfbd5\") " pod="openshift-console/console-969d5cc7f-m65vt" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.730774 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1a4fa8eb-643b-41af-9448-b229278bfbd5-console-oauth-config\") pod \"console-969d5cc7f-m65vt\" (UID: \"1a4fa8eb-643b-41af-9448-b229278bfbd5\") " pod="openshift-console/console-969d5cc7f-m65vt" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.730810 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1a4fa8eb-643b-41af-9448-b229278bfbd5-console-config\") pod \"console-969d5cc7f-m65vt\" (UID: \"1a4fa8eb-643b-41af-9448-b229278bfbd5\") " pod="openshift-console/console-969d5cc7f-m65vt" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.731031 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1a4fa8eb-643b-41af-9448-b229278bfbd5-service-ca\") pod \"console-969d5cc7f-m65vt\" (UID: \"1a4fa8eb-643b-41af-9448-b229278bfbd5\") " 
pod="openshift-console/console-969d5cc7f-m65vt" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.731857 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1a4fa8eb-643b-41af-9448-b229278bfbd5-console-config\") pod \"console-969d5cc7f-m65vt\" (UID: \"1a4fa8eb-643b-41af-9448-b229278bfbd5\") " pod="openshift-console/console-969d5cc7f-m65vt" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.732000 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1a4fa8eb-643b-41af-9448-b229278bfbd5-console-serving-cert\") pod \"console-969d5cc7f-m65vt\" (UID: \"1a4fa8eb-643b-41af-9448-b229278bfbd5\") " pod="openshift-console/console-969d5cc7f-m65vt" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.732240 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1a4fa8eb-643b-41af-9448-b229278bfbd5-trusted-ca-bundle\") pod \"console-969d5cc7f-m65vt\" (UID: \"1a4fa8eb-643b-41af-9448-b229278bfbd5\") " pod="openshift-console/console-969d5cc7f-m65vt" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.732839 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1a4fa8eb-643b-41af-9448-b229278bfbd5-oauth-serving-cert\") pod \"console-969d5cc7f-m65vt\" (UID: \"1a4fa8eb-643b-41af-9448-b229278bfbd5\") " pod="openshift-console/console-969d5cc7f-m65vt" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.732776 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1a4fa8eb-643b-41af-9448-b229278bfbd5-service-ca\") pod \"console-969d5cc7f-m65vt\" (UID: \"1a4fa8eb-643b-41af-9448-b229278bfbd5\") " pod="openshift-console/console-969d5cc7f-m65vt" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.735727 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1a4fa8eb-643b-41af-9448-b229278bfbd5-console-oauth-config\") pod \"console-969d5cc7f-m65vt\" (UID: \"1a4fa8eb-643b-41af-9448-b229278bfbd5\") " pod="openshift-console/console-969d5cc7f-m65vt" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.735831 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1a4fa8eb-643b-41af-9448-b229278bfbd5-console-serving-cert\") pod \"console-969d5cc7f-m65vt\" (UID: \"1a4fa8eb-643b-41af-9448-b229278bfbd5\") " pod="openshift-console/console-969d5cc7f-m65vt" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.747704 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxfn4\" (UniqueName: \"kubernetes.io/projected/1a4fa8eb-643b-41af-9448-b229278bfbd5-kube-api-access-gxfn4\") pod \"console-969d5cc7f-m65vt\" (UID: \"1a4fa8eb-643b-41af-9448-b229278bfbd5\") " pod="openshift-console/console-969d5cc7f-m65vt" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.833263 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-969d5cc7f-m65vt" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.872138 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-959qf" event={"ID":"a1a9218c-ccf7-4ded-812b-6d3fe2db137c","Type":"ContainerStarted","Data":"b1bd97c196ee1c87ca7aed3adea9fd523832ebe861a44497674790128ac87326"} Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.874261 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-p8pwn" event={"ID":"51327a90-d59d-4d64-899c-5ba26ad301c3","Type":"ContainerStarted","Data":"a79739a7dcb20648408d2ccb3ebc17c3f8cc15b93923c2e25ac757b89d5204fc"} Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.935158 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/0f288174-12e8-4627-9106-3f96f5368b58-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-fqkmg\" (UID: \"0f288174-12e8-4627-9106-3f96f5368b58\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-fqkmg" Jan 28 15:59:32 crc kubenswrapper[4811]: I0128 15:59:32.938916 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/0f288174-12e8-4627-9106-3f96f5368b58-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-fqkmg\" (UID: \"0f288174-12e8-4627-9106-3f96f5368b58\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-fqkmg" Jan 28 15:59:33 crc kubenswrapper[4811]: I0128 15:59:33.037228 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/742554e2-f5ea-4701-b8cf-c81f6dd0ae1b-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-ncskp\" (UID: \"742554e2-f5ea-4701-b8cf-c81f6dd0ae1b\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-ncskp" Jan 28 15:59:33 crc kubenswrapper[4811]: I0128 15:59:33.039860 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/742554e2-f5ea-4701-b8cf-c81f6dd0ae1b-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-ncskp\" (UID: \"742554e2-f5ea-4701-b8cf-c81f6dd0ae1b\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-ncskp" Jan 28 15:59:33 crc kubenswrapper[4811]: I0128 15:59:33.068749 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-969d5cc7f-m65vt"] Jan 28 15:59:33 crc kubenswrapper[4811]: W0128 15:59:33.074466 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1a4fa8eb_643b_41af_9448_b229278bfbd5.slice/crio-01968f47f6fc53a4f2cd9734fd6df133bab114c23620a08fa044a52f824985e7 WatchSource:0}: Error finding container 01968f47f6fc53a4f2cd9734fd6df133bab114c23620a08fa044a52f824985e7: Status 404 returned error can't find the container with id 01968f47f6fc53a4f2cd9734fd6df133bab114c23620a08fa044a52f824985e7 Jan 28 15:59:33 crc kubenswrapper[4811]: I0128 15:59:33.087826 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-fqkmg" Jan 28 15:59:33 crc kubenswrapper[4811]: I0128 15:59:33.198963 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-ncskp" Jan 28 15:59:33 crc kubenswrapper[4811]: I0128 15:59:33.265258 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-fqkmg"] Jan 28 15:59:33 crc kubenswrapper[4811]: W0128 15:59:33.270290 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0f288174_12e8_4627_9106_3f96f5368b58.slice/crio-795e22e2e9475770079b60e81390495cf5675f68a1352d8d2e54cd4e64f41e22 WatchSource:0}: Error finding container 795e22e2e9475770079b60e81390495cf5675f68a1352d8d2e54cd4e64f41e22: Status 404 returned error can't find the container with id 795e22e2e9475770079b60e81390495cf5675f68a1352d8d2e54cd4e64f41e22 Jan 28 15:59:33 crc kubenswrapper[4811]: I0128 15:59:33.394826 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-ncskp"] Jan 28 15:59:33 crc kubenswrapper[4811]: W0128 15:59:33.401496 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod742554e2_f5ea_4701_b8cf_c81f6dd0ae1b.slice/crio-5348bf3d8da189fbe5d9cd80feb653c60c8299f9256fb795fed727ab6bd8d8d9 WatchSource:0}: Error finding container 5348bf3d8da189fbe5d9cd80feb653c60c8299f9256fb795fed727ab6bd8d8d9: Status 404 returned error can't find the container with id 5348bf3d8da189fbe5d9cd80feb653c60c8299f9256fb795fed727ab6bd8d8d9 Jan 28 15:59:33 crc kubenswrapper[4811]: I0128 15:59:33.888160 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-969d5cc7f-m65vt" event={"ID":"1a4fa8eb-643b-41af-9448-b229278bfbd5","Type":"ContainerStarted","Data":"4c94b15f88425b909479b2605280e4ad6a22e6c9112dd9350919349767a08719"} Jan 28 15:59:33 crc kubenswrapper[4811]: I0128 15:59:33.888555 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-969d5cc7f-m65vt" event={"ID":"1a4fa8eb-643b-41af-9448-b229278bfbd5","Type":"ContainerStarted","Data":"01968f47f6fc53a4f2cd9734fd6df133bab114c23620a08fa044a52f824985e7"} Jan 28 15:59:33 crc kubenswrapper[4811]: I0128 15:59:33.892996 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-fqkmg" event={"ID":"0f288174-12e8-4627-9106-3f96f5368b58","Type":"ContainerStarted","Data":"795e22e2e9475770079b60e81390495cf5675f68a1352d8d2e54cd4e64f41e22"} Jan 28 15:59:33 crc kubenswrapper[4811]: I0128 15:59:33.893694 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-ncskp" event={"ID":"742554e2-f5ea-4701-b8cf-c81f6dd0ae1b","Type":"ContainerStarted","Data":"5348bf3d8da189fbe5d9cd80feb653c60c8299f9256fb795fed727ab6bd8d8d9"} Jan 28 15:59:33 crc kubenswrapper[4811]: I0128 15:59:33.909580 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-969d5cc7f-m65vt" podStartSLOduration=1.909536192 podStartE2EDuration="1.909536192s" podCreationTimestamp="2026-01-28 15:59:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 15:59:33.905816163 +0000 UTC m=+866.660179756" watchObservedRunningTime="2026-01-28 15:59:33.909536192 +0000 UTC m=+866.663899775" Jan 28 15:59:35 crc kubenswrapper[4811]: I0128 15:59:35.908015 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-nmstate/nmstate-handler-p8pwn" event={"ID":"51327a90-d59d-4d64-899c-5ba26ad301c3","Type":"ContainerStarted","Data":"e04495fc53b4241d12a9fd5d6f385194402dbdc6e6b5c1961ac320f20074b8fd"} Jan 28 15:59:35 crc kubenswrapper[4811]: I0128 15:59:35.908403 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-p8pwn" Jan 28 15:59:35 crc kubenswrapper[4811]: I0128 15:59:35.912494 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-fqkmg" event={"ID":"0f288174-12e8-4627-9106-3f96f5368b58","Type":"ContainerStarted","Data":"7ba6d38e84398c2d6e6bc5a0ef368440816e34efe112dc13661596958527c019"} Jan 28 15:59:35 crc kubenswrapper[4811]: I0128 15:59:35.912706 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-fqkmg" Jan 28 15:59:35 crc kubenswrapper[4811]: I0128 15:59:35.914666 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-959qf" event={"ID":"a1a9218c-ccf7-4ded-812b-6d3fe2db137c","Type":"ContainerStarted","Data":"0429817a11c0785b4ffea760b6d25697e1b17106fe1f88e81513b9076505f1b1"} Jan 28 15:59:35 crc kubenswrapper[4811]: I0128 15:59:35.926520 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-p8pwn" podStartSLOduration=0.96088937 podStartE2EDuration="3.92649335s" podCreationTimestamp="2026-01-28 15:59:32 +0000 UTC" firstStartedPulling="2026-01-28 15:59:32.571375477 +0000 UTC m=+865.325739060" lastFinishedPulling="2026-01-28 15:59:35.536979457 +0000 UTC m=+868.291343040" observedRunningTime="2026-01-28 15:59:35.923152671 +0000 UTC m=+868.677516264" watchObservedRunningTime="2026-01-28 15:59:35.92649335 +0000 UTC m=+868.680856933" Jan 28 15:59:35 crc kubenswrapper[4811]: I0128 15:59:35.941883 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-fqkmg" podStartSLOduration=1.63988179 podStartE2EDuration="3.941851717s" podCreationTimestamp="2026-01-28 15:59:32 +0000 UTC" firstStartedPulling="2026-01-28 15:59:33.272778869 +0000 UTC m=+866.027142462" lastFinishedPulling="2026-01-28 15:59:35.574748806 +0000 UTC m=+868.329112389" observedRunningTime="2026-01-28 15:59:35.940093259 +0000 UTC m=+868.694456862" watchObservedRunningTime="2026-01-28 15:59:35.941851717 +0000 UTC m=+868.696215300" Jan 28 15:59:36 crc kubenswrapper[4811]: I0128 15:59:36.925295 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-ncskp" event={"ID":"742554e2-f5ea-4701-b8cf-c81f6dd0ae1b","Type":"ContainerStarted","Data":"d3f38cb734a01ad036632125c372a5bfab4451d6c8f1b054f8251923e390ebad"} Jan 28 15:59:36 crc kubenswrapper[4811]: I0128 15:59:36.953728 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-ncskp" podStartSLOduration=1.644278265 podStartE2EDuration="4.953708269s" podCreationTimestamp="2026-01-28 15:59:32 +0000 UTC" firstStartedPulling="2026-01-28 15:59:33.403650391 +0000 UTC m=+866.158013974" lastFinishedPulling="2026-01-28 15:59:36.713080395 +0000 UTC m=+869.467443978" observedRunningTime="2026-01-28 15:59:36.950159036 +0000 UTC m=+869.704522619" watchObservedRunningTime="2026-01-28 15:59:36.953708269 +0000 UTC m=+869.708071852" Jan 28 15:59:37 crc kubenswrapper[4811]: I0128 15:59:37.934853 4811 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-959qf" event={"ID":"a1a9218c-ccf7-4ded-812b-6d3fe2db137c","Type":"ContainerStarted","Data":"a35f78b04eea7509a778cfd3dd53ffe418bf86f0835ecfcf077ded5c21b998f1"} Jan 28 15:59:37 crc kubenswrapper[4811]: I0128 15:59:37.956750 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-54757c584b-959qf" podStartSLOduration=0.966592642 podStartE2EDuration="5.95673103s" podCreationTimestamp="2026-01-28 15:59:32 +0000 UTC" firstStartedPulling="2026-01-28 15:59:32.715558671 +0000 UTC m=+865.469922244" lastFinishedPulling="2026-01-28 15:59:37.705697049 +0000 UTC m=+870.460060632" observedRunningTime="2026-01-28 15:59:37.952515908 +0000 UTC m=+870.706879501" watchObservedRunningTime="2026-01-28 15:59:37.95673103 +0000 UTC m=+870.711094613" Jan 28 15:59:42 crc kubenswrapper[4811]: I0128 15:59:42.544007 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-p8pwn" Jan 28 15:59:42 crc kubenswrapper[4811]: I0128 15:59:42.834459 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-969d5cc7f-m65vt" Jan 28 15:59:42 crc kubenswrapper[4811]: I0128 15:59:42.834527 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-969d5cc7f-m65vt" Jan 28 15:59:42 crc kubenswrapper[4811]: I0128 15:59:42.839335 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-969d5cc7f-m65vt" Jan 28 15:59:42 crc kubenswrapper[4811]: I0128 15:59:42.966478 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-969d5cc7f-m65vt" Jan 28 15:59:43 crc kubenswrapper[4811]: I0128 15:59:43.032715 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-xcx2z"] Jan 28 15:59:53 crc kubenswrapper[4811]: I0128 15:59:53.095765 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-fqkmg" Jan 28 16:00:00 crc kubenswrapper[4811]: I0128 16:00:00.156544 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493600-f95tf"] Jan 28 16:00:00 crc kubenswrapper[4811]: I0128 16:00:00.158899 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493600-f95tf" Jan 28 16:00:00 crc kubenswrapper[4811]: I0128 16:00:00.163715 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 28 16:00:00 crc kubenswrapper[4811]: I0128 16:00:00.163929 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 28 16:00:00 crc kubenswrapper[4811]: I0128 16:00:00.195787 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493600-f95tf"] Jan 28 16:00:00 crc kubenswrapper[4811]: I0128 16:00:00.214253 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2522f10c-03a4-43b9-8cd6-ce43816ca0d9-secret-volume\") pod \"collect-profiles-29493600-f95tf\" (UID: \"2522f10c-03a4-43b9-8cd6-ce43816ca0d9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493600-f95tf" Jan 28 16:00:00 crc kubenswrapper[4811]: I0128 16:00:00.214311 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mcjw2\" (UniqueName: \"kubernetes.io/projected/2522f10c-03a4-43b9-8cd6-ce43816ca0d9-kube-api-access-mcjw2\") pod \"collect-profiles-29493600-f95tf\" (UID: \"2522f10c-03a4-43b9-8cd6-ce43816ca0d9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493600-f95tf" Jan 28 16:00:00 crc kubenswrapper[4811]: I0128 16:00:00.214370 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2522f10c-03a4-43b9-8cd6-ce43816ca0d9-config-volume\") pod \"collect-profiles-29493600-f95tf\" (UID: \"2522f10c-03a4-43b9-8cd6-ce43816ca0d9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493600-f95tf" Jan 28 16:00:00 crc kubenswrapper[4811]: I0128 16:00:00.315352 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2522f10c-03a4-43b9-8cd6-ce43816ca0d9-config-volume\") pod \"collect-profiles-29493600-f95tf\" (UID: \"2522f10c-03a4-43b9-8cd6-ce43816ca0d9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493600-f95tf" Jan 28 16:00:00 crc kubenswrapper[4811]: I0128 16:00:00.315468 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2522f10c-03a4-43b9-8cd6-ce43816ca0d9-secret-volume\") pod \"collect-profiles-29493600-f95tf\" (UID: \"2522f10c-03a4-43b9-8cd6-ce43816ca0d9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493600-f95tf" Jan 28 16:00:00 crc kubenswrapper[4811]: I0128 16:00:00.315495 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mcjw2\" (UniqueName: \"kubernetes.io/projected/2522f10c-03a4-43b9-8cd6-ce43816ca0d9-kube-api-access-mcjw2\") pod \"collect-profiles-29493600-f95tf\" (UID: \"2522f10c-03a4-43b9-8cd6-ce43816ca0d9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493600-f95tf" Jan 28 16:00:00 crc kubenswrapper[4811]: I0128 16:00:00.316919 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2522f10c-03a4-43b9-8cd6-ce43816ca0d9-config-volume\") pod 
\"collect-profiles-29493600-f95tf\" (UID: \"2522f10c-03a4-43b9-8cd6-ce43816ca0d9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493600-f95tf" Jan 28 16:00:00 crc kubenswrapper[4811]: I0128 16:00:00.329828 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2522f10c-03a4-43b9-8cd6-ce43816ca0d9-secret-volume\") pod \"collect-profiles-29493600-f95tf\" (UID: \"2522f10c-03a4-43b9-8cd6-ce43816ca0d9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493600-f95tf" Jan 28 16:00:00 crc kubenswrapper[4811]: I0128 16:00:00.331147 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mcjw2\" (UniqueName: \"kubernetes.io/projected/2522f10c-03a4-43b9-8cd6-ce43816ca0d9-kube-api-access-mcjw2\") pod \"collect-profiles-29493600-f95tf\" (UID: \"2522f10c-03a4-43b9-8cd6-ce43816ca0d9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493600-f95tf" Jan 28 16:00:00 crc kubenswrapper[4811]: I0128 16:00:00.514754 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493600-f95tf" Jan 28 16:00:00 crc kubenswrapper[4811]: I0128 16:00:00.703519 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493600-f95tf"] Jan 28 16:00:01 crc kubenswrapper[4811]: I0128 16:00:01.070780 4811 generic.go:334] "Generic (PLEG): container finished" podID="2522f10c-03a4-43b9-8cd6-ce43816ca0d9" containerID="4a3237493d2178658083297fb69845bf88432664a6d30ef70cbd83d69d4bcf39" exitCode=0 Jan 28 16:00:01 crc kubenswrapper[4811]: I0128 16:00:01.070840 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493600-f95tf" event={"ID":"2522f10c-03a4-43b9-8cd6-ce43816ca0d9","Type":"ContainerDied","Data":"4a3237493d2178658083297fb69845bf88432664a6d30ef70cbd83d69d4bcf39"} Jan 28 16:00:01 crc kubenswrapper[4811]: I0128 16:00:01.071080 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493600-f95tf" event={"ID":"2522f10c-03a4-43b9-8cd6-ce43816ca0d9","Type":"ContainerStarted","Data":"4643b3e61f8f64b101f9cb9552b21536a28ac431b4d69e7d77772e2de490734f"} Jan 28 16:00:02 crc kubenswrapper[4811]: I0128 16:00:02.337316 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493600-f95tf" Jan 28 16:00:02 crc kubenswrapper[4811]: I0128 16:00:02.443389 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2522f10c-03a4-43b9-8cd6-ce43816ca0d9-secret-volume\") pod \"2522f10c-03a4-43b9-8cd6-ce43816ca0d9\" (UID: \"2522f10c-03a4-43b9-8cd6-ce43816ca0d9\") " Jan 28 16:00:02 crc kubenswrapper[4811]: I0128 16:00:02.443951 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2522f10c-03a4-43b9-8cd6-ce43816ca0d9-config-volume\") pod \"2522f10c-03a4-43b9-8cd6-ce43816ca0d9\" (UID: \"2522f10c-03a4-43b9-8cd6-ce43816ca0d9\") " Jan 28 16:00:02 crc kubenswrapper[4811]: I0128 16:00:02.444021 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mcjw2\" (UniqueName: \"kubernetes.io/projected/2522f10c-03a4-43b9-8cd6-ce43816ca0d9-kube-api-access-mcjw2\") pod \"2522f10c-03a4-43b9-8cd6-ce43816ca0d9\" (UID: \"2522f10c-03a4-43b9-8cd6-ce43816ca0d9\") " Jan 28 16:00:02 crc kubenswrapper[4811]: I0128 16:00:02.446101 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2522f10c-03a4-43b9-8cd6-ce43816ca0d9-config-volume" (OuterVolumeSpecName: "config-volume") pod "2522f10c-03a4-43b9-8cd6-ce43816ca0d9" (UID: "2522f10c-03a4-43b9-8cd6-ce43816ca0d9"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:00:02 crc kubenswrapper[4811]: I0128 16:00:02.450013 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2522f10c-03a4-43b9-8cd6-ce43816ca0d9-kube-api-access-mcjw2" (OuterVolumeSpecName: "kube-api-access-mcjw2") pod "2522f10c-03a4-43b9-8cd6-ce43816ca0d9" (UID: "2522f10c-03a4-43b9-8cd6-ce43816ca0d9"). InnerVolumeSpecName "kube-api-access-mcjw2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:00:02 crc kubenswrapper[4811]: I0128 16:00:02.450674 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2522f10c-03a4-43b9-8cd6-ce43816ca0d9-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2522f10c-03a4-43b9-8cd6-ce43816ca0d9" (UID: "2522f10c-03a4-43b9-8cd6-ce43816ca0d9"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:00:02 crc kubenswrapper[4811]: I0128 16:00:02.544815 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mcjw2\" (UniqueName: \"kubernetes.io/projected/2522f10c-03a4-43b9-8cd6-ce43816ca0d9-kube-api-access-mcjw2\") on node \"crc\" DevicePath \"\"" Jan 28 16:00:02 crc kubenswrapper[4811]: I0128 16:00:02.544846 4811 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2522f10c-03a4-43b9-8cd6-ce43816ca0d9-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 28 16:00:02 crc kubenswrapper[4811]: I0128 16:00:02.544859 4811 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2522f10c-03a4-43b9-8cd6-ce43816ca0d9-config-volume\") on node \"crc\" DevicePath \"\"" Jan 28 16:00:03 crc kubenswrapper[4811]: I0128 16:00:03.085387 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493600-f95tf" event={"ID":"2522f10c-03a4-43b9-8cd6-ce43816ca0d9","Type":"ContainerDied","Data":"4643b3e61f8f64b101f9cb9552b21536a28ac431b4d69e7d77772e2de490734f"} Jan 28 16:00:03 crc kubenswrapper[4811]: I0128 16:00:03.085707 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4643b3e61f8f64b101f9cb9552b21536a28ac431b4d69e7d77772e2de490734f" Jan 28 16:00:03 crc kubenswrapper[4811]: I0128 16:00:03.085704 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493600-f95tf" Jan 28 16:00:03 crc kubenswrapper[4811]: I0128 16:00:03.086889 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 16:00:03 crc kubenswrapper[4811]: I0128 16:00:03.086970 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 16:00:07 crc kubenswrapper[4811]: I0128 16:00:07.007339 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m"] Jan 28 16:00:07 crc kubenswrapper[4811]: E0128 16:00:07.008540 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2522f10c-03a4-43b9-8cd6-ce43816ca0d9" containerName="collect-profiles" Jan 28 16:00:07 crc kubenswrapper[4811]: I0128 16:00:07.008565 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="2522f10c-03a4-43b9-8cd6-ce43816ca0d9" containerName="collect-profiles" Jan 28 16:00:07 crc kubenswrapper[4811]: I0128 16:00:07.008797 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="2522f10c-03a4-43b9-8cd6-ce43816ca0d9" containerName="collect-profiles" Jan 28 16:00:07 crc kubenswrapper[4811]: I0128 16:00:07.010696 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m" Jan 28 16:00:07 crc kubenswrapper[4811]: I0128 16:00:07.012256 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m"] Jan 28 16:00:07 crc kubenswrapper[4811]: I0128 16:00:07.013546 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 28 16:00:07 crc kubenswrapper[4811]: I0128 16:00:07.106486 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/97d2fe47-1c05-4300-a476-1bf42f0f6c5a-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m\" (UID: \"97d2fe47-1c05-4300-a476-1bf42f0f6c5a\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m" Jan 28 16:00:07 crc kubenswrapper[4811]: I0128 16:00:07.106549 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwp48\" (UniqueName: \"kubernetes.io/projected/97d2fe47-1c05-4300-a476-1bf42f0f6c5a-kube-api-access-xwp48\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m\" (UID: \"97d2fe47-1c05-4300-a476-1bf42f0f6c5a\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m" Jan 28 16:00:07 crc kubenswrapper[4811]: I0128 16:00:07.106594 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/97d2fe47-1c05-4300-a476-1bf42f0f6c5a-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m\" (UID: \"97d2fe47-1c05-4300-a476-1bf42f0f6c5a\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m" Jan 28 16:00:07 crc kubenswrapper[4811]: I0128 16:00:07.208347 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwp48\" (UniqueName: \"kubernetes.io/projected/97d2fe47-1c05-4300-a476-1bf42f0f6c5a-kube-api-access-xwp48\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m\" (UID: \"97d2fe47-1c05-4300-a476-1bf42f0f6c5a\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m" Jan 28 16:00:07 crc kubenswrapper[4811]: I0128 16:00:07.208547 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/97d2fe47-1c05-4300-a476-1bf42f0f6c5a-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m\" (UID: \"97d2fe47-1c05-4300-a476-1bf42f0f6c5a\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m" Jan 28 16:00:07 crc kubenswrapper[4811]: I0128 16:00:07.208660 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/97d2fe47-1c05-4300-a476-1bf42f0f6c5a-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m\" (UID: \"97d2fe47-1c05-4300-a476-1bf42f0f6c5a\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m" Jan 28 16:00:07 crc kubenswrapper[4811]: I0128 16:00:07.209276 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/97d2fe47-1c05-4300-a476-1bf42f0f6c5a-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m\" (UID: \"97d2fe47-1c05-4300-a476-1bf42f0f6c5a\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m" Jan 28 16:00:07 crc kubenswrapper[4811]: I0128 16:00:07.209325 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/97d2fe47-1c05-4300-a476-1bf42f0f6c5a-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m\" (UID: \"97d2fe47-1c05-4300-a476-1bf42f0f6c5a\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m" Jan 28 16:00:07 crc kubenswrapper[4811]: I0128 16:00:07.240767 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwp48\" (UniqueName: \"kubernetes.io/projected/97d2fe47-1c05-4300-a476-1bf42f0f6c5a-kube-api-access-xwp48\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m\" (UID: \"97d2fe47-1c05-4300-a476-1bf42f0f6c5a\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m" Jan 28 16:00:07 crc kubenswrapper[4811]: I0128 16:00:07.329646 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m" Jan 28 16:00:07 crc kubenswrapper[4811]: I0128 16:00:07.559112 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m"] Jan 28 16:00:08 crc kubenswrapper[4811]: I0128 16:00:08.078988 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-xcx2z" podUID="cfa466bf-9337-45cd-a739-c0d3b5521e13" containerName="console" containerID="cri-o://dbc20bcc218f6790f976ede91884bc3ef00f630b1869bc9a8479eead9a54b72f" gracePeriod=15 Jan 28 16:00:08 crc kubenswrapper[4811]: I0128 16:00:08.112251 4811 generic.go:334] "Generic (PLEG): container finished" podID="97d2fe47-1c05-4300-a476-1bf42f0f6c5a" containerID="750391fdff74cc7e70fbafa6f84ea9c4089d9b9f8f00f5af60bedcecf7edc0bb" exitCode=0 Jan 28 16:00:08 crc kubenswrapper[4811]: I0128 16:00:08.112290 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m" event={"ID":"97d2fe47-1c05-4300-a476-1bf42f0f6c5a","Type":"ContainerDied","Data":"750391fdff74cc7e70fbafa6f84ea9c4089d9b9f8f00f5af60bedcecf7edc0bb"} Jan 28 16:00:08 crc kubenswrapper[4811]: I0128 16:00:08.112314 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m" event={"ID":"97d2fe47-1c05-4300-a476-1bf42f0f6c5a","Type":"ContainerStarted","Data":"bff45e3d09720589fcbea337f447b85f9fa5dc1617af24d8568d1b9dddd02415"} Jan 28 16:00:08 crc kubenswrapper[4811]: I0128 16:00:08.408386 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-xcx2z_cfa466bf-9337-45cd-a739-c0d3b5521e13/console/0.log" Jan 28 16:00:08 crc kubenswrapper[4811]: I0128 16:00:08.408543 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-xcx2z" Jan 28 16:00:08 crc kubenswrapper[4811]: I0128 16:00:08.524641 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/cfa466bf-9337-45cd-a739-c0d3b5521e13-console-oauth-config\") pod \"cfa466bf-9337-45cd-a739-c0d3b5521e13\" (UID: \"cfa466bf-9337-45cd-a739-c0d3b5521e13\") " Jan 28 16:00:08 crc kubenswrapper[4811]: I0128 16:00:08.525106 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/cfa466bf-9337-45cd-a739-c0d3b5521e13-service-ca\") pod \"cfa466bf-9337-45cd-a739-c0d3b5521e13\" (UID: \"cfa466bf-9337-45cd-a739-c0d3b5521e13\") " Jan 28 16:00:08 crc kubenswrapper[4811]: I0128 16:00:08.525145 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hwjps\" (UniqueName: \"kubernetes.io/projected/cfa466bf-9337-45cd-a739-c0d3b5521e13-kube-api-access-hwjps\") pod \"cfa466bf-9337-45cd-a739-c0d3b5521e13\" (UID: \"cfa466bf-9337-45cd-a739-c0d3b5521e13\") " Jan 28 16:00:08 crc kubenswrapper[4811]: I0128 16:00:08.525207 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/cfa466bf-9337-45cd-a739-c0d3b5521e13-console-serving-cert\") pod \"cfa466bf-9337-45cd-a739-c0d3b5521e13\" (UID: \"cfa466bf-9337-45cd-a739-c0d3b5521e13\") " Jan 28 16:00:08 crc kubenswrapper[4811]: I0128 16:00:08.525248 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/cfa466bf-9337-45cd-a739-c0d3b5521e13-oauth-serving-cert\") pod \"cfa466bf-9337-45cd-a739-c0d3b5521e13\" (UID: \"cfa466bf-9337-45cd-a739-c0d3b5521e13\") " Jan 28 16:00:08 crc kubenswrapper[4811]: I0128 16:00:08.525272 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/cfa466bf-9337-45cd-a739-c0d3b5521e13-console-config\") pod \"cfa466bf-9337-45cd-a739-c0d3b5521e13\" (UID: \"cfa466bf-9337-45cd-a739-c0d3b5521e13\") " Jan 28 16:00:08 crc kubenswrapper[4811]: I0128 16:00:08.525376 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cfa466bf-9337-45cd-a739-c0d3b5521e13-trusted-ca-bundle\") pod \"cfa466bf-9337-45cd-a739-c0d3b5521e13\" (UID: \"cfa466bf-9337-45cd-a739-c0d3b5521e13\") " Jan 28 16:00:08 crc kubenswrapper[4811]: I0128 16:00:08.526076 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cfa466bf-9337-45cd-a739-c0d3b5521e13-console-config" (OuterVolumeSpecName: "console-config") pod "cfa466bf-9337-45cd-a739-c0d3b5521e13" (UID: "cfa466bf-9337-45cd-a739-c0d3b5521e13"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:00:08 crc kubenswrapper[4811]: I0128 16:00:08.526104 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cfa466bf-9337-45cd-a739-c0d3b5521e13-service-ca" (OuterVolumeSpecName: "service-ca") pod "cfa466bf-9337-45cd-a739-c0d3b5521e13" (UID: "cfa466bf-9337-45cd-a739-c0d3b5521e13"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:00:08 crc kubenswrapper[4811]: I0128 16:00:08.526123 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cfa466bf-9337-45cd-a739-c0d3b5521e13-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "cfa466bf-9337-45cd-a739-c0d3b5521e13" (UID: "cfa466bf-9337-45cd-a739-c0d3b5521e13"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:00:08 crc kubenswrapper[4811]: I0128 16:00:08.526132 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cfa466bf-9337-45cd-a739-c0d3b5521e13-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "cfa466bf-9337-45cd-a739-c0d3b5521e13" (UID: "cfa466bf-9337-45cd-a739-c0d3b5521e13"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:00:08 crc kubenswrapper[4811]: I0128 16:00:08.527222 4811 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/cfa466bf-9337-45cd-a739-c0d3b5521e13-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 28 16:00:08 crc kubenswrapper[4811]: I0128 16:00:08.527266 4811 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/cfa466bf-9337-45cd-a739-c0d3b5521e13-console-config\") on node \"crc\" DevicePath \"\"" Jan 28 16:00:08 crc kubenswrapper[4811]: I0128 16:00:08.527286 4811 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cfa466bf-9337-45cd-a739-c0d3b5521e13-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:00:08 crc kubenswrapper[4811]: I0128 16:00:08.527304 4811 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/cfa466bf-9337-45cd-a739-c0d3b5521e13-service-ca\") on node \"crc\" DevicePath \"\"" Jan 28 16:00:08 crc kubenswrapper[4811]: I0128 16:00:08.531032 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cfa466bf-9337-45cd-a739-c0d3b5521e13-kube-api-access-hwjps" (OuterVolumeSpecName: "kube-api-access-hwjps") pod "cfa466bf-9337-45cd-a739-c0d3b5521e13" (UID: "cfa466bf-9337-45cd-a739-c0d3b5521e13"). InnerVolumeSpecName "kube-api-access-hwjps". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:00:08 crc kubenswrapper[4811]: I0128 16:00:08.531345 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfa466bf-9337-45cd-a739-c0d3b5521e13-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "cfa466bf-9337-45cd-a739-c0d3b5521e13" (UID: "cfa466bf-9337-45cd-a739-c0d3b5521e13"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:00:08 crc kubenswrapper[4811]: I0128 16:00:08.538036 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfa466bf-9337-45cd-a739-c0d3b5521e13-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "cfa466bf-9337-45cd-a739-c0d3b5521e13" (UID: "cfa466bf-9337-45cd-a739-c0d3b5521e13"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:00:08 crc kubenswrapper[4811]: I0128 16:00:08.628324 4811 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/cfa466bf-9337-45cd-a739-c0d3b5521e13-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 28 16:00:08 crc kubenswrapper[4811]: I0128 16:00:08.628376 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hwjps\" (UniqueName: \"kubernetes.io/projected/cfa466bf-9337-45cd-a739-c0d3b5521e13-kube-api-access-hwjps\") on node \"crc\" DevicePath \"\"" Jan 28 16:00:08 crc kubenswrapper[4811]: I0128 16:00:08.628396 4811 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/cfa466bf-9337-45cd-a739-c0d3b5521e13-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 28 16:00:09 crc kubenswrapper[4811]: I0128 16:00:09.121999 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-xcx2z_cfa466bf-9337-45cd-a739-c0d3b5521e13/console/0.log" Jan 28 16:00:09 crc kubenswrapper[4811]: I0128 16:00:09.122060 4811 generic.go:334] "Generic (PLEG): container finished" podID="cfa466bf-9337-45cd-a739-c0d3b5521e13" containerID="dbc20bcc218f6790f976ede91884bc3ef00f630b1869bc9a8479eead9a54b72f" exitCode=2 Jan 28 16:00:09 crc kubenswrapper[4811]: I0128 16:00:09.122098 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-xcx2z" event={"ID":"cfa466bf-9337-45cd-a739-c0d3b5521e13","Type":"ContainerDied","Data":"dbc20bcc218f6790f976ede91884bc3ef00f630b1869bc9a8479eead9a54b72f"} Jan 28 16:00:09 crc kubenswrapper[4811]: I0128 16:00:09.122127 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-xcx2z" event={"ID":"cfa466bf-9337-45cd-a739-c0d3b5521e13","Type":"ContainerDied","Data":"69198bcacb227f8d3e6450f96cb168ed9cc090da6ba286858cb5d6dcd16a4722"} Jan 28 16:00:09 crc kubenswrapper[4811]: I0128 16:00:09.122150 4811 scope.go:117] "RemoveContainer" containerID="dbc20bcc218f6790f976ede91884bc3ef00f630b1869bc9a8479eead9a54b72f" Jan 28 16:00:09 crc kubenswrapper[4811]: I0128 16:00:09.122284 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-xcx2z" Jan 28 16:00:09 crc kubenswrapper[4811]: I0128 16:00:09.176837 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-xcx2z"] Jan 28 16:00:09 crc kubenswrapper[4811]: I0128 16:00:09.183828 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-xcx2z"] Jan 28 16:00:09 crc kubenswrapper[4811]: I0128 16:00:09.208492 4811 scope.go:117] "RemoveContainer" containerID="dbc20bcc218f6790f976ede91884bc3ef00f630b1869bc9a8479eead9a54b72f" Jan 28 16:00:09 crc kubenswrapper[4811]: E0128 16:00:09.209219 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dbc20bcc218f6790f976ede91884bc3ef00f630b1869bc9a8479eead9a54b72f\": container with ID starting with dbc20bcc218f6790f976ede91884bc3ef00f630b1869bc9a8479eead9a54b72f not found: ID does not exist" containerID="dbc20bcc218f6790f976ede91884bc3ef00f630b1869bc9a8479eead9a54b72f" Jan 28 16:00:09 crc kubenswrapper[4811]: I0128 16:00:09.209254 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dbc20bcc218f6790f976ede91884bc3ef00f630b1869bc9a8479eead9a54b72f"} err="failed to get container status \"dbc20bcc218f6790f976ede91884bc3ef00f630b1869bc9a8479eead9a54b72f\": rpc error: code = NotFound desc = could not find container \"dbc20bcc218f6790f976ede91884bc3ef00f630b1869bc9a8479eead9a54b72f\": container with ID starting with dbc20bcc218f6790f976ede91884bc3ef00f630b1869bc9a8479eead9a54b72f not found: ID does not exist" Jan 28 16:00:10 crc kubenswrapper[4811]: I0128 16:00:10.139859 4811 generic.go:334] "Generic (PLEG): container finished" podID="97d2fe47-1c05-4300-a476-1bf42f0f6c5a" containerID="2c38938c398f4902b9e1af7e6d186a873e0ddbadb8f218bee302f103d1a8b0f0" exitCode=0 Jan 28 16:00:10 crc kubenswrapper[4811]: I0128 16:00:10.140063 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m" event={"ID":"97d2fe47-1c05-4300-a476-1bf42f0f6c5a","Type":"ContainerDied","Data":"2c38938c398f4902b9e1af7e6d186a873e0ddbadb8f218bee302f103d1a8b0f0"} Jan 28 16:00:10 crc kubenswrapper[4811]: I0128 16:00:10.347958 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cfa466bf-9337-45cd-a739-c0d3b5521e13" path="/var/lib/kubelet/pods/cfa466bf-9337-45cd-a739-c0d3b5521e13/volumes" Jan 28 16:00:11 crc kubenswrapper[4811]: I0128 16:00:11.149337 4811 generic.go:334] "Generic (PLEG): container finished" podID="97d2fe47-1c05-4300-a476-1bf42f0f6c5a" containerID="3e4295016391d368e1e94813043d05a969c2b0e242493f69946bf13ede398cdf" exitCode=0 Jan 28 16:00:11 crc kubenswrapper[4811]: I0128 16:00:11.149386 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m" event={"ID":"97d2fe47-1c05-4300-a476-1bf42f0f6c5a","Type":"ContainerDied","Data":"3e4295016391d368e1e94813043d05a969c2b0e242493f69946bf13ede398cdf"} Jan 28 16:00:12 crc kubenswrapper[4811]: I0128 16:00:12.428206 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m" Jan 28 16:00:12 crc kubenswrapper[4811]: I0128 16:00:12.584532 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/97d2fe47-1c05-4300-a476-1bf42f0f6c5a-util\") pod \"97d2fe47-1c05-4300-a476-1bf42f0f6c5a\" (UID: \"97d2fe47-1c05-4300-a476-1bf42f0f6c5a\") " Jan 28 16:00:12 crc kubenswrapper[4811]: I0128 16:00:12.584592 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/97d2fe47-1c05-4300-a476-1bf42f0f6c5a-bundle\") pod \"97d2fe47-1c05-4300-a476-1bf42f0f6c5a\" (UID: \"97d2fe47-1c05-4300-a476-1bf42f0f6c5a\") " Jan 28 16:00:12 crc kubenswrapper[4811]: I0128 16:00:12.584688 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xwp48\" (UniqueName: \"kubernetes.io/projected/97d2fe47-1c05-4300-a476-1bf42f0f6c5a-kube-api-access-xwp48\") pod \"97d2fe47-1c05-4300-a476-1bf42f0f6c5a\" (UID: \"97d2fe47-1c05-4300-a476-1bf42f0f6c5a\") " Jan 28 16:00:12 crc kubenswrapper[4811]: I0128 16:00:12.586251 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/97d2fe47-1c05-4300-a476-1bf42f0f6c5a-bundle" (OuterVolumeSpecName: "bundle") pod "97d2fe47-1c05-4300-a476-1bf42f0f6c5a" (UID: "97d2fe47-1c05-4300-a476-1bf42f0f6c5a"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:00:12 crc kubenswrapper[4811]: I0128 16:00:12.591565 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97d2fe47-1c05-4300-a476-1bf42f0f6c5a-kube-api-access-xwp48" (OuterVolumeSpecName: "kube-api-access-xwp48") pod "97d2fe47-1c05-4300-a476-1bf42f0f6c5a" (UID: "97d2fe47-1c05-4300-a476-1bf42f0f6c5a"). InnerVolumeSpecName "kube-api-access-xwp48". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:00:12 crc kubenswrapper[4811]: I0128 16:00:12.598139 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/97d2fe47-1c05-4300-a476-1bf42f0f6c5a-util" (OuterVolumeSpecName: "util") pod "97d2fe47-1c05-4300-a476-1bf42f0f6c5a" (UID: "97d2fe47-1c05-4300-a476-1bf42f0f6c5a"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:00:12 crc kubenswrapper[4811]: I0128 16:00:12.685940 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xwp48\" (UniqueName: \"kubernetes.io/projected/97d2fe47-1c05-4300-a476-1bf42f0f6c5a-kube-api-access-xwp48\") on node \"crc\" DevicePath \"\"" Jan 28 16:00:12 crc kubenswrapper[4811]: I0128 16:00:12.686325 4811 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/97d2fe47-1c05-4300-a476-1bf42f0f6c5a-util\") on node \"crc\" DevicePath \"\"" Jan 28 16:00:12 crc kubenswrapper[4811]: I0128 16:00:12.686336 4811 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/97d2fe47-1c05-4300-a476-1bf42f0f6c5a-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:00:13 crc kubenswrapper[4811]: I0128 16:00:13.168624 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m" event={"ID":"97d2fe47-1c05-4300-a476-1bf42f0f6c5a","Type":"ContainerDied","Data":"bff45e3d09720589fcbea337f447b85f9fa5dc1617af24d8568d1b9dddd02415"} Jan 28 16:00:13 crc kubenswrapper[4811]: I0128 16:00:13.168701 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bff45e3d09720589fcbea337f447b85f9fa5dc1617af24d8568d1b9dddd02415" Jan 28 16:00:13 crc kubenswrapper[4811]: I0128 16:00:13.168779 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.250843 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-6fffdf7c57-wb2qh"] Jan 28 16:00:21 crc kubenswrapper[4811]: E0128 16:00:21.251424 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfa466bf-9337-45cd-a739-c0d3b5521e13" containerName="console" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.251458 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfa466bf-9337-45cd-a739-c0d3b5521e13" containerName="console" Jan 28 16:00:21 crc kubenswrapper[4811]: E0128 16:00:21.251475 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97d2fe47-1c05-4300-a476-1bf42f0f6c5a" containerName="util" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.251483 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="97d2fe47-1c05-4300-a476-1bf42f0f6c5a" containerName="util" Jan 28 16:00:21 crc kubenswrapper[4811]: E0128 16:00:21.251500 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97d2fe47-1c05-4300-a476-1bf42f0f6c5a" containerName="pull" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.251508 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="97d2fe47-1c05-4300-a476-1bf42f0f6c5a" containerName="pull" Jan 28 16:00:21 crc kubenswrapper[4811]: E0128 16:00:21.251517 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97d2fe47-1c05-4300-a476-1bf42f0f6c5a" containerName="extract" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.251524 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="97d2fe47-1c05-4300-a476-1bf42f0f6c5a" containerName="extract" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.251641 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfa466bf-9337-45cd-a739-c0d3b5521e13" containerName="console" Jan 
28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.251653 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="97d2fe47-1c05-4300-a476-1bf42f0f6c5a" containerName="extract" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.252089 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-6fffdf7c57-wb2qh" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.254905 4811 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.255254 4811 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.255589 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.255802 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.256013 4811 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-x8s7p" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.270087 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-6fffdf7c57-wb2qh"] Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.403608 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a123b5e8-6e64-4200-8a29-e49d7402cf8f-apiservice-cert\") pod \"metallb-operator-controller-manager-6fffdf7c57-wb2qh\" (UID: \"a123b5e8-6e64-4200-8a29-e49d7402cf8f\") " pod="metallb-system/metallb-operator-controller-manager-6fffdf7c57-wb2qh" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.403663 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a123b5e8-6e64-4200-8a29-e49d7402cf8f-webhook-cert\") pod \"metallb-operator-controller-manager-6fffdf7c57-wb2qh\" (UID: \"a123b5e8-6e64-4200-8a29-e49d7402cf8f\") " pod="metallb-system/metallb-operator-controller-manager-6fffdf7c57-wb2qh" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.403721 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4d89l\" (UniqueName: \"kubernetes.io/projected/a123b5e8-6e64-4200-8a29-e49d7402cf8f-kube-api-access-4d89l\") pod \"metallb-operator-controller-manager-6fffdf7c57-wb2qh\" (UID: \"a123b5e8-6e64-4200-8a29-e49d7402cf8f\") " pod="metallb-system/metallb-operator-controller-manager-6fffdf7c57-wb2qh" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.505362 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4d89l\" (UniqueName: \"kubernetes.io/projected/a123b5e8-6e64-4200-8a29-e49d7402cf8f-kube-api-access-4d89l\") pod \"metallb-operator-controller-manager-6fffdf7c57-wb2qh\" (UID: \"a123b5e8-6e64-4200-8a29-e49d7402cf8f\") " pod="metallb-system/metallb-operator-controller-manager-6fffdf7c57-wb2qh" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.505703 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: 
\"kubernetes.io/secret/a123b5e8-6e64-4200-8a29-e49d7402cf8f-apiservice-cert\") pod \"metallb-operator-controller-manager-6fffdf7c57-wb2qh\" (UID: \"a123b5e8-6e64-4200-8a29-e49d7402cf8f\") " pod="metallb-system/metallb-operator-controller-manager-6fffdf7c57-wb2qh" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.505784 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a123b5e8-6e64-4200-8a29-e49d7402cf8f-webhook-cert\") pod \"metallb-operator-controller-manager-6fffdf7c57-wb2qh\" (UID: \"a123b5e8-6e64-4200-8a29-e49d7402cf8f\") " pod="metallb-system/metallb-operator-controller-manager-6fffdf7c57-wb2qh" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.512244 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a123b5e8-6e64-4200-8a29-e49d7402cf8f-webhook-cert\") pod \"metallb-operator-controller-manager-6fffdf7c57-wb2qh\" (UID: \"a123b5e8-6e64-4200-8a29-e49d7402cf8f\") " pod="metallb-system/metallb-operator-controller-manager-6fffdf7c57-wb2qh" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.513917 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a123b5e8-6e64-4200-8a29-e49d7402cf8f-apiservice-cert\") pod \"metallb-operator-controller-manager-6fffdf7c57-wb2qh\" (UID: \"a123b5e8-6e64-4200-8a29-e49d7402cf8f\") " pod="metallb-system/metallb-operator-controller-manager-6fffdf7c57-wb2qh" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.541292 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4d89l\" (UniqueName: \"kubernetes.io/projected/a123b5e8-6e64-4200-8a29-e49d7402cf8f-kube-api-access-4d89l\") pod \"metallb-operator-controller-manager-6fffdf7c57-wb2qh\" (UID: \"a123b5e8-6e64-4200-8a29-e49d7402cf8f\") " pod="metallb-system/metallb-operator-controller-manager-6fffdf7c57-wb2qh" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.565696 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-6fffdf7c57-wb2qh" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.588461 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-75fc857c96-mkr4b"] Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.589344 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-75fc857c96-mkr4b" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.591144 4811 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-792mz" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.591394 4811 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.591973 4811 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.609509 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-75fc857c96-mkr4b"] Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.709960 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/78d59be8-2ce7-49b2-80ed-42ec32101487-apiservice-cert\") pod \"metallb-operator-webhook-server-75fc857c96-mkr4b\" (UID: \"78d59be8-2ce7-49b2-80ed-42ec32101487\") " pod="metallb-system/metallb-operator-webhook-server-75fc857c96-mkr4b" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.710023 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/78d59be8-2ce7-49b2-80ed-42ec32101487-webhook-cert\") pod \"metallb-operator-webhook-server-75fc857c96-mkr4b\" (UID: \"78d59be8-2ce7-49b2-80ed-42ec32101487\") " pod="metallb-system/metallb-operator-webhook-server-75fc857c96-mkr4b" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.710076 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hflqf\" (UniqueName: \"kubernetes.io/projected/78d59be8-2ce7-49b2-80ed-42ec32101487-kube-api-access-hflqf\") pod \"metallb-operator-webhook-server-75fc857c96-mkr4b\" (UID: \"78d59be8-2ce7-49b2-80ed-42ec32101487\") " pod="metallb-system/metallb-operator-webhook-server-75fc857c96-mkr4b" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.805586 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-6fffdf7c57-wb2qh"] Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.810960 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hflqf\" (UniqueName: \"kubernetes.io/projected/78d59be8-2ce7-49b2-80ed-42ec32101487-kube-api-access-hflqf\") pod \"metallb-operator-webhook-server-75fc857c96-mkr4b\" (UID: \"78d59be8-2ce7-49b2-80ed-42ec32101487\") " pod="metallb-system/metallb-operator-webhook-server-75fc857c96-mkr4b" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.811072 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/78d59be8-2ce7-49b2-80ed-42ec32101487-apiservice-cert\") pod \"metallb-operator-webhook-server-75fc857c96-mkr4b\" (UID: \"78d59be8-2ce7-49b2-80ed-42ec32101487\") " pod="metallb-system/metallb-operator-webhook-server-75fc857c96-mkr4b" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.811103 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/78d59be8-2ce7-49b2-80ed-42ec32101487-webhook-cert\") pod 
\"metallb-operator-webhook-server-75fc857c96-mkr4b\" (UID: \"78d59be8-2ce7-49b2-80ed-42ec32101487\") " pod="metallb-system/metallb-operator-webhook-server-75fc857c96-mkr4b" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.816331 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/78d59be8-2ce7-49b2-80ed-42ec32101487-apiservice-cert\") pod \"metallb-operator-webhook-server-75fc857c96-mkr4b\" (UID: \"78d59be8-2ce7-49b2-80ed-42ec32101487\") " pod="metallb-system/metallb-operator-webhook-server-75fc857c96-mkr4b" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.816364 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/78d59be8-2ce7-49b2-80ed-42ec32101487-webhook-cert\") pod \"metallb-operator-webhook-server-75fc857c96-mkr4b\" (UID: \"78d59be8-2ce7-49b2-80ed-42ec32101487\") " pod="metallb-system/metallb-operator-webhook-server-75fc857c96-mkr4b" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.833336 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hflqf\" (UniqueName: \"kubernetes.io/projected/78d59be8-2ce7-49b2-80ed-42ec32101487-kube-api-access-hflqf\") pod \"metallb-operator-webhook-server-75fc857c96-mkr4b\" (UID: \"78d59be8-2ce7-49b2-80ed-42ec32101487\") " pod="metallb-system/metallb-operator-webhook-server-75fc857c96-mkr4b" Jan 28 16:00:21 crc kubenswrapper[4811]: I0128 16:00:21.935791 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-75fc857c96-mkr4b" Jan 28 16:00:22 crc kubenswrapper[4811]: I0128 16:00:22.135124 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-75fc857c96-mkr4b"] Jan 28 16:00:22 crc kubenswrapper[4811]: W0128 16:00:22.137785 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod78d59be8_2ce7_49b2_80ed_42ec32101487.slice/crio-449504e917d49b4a14071e0f3db16ecfdd6dbec1a9be57707d4ea344a74fa454 WatchSource:0}: Error finding container 449504e917d49b4a14071e0f3db16ecfdd6dbec1a9be57707d4ea344a74fa454: Status 404 returned error can't find the container with id 449504e917d49b4a14071e0f3db16ecfdd6dbec1a9be57707d4ea344a74fa454 Jan 28 16:00:22 crc kubenswrapper[4811]: I0128 16:00:22.212678 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-6fffdf7c57-wb2qh" event={"ID":"a123b5e8-6e64-4200-8a29-e49d7402cf8f","Type":"ContainerStarted","Data":"c819caa389fb36b78d8fbaf3c0b5af0733c1ce2959724b52c68664e468b6e192"} Jan 28 16:00:22 crc kubenswrapper[4811]: I0128 16:00:22.213610 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-75fc857c96-mkr4b" event={"ID":"78d59be8-2ce7-49b2-80ed-42ec32101487","Type":"ContainerStarted","Data":"449504e917d49b4a14071e0f3db16ecfdd6dbec1a9be57707d4ea344a74fa454"} Jan 28 16:00:26 crc kubenswrapper[4811]: I0128 16:00:26.262128 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-6fffdf7c57-wb2qh" event={"ID":"a123b5e8-6e64-4200-8a29-e49d7402cf8f","Type":"ContainerStarted","Data":"ddcff7567dabdb95d0ea739564491fecb58eb8e90f3ff3dc6da39c3f3d7c10ac"} Jan 28 16:00:26 crc kubenswrapper[4811]: I0128 16:00:26.270176 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="metallb-system/metallb-operator-controller-manager-6fffdf7c57-wb2qh" Jan 28 16:00:26 crc kubenswrapper[4811]: I0128 16:00:26.291487 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-6fffdf7c57-wb2qh" podStartSLOduration=1.51875991 podStartE2EDuration="5.291470592s" podCreationTimestamp="2026-01-28 16:00:21 +0000 UTC" firstStartedPulling="2026-01-28 16:00:21.816496026 +0000 UTC m=+914.570859609" lastFinishedPulling="2026-01-28 16:00:25.589206708 +0000 UTC m=+918.343570291" observedRunningTime="2026-01-28 16:00:26.287271139 +0000 UTC m=+919.041634722" watchObservedRunningTime="2026-01-28 16:00:26.291470592 +0000 UTC m=+919.045834175" Jan 28 16:00:28 crc kubenswrapper[4811]: I0128 16:00:28.276179 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-75fc857c96-mkr4b" event={"ID":"78d59be8-2ce7-49b2-80ed-42ec32101487","Type":"ContainerStarted","Data":"ff27e68cccec4cb5663a0567ca1721f44ae651f5f286c716c2b4ee6d75f812ed"} Jan 28 16:00:28 crc kubenswrapper[4811]: I0128 16:00:28.276580 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-75fc857c96-mkr4b" Jan 28 16:00:28 crc kubenswrapper[4811]: I0128 16:00:28.297536 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-75fc857c96-mkr4b" podStartSLOduration=2.128237605 podStartE2EDuration="7.297517252s" podCreationTimestamp="2026-01-28 16:00:21 +0000 UTC" firstStartedPulling="2026-01-28 16:00:22.140195884 +0000 UTC m=+914.894559457" lastFinishedPulling="2026-01-28 16:00:27.309475531 +0000 UTC m=+920.063839104" observedRunningTime="2026-01-28 16:00:28.290809561 +0000 UTC m=+921.045173154" watchObservedRunningTime="2026-01-28 16:00:28.297517252 +0000 UTC m=+921.051880835" Jan 28 16:00:33 crc kubenswrapper[4811]: I0128 16:00:33.087428 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 16:00:33 crc kubenswrapper[4811]: I0128 16:00:33.087808 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 16:00:41 crc kubenswrapper[4811]: I0128 16:00:41.940482 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-75fc857c96-mkr4b" Jan 28 16:00:57 crc kubenswrapper[4811]: I0128 16:00:57.491540 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5shzv"] Jan 28 16:00:57 crc kubenswrapper[4811]: I0128 16:00:57.493594 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5shzv" Jan 28 16:00:57 crc kubenswrapper[4811]: I0128 16:00:57.502909 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5shzv"] Jan 28 16:00:57 crc kubenswrapper[4811]: I0128 16:00:57.563601 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5dbf7432-5c23-4105-83ea-64eff4879ff5-utilities\") pod \"community-operators-5shzv\" (UID: \"5dbf7432-5c23-4105-83ea-64eff4879ff5\") " pod="openshift-marketplace/community-operators-5shzv" Jan 28 16:00:57 crc kubenswrapper[4811]: I0128 16:00:57.563916 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5dbf7432-5c23-4105-83ea-64eff4879ff5-catalog-content\") pod \"community-operators-5shzv\" (UID: \"5dbf7432-5c23-4105-83ea-64eff4879ff5\") " pod="openshift-marketplace/community-operators-5shzv" Jan 28 16:00:57 crc kubenswrapper[4811]: I0128 16:00:57.563944 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4v7rn\" (UniqueName: \"kubernetes.io/projected/5dbf7432-5c23-4105-83ea-64eff4879ff5-kube-api-access-4v7rn\") pod \"community-operators-5shzv\" (UID: \"5dbf7432-5c23-4105-83ea-64eff4879ff5\") " pod="openshift-marketplace/community-operators-5shzv" Jan 28 16:00:57 crc kubenswrapper[4811]: I0128 16:00:57.664595 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5dbf7432-5c23-4105-83ea-64eff4879ff5-utilities\") pod \"community-operators-5shzv\" (UID: \"5dbf7432-5c23-4105-83ea-64eff4879ff5\") " pod="openshift-marketplace/community-operators-5shzv" Jan 28 16:00:57 crc kubenswrapper[4811]: I0128 16:00:57.664642 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5dbf7432-5c23-4105-83ea-64eff4879ff5-catalog-content\") pod \"community-operators-5shzv\" (UID: \"5dbf7432-5c23-4105-83ea-64eff4879ff5\") " pod="openshift-marketplace/community-operators-5shzv" Jan 28 16:00:57 crc kubenswrapper[4811]: I0128 16:00:57.664681 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4v7rn\" (UniqueName: \"kubernetes.io/projected/5dbf7432-5c23-4105-83ea-64eff4879ff5-kube-api-access-4v7rn\") pod \"community-operators-5shzv\" (UID: \"5dbf7432-5c23-4105-83ea-64eff4879ff5\") " pod="openshift-marketplace/community-operators-5shzv" Jan 28 16:00:57 crc kubenswrapper[4811]: I0128 16:00:57.665064 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5dbf7432-5c23-4105-83ea-64eff4879ff5-utilities\") pod \"community-operators-5shzv\" (UID: \"5dbf7432-5c23-4105-83ea-64eff4879ff5\") " pod="openshift-marketplace/community-operators-5shzv" Jan 28 16:00:57 crc kubenswrapper[4811]: I0128 16:00:57.665096 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5dbf7432-5c23-4105-83ea-64eff4879ff5-catalog-content\") pod \"community-operators-5shzv\" (UID: \"5dbf7432-5c23-4105-83ea-64eff4879ff5\") " pod="openshift-marketplace/community-operators-5shzv" Jan 28 16:00:57 crc kubenswrapper[4811]: I0128 16:00:57.684196 4811 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-4v7rn\" (UniqueName: \"kubernetes.io/projected/5dbf7432-5c23-4105-83ea-64eff4879ff5-kube-api-access-4v7rn\") pod \"community-operators-5shzv\" (UID: \"5dbf7432-5c23-4105-83ea-64eff4879ff5\") " pod="openshift-marketplace/community-operators-5shzv" Jan 28 16:00:57 crc kubenswrapper[4811]: I0128 16:00:57.812598 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5shzv" Jan 28 16:00:58 crc kubenswrapper[4811]: I0128 16:00:58.108368 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5shzv"] Jan 28 16:00:58 crc kubenswrapper[4811]: I0128 16:00:58.448190 4811 generic.go:334] "Generic (PLEG): container finished" podID="5dbf7432-5c23-4105-83ea-64eff4879ff5" containerID="c6b358782660896b262fcd79baa547e52359583bb512277737293297a648dd18" exitCode=0 Jan 28 16:00:58 crc kubenswrapper[4811]: I0128 16:00:58.448235 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5shzv" event={"ID":"5dbf7432-5c23-4105-83ea-64eff4879ff5","Type":"ContainerDied","Data":"c6b358782660896b262fcd79baa547e52359583bb512277737293297a648dd18"} Jan 28 16:00:58 crc kubenswrapper[4811]: I0128 16:00:58.448257 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5shzv" event={"ID":"5dbf7432-5c23-4105-83ea-64eff4879ff5","Type":"ContainerStarted","Data":"6cb6cb28dbd050523617191e4aa299a5e5bfba8a122298f3e606ff4d62f31052"} Jan 28 16:01:00 crc kubenswrapper[4811]: I0128 16:01:00.465035 4811 generic.go:334] "Generic (PLEG): container finished" podID="5dbf7432-5c23-4105-83ea-64eff4879ff5" containerID="9c9f3b7b043057ac9f47113eebfc1637b14605f7cd249a45a31e1a5447d59094" exitCode=0 Jan 28 16:01:00 crc kubenswrapper[4811]: I0128 16:01:00.465121 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5shzv" event={"ID":"5dbf7432-5c23-4105-83ea-64eff4879ff5","Type":"ContainerDied","Data":"9c9f3b7b043057ac9f47113eebfc1637b14605f7cd249a45a31e1a5447d59094"} Jan 28 16:01:01 crc kubenswrapper[4811]: I0128 16:01:01.473033 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5shzv" event={"ID":"5dbf7432-5c23-4105-83ea-64eff4879ff5","Type":"ContainerStarted","Data":"eac5d7da741e887273c4029ce70355bcb0070a5422112ef51a55953f5f04cac9"} Jan 28 16:01:01 crc kubenswrapper[4811]: I0128 16:01:01.491016 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5shzv" podStartSLOduration=2.025274634 podStartE2EDuration="4.491000395s" podCreationTimestamp="2026-01-28 16:00:57 +0000 UTC" firstStartedPulling="2026-01-28 16:00:58.449541378 +0000 UTC m=+951.203904971" lastFinishedPulling="2026-01-28 16:01:00.915267139 +0000 UTC m=+953.669630732" observedRunningTime="2026-01-28 16:01:01.488653842 +0000 UTC m=+954.243017445" watchObservedRunningTime="2026-01-28 16:01:01.491000395 +0000 UTC m=+954.245363978" Jan 28 16:01:01 crc kubenswrapper[4811]: I0128 16:01:01.568422 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-6fffdf7c57-wb2qh" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.326304 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-jn9r9"] Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.330014 4811 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-jn9r9" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.333203 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-kxvh5"] Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.333466 4811 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-bnpfq" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.333730 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.333739 4811 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.334244 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-kxvh5" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.340142 4811 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.359948 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-kxvh5"] Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.431791 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pphzg\" (UniqueName: \"kubernetes.io/projected/65b6bd46-8c0d-449a-a7b6-42c8c7933f64-kube-api-access-pphzg\") pod \"frr-k8s-jn9r9\" (UID: \"65b6bd46-8c0d-449a-a7b6-42c8c7933f64\") " pod="metallb-system/frr-k8s-jn9r9" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.431835 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/65b6bd46-8c0d-449a-a7b6-42c8c7933f64-frr-sockets\") pod \"frr-k8s-jn9r9\" (UID: \"65b6bd46-8c0d-449a-a7b6-42c8c7933f64\") " pod="metallb-system/frr-k8s-jn9r9" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.431859 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/65b6bd46-8c0d-449a-a7b6-42c8c7933f64-frr-startup\") pod \"frr-k8s-jn9r9\" (UID: \"65b6bd46-8c0d-449a-a7b6-42c8c7933f64\") " pod="metallb-system/frr-k8s-jn9r9" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.432444 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/65b6bd46-8c0d-449a-a7b6-42c8c7933f64-reloader\") pod \"frr-k8s-jn9r9\" (UID: \"65b6bd46-8c0d-449a-a7b6-42c8c7933f64\") " pod="metallb-system/frr-k8s-jn9r9" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.432475 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/65b6bd46-8c0d-449a-a7b6-42c8c7933f64-metrics\") pod \"frr-k8s-jn9r9\" (UID: \"65b6bd46-8c0d-449a-a7b6-42c8c7933f64\") " pod="metallb-system/frr-k8s-jn9r9" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.432495 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/65b6bd46-8c0d-449a-a7b6-42c8c7933f64-metrics-certs\") pod \"frr-k8s-jn9r9\" (UID: 
\"65b6bd46-8c0d-449a-a7b6-42c8c7933f64\") " pod="metallb-system/frr-k8s-jn9r9" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.432585 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/65b6bd46-8c0d-449a-a7b6-42c8c7933f64-frr-conf\") pod \"frr-k8s-jn9r9\" (UID: \"65b6bd46-8c0d-449a-a7b6-42c8c7933f64\") " pod="metallb-system/frr-k8s-jn9r9" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.448582 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-642th"] Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.449401 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-642th" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.451684 4811 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.452194 4811 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-qwgzs" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.452338 4811 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.452793 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.473216 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-6968d8fdc4-br9ll"] Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.474549 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6968d8fdc4-br9ll" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.478769 4811 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.497216 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-br9ll"] Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.534385 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0039a23f-73c0-40c3-9c79-3d747d7d9896-cert\") pod \"controller-6968d8fdc4-br9ll\" (UID: \"0039a23f-73c0-40c3-9c79-3d747d7d9896\") " pod="metallb-system/controller-6968d8fdc4-br9ll" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.534666 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/28b5a61d-bf80-4508-bb79-a6791d1c6d57-metallb-excludel2\") pod \"speaker-642th\" (UID: \"28b5a61d-bf80-4508-bb79-a6791d1c6d57\") " pod="metallb-system/speaker-642th" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.534688 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/65b6bd46-8c0d-449a-a7b6-42c8c7933f64-frr-conf\") pod \"frr-k8s-jn9r9\" (UID: \"65b6bd46-8c0d-449a-a7b6-42c8c7933f64\") " pod="metallb-system/frr-k8s-jn9r9" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.534723 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0039a23f-73c0-40c3-9c79-3d747d7d9896-metrics-certs\") pod 
\"controller-6968d8fdc4-br9ll\" (UID: \"0039a23f-73c0-40c3-9c79-3d747d7d9896\") " pod="metallb-system/controller-6968d8fdc4-br9ll" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.534761 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pphzg\" (UniqueName: \"kubernetes.io/projected/65b6bd46-8c0d-449a-a7b6-42c8c7933f64-kube-api-access-pphzg\") pod \"frr-k8s-jn9r9\" (UID: \"65b6bd46-8c0d-449a-a7b6-42c8c7933f64\") " pod="metallb-system/frr-k8s-jn9r9" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.534782 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/65b6bd46-8c0d-449a-a7b6-42c8c7933f64-frr-sockets\") pod \"frr-k8s-jn9r9\" (UID: \"65b6bd46-8c0d-449a-a7b6-42c8c7933f64\") " pod="metallb-system/frr-k8s-jn9r9" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.534814 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/65b6bd46-8c0d-449a-a7b6-42c8c7933f64-frr-startup\") pod \"frr-k8s-jn9r9\" (UID: \"65b6bd46-8c0d-449a-a7b6-42c8c7933f64\") " pod="metallb-system/frr-k8s-jn9r9" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.534830 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmzgm\" (UniqueName: \"kubernetes.io/projected/0039a23f-73c0-40c3-9c79-3d747d7d9896-kube-api-access-bmzgm\") pod \"controller-6968d8fdc4-br9ll\" (UID: \"0039a23f-73c0-40c3-9c79-3d747d7d9896\") " pod="metallb-system/controller-6968d8fdc4-br9ll" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.534850 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/28b5a61d-bf80-4508-bb79-a6791d1c6d57-metrics-certs\") pod \"speaker-642th\" (UID: \"28b5a61d-bf80-4508-bb79-a6791d1c6d57\") " pod="metallb-system/speaker-642th" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.534869 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/65b6bd46-8c0d-449a-a7b6-42c8c7933f64-reloader\") pod \"frr-k8s-jn9r9\" (UID: \"65b6bd46-8c0d-449a-a7b6-42c8c7933f64\") " pod="metallb-system/frr-k8s-jn9r9" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.534892 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f1bfcb26-2369-429c-bf4d-1c4234d9c28c-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-kxvh5\" (UID: \"f1bfcb26-2369-429c-bf4d-1c4234d9c28c\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-kxvh5" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.534910 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/65b6bd46-8c0d-449a-a7b6-42c8c7933f64-metrics\") pod \"frr-k8s-jn9r9\" (UID: \"65b6bd46-8c0d-449a-a7b6-42c8c7933f64\") " pod="metallb-system/frr-k8s-jn9r9" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.534929 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/28b5a61d-bf80-4508-bb79-a6791d1c6d57-memberlist\") pod \"speaker-642th\" (UID: \"28b5a61d-bf80-4508-bb79-a6791d1c6d57\") " pod="metallb-system/speaker-642th" Jan 28 16:01:02 crc kubenswrapper[4811]: 
I0128 16:01:02.534951 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lphbn\" (UniqueName: \"kubernetes.io/projected/f1bfcb26-2369-429c-bf4d-1c4234d9c28c-kube-api-access-lphbn\") pod \"frr-k8s-webhook-server-7df86c4f6c-kxvh5\" (UID: \"f1bfcb26-2369-429c-bf4d-1c4234d9c28c\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-kxvh5" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.534974 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/65b6bd46-8c0d-449a-a7b6-42c8c7933f64-metrics-certs\") pod \"frr-k8s-jn9r9\" (UID: \"65b6bd46-8c0d-449a-a7b6-42c8c7933f64\") " pod="metallb-system/frr-k8s-jn9r9" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.534991 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8dwm\" (UniqueName: \"kubernetes.io/projected/28b5a61d-bf80-4508-bb79-a6791d1c6d57-kube-api-access-s8dwm\") pod \"speaker-642th\" (UID: \"28b5a61d-bf80-4508-bb79-a6791d1c6d57\") " pod="metallb-system/speaker-642th" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.535406 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/65b6bd46-8c0d-449a-a7b6-42c8c7933f64-frr-conf\") pod \"frr-k8s-jn9r9\" (UID: \"65b6bd46-8c0d-449a-a7b6-42c8c7933f64\") " pod="metallb-system/frr-k8s-jn9r9" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.536226 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/65b6bd46-8c0d-449a-a7b6-42c8c7933f64-frr-sockets\") pod \"frr-k8s-jn9r9\" (UID: \"65b6bd46-8c0d-449a-a7b6-42c8c7933f64\") " pod="metallb-system/frr-k8s-jn9r9" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.537327 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/65b6bd46-8c0d-449a-a7b6-42c8c7933f64-frr-startup\") pod \"frr-k8s-jn9r9\" (UID: \"65b6bd46-8c0d-449a-a7b6-42c8c7933f64\") " pod="metallb-system/frr-k8s-jn9r9" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.537574 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/65b6bd46-8c0d-449a-a7b6-42c8c7933f64-reloader\") pod \"frr-k8s-jn9r9\" (UID: \"65b6bd46-8c0d-449a-a7b6-42c8c7933f64\") " pod="metallb-system/frr-k8s-jn9r9" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.537758 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/65b6bd46-8c0d-449a-a7b6-42c8c7933f64-metrics\") pod \"frr-k8s-jn9r9\" (UID: \"65b6bd46-8c0d-449a-a7b6-42c8c7933f64\") " pod="metallb-system/frr-k8s-jn9r9" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.548120 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/65b6bd46-8c0d-449a-a7b6-42c8c7933f64-metrics-certs\") pod \"frr-k8s-jn9r9\" (UID: \"65b6bd46-8c0d-449a-a7b6-42c8c7933f64\") " pod="metallb-system/frr-k8s-jn9r9" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.550026 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pphzg\" (UniqueName: \"kubernetes.io/projected/65b6bd46-8c0d-449a-a7b6-42c8c7933f64-kube-api-access-pphzg\") pod \"frr-k8s-jn9r9\" (UID: 
\"65b6bd46-8c0d-449a-a7b6-42c8c7933f64\") " pod="metallb-system/frr-k8s-jn9r9" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.635753 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f1bfcb26-2369-429c-bf4d-1c4234d9c28c-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-kxvh5\" (UID: \"f1bfcb26-2369-429c-bf4d-1c4234d9c28c\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-kxvh5" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.635812 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/28b5a61d-bf80-4508-bb79-a6791d1c6d57-memberlist\") pod \"speaker-642th\" (UID: \"28b5a61d-bf80-4508-bb79-a6791d1c6d57\") " pod="metallb-system/speaker-642th" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.635841 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lphbn\" (UniqueName: \"kubernetes.io/projected/f1bfcb26-2369-429c-bf4d-1c4234d9c28c-kube-api-access-lphbn\") pod \"frr-k8s-webhook-server-7df86c4f6c-kxvh5\" (UID: \"f1bfcb26-2369-429c-bf4d-1c4234d9c28c\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-kxvh5" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.635873 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8dwm\" (UniqueName: \"kubernetes.io/projected/28b5a61d-bf80-4508-bb79-a6791d1c6d57-kube-api-access-s8dwm\") pod \"speaker-642th\" (UID: \"28b5a61d-bf80-4508-bb79-a6791d1c6d57\") " pod="metallb-system/speaker-642th" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.635935 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0039a23f-73c0-40c3-9c79-3d747d7d9896-cert\") pod \"controller-6968d8fdc4-br9ll\" (UID: \"0039a23f-73c0-40c3-9c79-3d747d7d9896\") " pod="metallb-system/controller-6968d8fdc4-br9ll" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.635964 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/28b5a61d-bf80-4508-bb79-a6791d1c6d57-metallb-excludel2\") pod \"speaker-642th\" (UID: \"28b5a61d-bf80-4508-bb79-a6791d1c6d57\") " pod="metallb-system/speaker-642th" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.636006 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0039a23f-73c0-40c3-9c79-3d747d7d9896-metrics-certs\") pod \"controller-6968d8fdc4-br9ll\" (UID: \"0039a23f-73c0-40c3-9c79-3d747d7d9896\") " pod="metallb-system/controller-6968d8fdc4-br9ll" Jan 28 16:01:02 crc kubenswrapper[4811]: E0128 16:01:02.636011 4811 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.636069 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmzgm\" (UniqueName: \"kubernetes.io/projected/0039a23f-73c0-40c3-9c79-3d747d7d9896-kube-api-access-bmzgm\") pod \"controller-6968d8fdc4-br9ll\" (UID: \"0039a23f-73c0-40c3-9c79-3d747d7d9896\") " pod="metallb-system/controller-6968d8fdc4-br9ll" Jan 28 16:01:02 crc kubenswrapper[4811]: E0128 16:01:02.636102 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28b5a61d-bf80-4508-bb79-a6791d1c6d57-memberlist 
podName:28b5a61d-bf80-4508-bb79-a6791d1c6d57 nodeName:}" failed. No retries permitted until 2026-01-28 16:01:03.136078286 +0000 UTC m=+955.890441889 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/28b5a61d-bf80-4508-bb79-a6791d1c6d57-memberlist") pod "speaker-642th" (UID: "28b5a61d-bf80-4508-bb79-a6791d1c6d57") : secret "metallb-memberlist" not found Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.636133 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/28b5a61d-bf80-4508-bb79-a6791d1c6d57-metrics-certs\") pod \"speaker-642th\" (UID: \"28b5a61d-bf80-4508-bb79-a6791d1c6d57\") " pod="metallb-system/speaker-642th" Jan 28 16:01:02 crc kubenswrapper[4811]: E0128 16:01:02.636239 4811 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found Jan 28 16:01:02 crc kubenswrapper[4811]: E0128 16:01:02.636320 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0039a23f-73c0-40c3-9c79-3d747d7d9896-metrics-certs podName:0039a23f-73c0-40c3-9c79-3d747d7d9896 nodeName:}" failed. No retries permitted until 2026-01-28 16:01:03.136297282 +0000 UTC m=+955.890660915 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0039a23f-73c0-40c3-9c79-3d747d7d9896-metrics-certs") pod "controller-6968d8fdc4-br9ll" (UID: "0039a23f-73c0-40c3-9c79-3d747d7d9896") : secret "controller-certs-secret" not found Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.637025 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/28b5a61d-bf80-4508-bb79-a6791d1c6d57-metallb-excludel2\") pod \"speaker-642th\" (UID: \"28b5a61d-bf80-4508-bb79-a6791d1c6d57\") " pod="metallb-system/speaker-642th" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.639155 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/28b5a61d-bf80-4508-bb79-a6791d1c6d57-metrics-certs\") pod \"speaker-642th\" (UID: \"28b5a61d-bf80-4508-bb79-a6791d1c6d57\") " pod="metallb-system/speaker-642th" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.641414 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f1bfcb26-2369-429c-bf4d-1c4234d9c28c-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-kxvh5\" (UID: \"f1bfcb26-2369-429c-bf4d-1c4234d9c28c\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-kxvh5" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.653885 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-jn9r9" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.654075 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0039a23f-73c0-40c3-9c79-3d747d7d9896-cert\") pod \"controller-6968d8fdc4-br9ll\" (UID: \"0039a23f-73c0-40c3-9c79-3d747d7d9896\") " pod="metallb-system/controller-6968d8fdc4-br9ll" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.658931 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lphbn\" (UniqueName: \"kubernetes.io/projected/f1bfcb26-2369-429c-bf4d-1c4234d9c28c-kube-api-access-lphbn\") pod \"frr-k8s-webhook-server-7df86c4f6c-kxvh5\" (UID: \"f1bfcb26-2369-429c-bf4d-1c4234d9c28c\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-kxvh5" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.660992 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-kxvh5" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.667222 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8dwm\" (UniqueName: \"kubernetes.io/projected/28b5a61d-bf80-4508-bb79-a6791d1c6d57-kube-api-access-s8dwm\") pod \"speaker-642th\" (UID: \"28b5a61d-bf80-4508-bb79-a6791d1c6d57\") " pod="metallb-system/speaker-642th" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.676563 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmzgm\" (UniqueName: \"kubernetes.io/projected/0039a23f-73c0-40c3-9c79-3d747d7d9896-kube-api-access-bmzgm\") pod \"controller-6968d8fdc4-br9ll\" (UID: \"0039a23f-73c0-40c3-9c79-3d747d7d9896\") " pod="metallb-system/controller-6968d8fdc4-br9ll" Jan 28 16:01:02 crc kubenswrapper[4811]: I0128 16:01:02.870638 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-kxvh5"] Jan 28 16:01:02 crc kubenswrapper[4811]: W0128 16:01:02.874934 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf1bfcb26_2369_429c_bf4d_1c4234d9c28c.slice/crio-71ae5cbd672872971a776a8dc722f187d38f89879d8978aa56f2b7a5ea867bd4 WatchSource:0}: Error finding container 71ae5cbd672872971a776a8dc722f187d38f89879d8978aa56f2b7a5ea867bd4: Status 404 returned error can't find the container with id 71ae5cbd672872971a776a8dc722f187d38f89879d8978aa56f2b7a5ea867bd4 Jan 28 16:01:03 crc kubenswrapper[4811]: I0128 16:01:03.087514 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 16:01:03 crc kubenswrapper[4811]: I0128 16:01:03.087583 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 16:01:03 crc kubenswrapper[4811]: I0128 16:01:03.087628 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" Jan 28 16:01:03 crc kubenswrapper[4811]: I0128 16:01:03.088088 4811 
kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"eabdb53100645a7db124d74211393e44eb29247556832b21f6981f388ac52c1c"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 28 16:01:03 crc kubenswrapper[4811]: I0128 16:01:03.088153 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://eabdb53100645a7db124d74211393e44eb29247556832b21f6981f388ac52c1c" gracePeriod=600 Jan 28 16:01:03 crc kubenswrapper[4811]: I0128 16:01:03.148297 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/28b5a61d-bf80-4508-bb79-a6791d1c6d57-memberlist\") pod \"speaker-642th\" (UID: \"28b5a61d-bf80-4508-bb79-a6791d1c6d57\") " pod="metallb-system/speaker-642th" Jan 28 16:01:03 crc kubenswrapper[4811]: I0128 16:01:03.148484 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0039a23f-73c0-40c3-9c79-3d747d7d9896-metrics-certs\") pod \"controller-6968d8fdc4-br9ll\" (UID: \"0039a23f-73c0-40c3-9c79-3d747d7d9896\") " pod="metallb-system/controller-6968d8fdc4-br9ll" Jan 28 16:01:03 crc kubenswrapper[4811]: E0128 16:01:03.148518 4811 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Jan 28 16:01:03 crc kubenswrapper[4811]: E0128 16:01:03.148594 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28b5a61d-bf80-4508-bb79-a6791d1c6d57-memberlist podName:28b5a61d-bf80-4508-bb79-a6791d1c6d57 nodeName:}" failed. No retries permitted until 2026-01-28 16:01:04.148575119 +0000 UTC m=+956.902938712 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/28b5a61d-bf80-4508-bb79-a6791d1c6d57-memberlist") pod "speaker-642th" (UID: "28b5a61d-bf80-4508-bb79-a6791d1c6d57") : secret "metallb-memberlist" not found Jan 28 16:01:03 crc kubenswrapper[4811]: I0128 16:01:03.153393 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0039a23f-73c0-40c3-9c79-3d747d7d9896-metrics-certs\") pod \"controller-6968d8fdc4-br9ll\" (UID: \"0039a23f-73c0-40c3-9c79-3d747d7d9896\") " pod="metallb-system/controller-6968d8fdc4-br9ll" Jan 28 16:01:03 crc kubenswrapper[4811]: I0128 16:01:03.390742 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-6968d8fdc4-br9ll" Jan 28 16:01:03 crc kubenswrapper[4811]: I0128 16:01:03.483847 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-kxvh5" event={"ID":"f1bfcb26-2369-429c-bf4d-1c4234d9c28c","Type":"ContainerStarted","Data":"71ae5cbd672872971a776a8dc722f187d38f89879d8978aa56f2b7a5ea867bd4"} Jan 28 16:01:03 crc kubenswrapper[4811]: I0128 16:01:03.484642 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jn9r9" event={"ID":"65b6bd46-8c0d-449a-a7b6-42c8c7933f64","Type":"ContainerStarted","Data":"ee916bd8235e82d54878f6cf667350eb2b26f8b0d3b4bbf115acac51de4d33b0"} Jan 28 16:01:03 crc kubenswrapper[4811]: I0128 16:01:03.487225 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="eabdb53100645a7db124d74211393e44eb29247556832b21f6981f388ac52c1c" exitCode=0 Jan 28 16:01:03 crc kubenswrapper[4811]: I0128 16:01:03.487267 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"eabdb53100645a7db124d74211393e44eb29247556832b21f6981f388ac52c1c"} Jan 28 16:01:03 crc kubenswrapper[4811]: I0128 16:01:03.487295 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"ced9aabb8b9dead9319c14576d709e760cf8c817715a608c130685f02a1c32b6"} Jan 28 16:01:03 crc kubenswrapper[4811]: I0128 16:01:03.487311 4811 scope.go:117] "RemoveContainer" containerID="27340b9d5e1d8d5e0d3aff5e91d7f338c280cdca6a861ea86d43ab3789100f43" Jan 28 16:01:03 crc kubenswrapper[4811]: I0128 16:01:03.665857 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-br9ll"] Jan 28 16:01:04 crc kubenswrapper[4811]: I0128 16:01:04.160572 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/28b5a61d-bf80-4508-bb79-a6791d1c6d57-memberlist\") pod \"speaker-642th\" (UID: \"28b5a61d-bf80-4508-bb79-a6791d1c6d57\") " pod="metallb-system/speaker-642th" Jan 28 16:01:04 crc kubenswrapper[4811]: I0128 16:01:04.166234 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/28b5a61d-bf80-4508-bb79-a6791d1c6d57-memberlist\") pod \"speaker-642th\" (UID: \"28b5a61d-bf80-4508-bb79-a6791d1c6d57\") " pod="metallb-system/speaker-642th" Jan 28 16:01:04 crc kubenswrapper[4811]: I0128 16:01:04.261973 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-642th" Jan 28 16:01:04 crc kubenswrapper[4811]: W0128 16:01:04.280400 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod28b5a61d_bf80_4508_bb79_a6791d1c6d57.slice/crio-dc3d1a57d27407b8a425092bd529b7d0ab44388d18a4d9fd79338667ee214cb8 WatchSource:0}: Error finding container dc3d1a57d27407b8a425092bd529b7d0ab44388d18a4d9fd79338667ee214cb8: Status 404 returned error can't find the container with id dc3d1a57d27407b8a425092bd529b7d0ab44388d18a4d9fd79338667ee214cb8 Jan 28 16:01:04 crc kubenswrapper[4811]: I0128 16:01:04.503476 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-br9ll" event={"ID":"0039a23f-73c0-40c3-9c79-3d747d7d9896","Type":"ContainerStarted","Data":"abcae66e7b2a4e67ce601556910251ba94485b4f26a1a4e2004c13517cd577a2"} Jan 28 16:01:04 crc kubenswrapper[4811]: I0128 16:01:04.505092 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-6968d8fdc4-br9ll" Jan 28 16:01:04 crc kubenswrapper[4811]: I0128 16:01:04.505108 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-br9ll" event={"ID":"0039a23f-73c0-40c3-9c79-3d747d7d9896","Type":"ContainerStarted","Data":"e14dd1e79e41bd03259cb41bbe80459d3d7eae87ae0d6a06a1072f370d2e79e5"} Jan 28 16:01:04 crc kubenswrapper[4811]: I0128 16:01:04.505151 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-br9ll" event={"ID":"0039a23f-73c0-40c3-9c79-3d747d7d9896","Type":"ContainerStarted","Data":"09c890b962406e74f21a7265e46472ea4796d4a6ad69301da9da2cb96f7324df"} Jan 28 16:01:04 crc kubenswrapper[4811]: I0128 16:01:04.506764 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-642th" event={"ID":"28b5a61d-bf80-4508-bb79-a6791d1c6d57","Type":"ContainerStarted","Data":"b08e6f5e0bf00c301fad0a75a901d2d61d23b23c527114ae47349af6797fc505"} Jan 28 16:01:04 crc kubenswrapper[4811]: I0128 16:01:04.506812 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-642th" event={"ID":"28b5a61d-bf80-4508-bb79-a6791d1c6d57","Type":"ContainerStarted","Data":"dc3d1a57d27407b8a425092bd529b7d0ab44388d18a4d9fd79338667ee214cb8"} Jan 28 16:01:04 crc kubenswrapper[4811]: I0128 16:01:04.523836 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-6968d8fdc4-br9ll" podStartSLOduration=2.523814829 podStartE2EDuration="2.523814829s" podCreationTimestamp="2026-01-28 16:01:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:01:04.521793874 +0000 UTC m=+957.276157457" watchObservedRunningTime="2026-01-28 16:01:04.523814829 +0000 UTC m=+957.278178422" Jan 28 16:01:05 crc kubenswrapper[4811]: I0128 16:01:05.516149 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-642th" event={"ID":"28b5a61d-bf80-4508-bb79-a6791d1c6d57","Type":"ContainerStarted","Data":"9b847d8af92fd78bfeb70637ed2096bbd91d775242a59c45190f7e37afb5cc8e"} Jan 28 16:01:05 crc kubenswrapper[4811]: I0128 16:01:05.543706 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-642th" podStartSLOduration=3.543689747 podStartE2EDuration="3.543689747s" podCreationTimestamp="2026-01-28 16:01:02 +0000 UTC" firstStartedPulling="0001-01-01 
00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:01:05.53526508 +0000 UTC m=+958.289628663" watchObservedRunningTime="2026-01-28 16:01:05.543689747 +0000 UTC m=+958.298053330" Jan 28 16:01:06 crc kubenswrapper[4811]: I0128 16:01:06.521551 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-642th" Jan 28 16:01:07 crc kubenswrapper[4811]: I0128 16:01:07.813558 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-5shzv" Jan 28 16:01:07 crc kubenswrapper[4811]: I0128 16:01:07.814044 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-5shzv" Jan 28 16:01:07 crc kubenswrapper[4811]: I0128 16:01:07.858952 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5shzv" Jan 28 16:01:08 crc kubenswrapper[4811]: I0128 16:01:08.592765 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5shzv" Jan 28 16:01:08 crc kubenswrapper[4811]: I0128 16:01:08.645366 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5shzv"] Jan 28 16:01:10 crc kubenswrapper[4811]: I0128 16:01:10.568592 4811 generic.go:334] "Generic (PLEG): container finished" podID="65b6bd46-8c0d-449a-a7b6-42c8c7933f64" containerID="705133c4ef30827b8e9fd60d231d88664cd385fac966dfeb8e61274ab2a68f42" exitCode=0 Jan 28 16:01:10 crc kubenswrapper[4811]: I0128 16:01:10.568653 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jn9r9" event={"ID":"65b6bd46-8c0d-449a-a7b6-42c8c7933f64","Type":"ContainerDied","Data":"705133c4ef30827b8e9fd60d231d88664cd385fac966dfeb8e61274ab2a68f42"} Jan 28 16:01:10 crc kubenswrapper[4811]: I0128 16:01:10.571335 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-5shzv" podUID="5dbf7432-5c23-4105-83ea-64eff4879ff5" containerName="registry-server" containerID="cri-o://eac5d7da741e887273c4029ce70355bcb0070a5422112ef51a55953f5f04cac9" gracePeriod=2 Jan 28 16:01:10 crc kubenswrapper[4811]: I0128 16:01:10.572247 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-kxvh5" event={"ID":"f1bfcb26-2369-429c-bf4d-1c4234d9c28c","Type":"ContainerStarted","Data":"7a9671103bfa3dcbf0d90ef29d669143c9798906fe7035c79bbf21429fbf4db5"} Jan 28 16:01:10 crc kubenswrapper[4811]: I0128 16:01:10.572279 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-kxvh5" Jan 28 16:01:10 crc kubenswrapper[4811]: I0128 16:01:10.614777 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-kxvh5" podStartSLOduration=1.286453163 podStartE2EDuration="8.614750108s" podCreationTimestamp="2026-01-28 16:01:02 +0000 UTC" firstStartedPulling="2026-01-28 16:01:02.876949874 +0000 UTC m=+955.631313457" lastFinishedPulling="2026-01-28 16:01:10.205246809 +0000 UTC m=+962.959610402" observedRunningTime="2026-01-28 16:01:10.612479657 +0000 UTC m=+963.366843260" watchObservedRunningTime="2026-01-28 16:01:10.614750108 +0000 UTC m=+963.369113731" Jan 28 16:01:10 crc kubenswrapper[4811]: I0128 16:01:10.903247 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5shzv" Jan 28 16:01:11 crc kubenswrapper[4811]: I0128 16:01:11.058496 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5dbf7432-5c23-4105-83ea-64eff4879ff5-utilities\") pod \"5dbf7432-5c23-4105-83ea-64eff4879ff5\" (UID: \"5dbf7432-5c23-4105-83ea-64eff4879ff5\") " Jan 28 16:01:11 crc kubenswrapper[4811]: I0128 16:01:11.058576 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4v7rn\" (UniqueName: \"kubernetes.io/projected/5dbf7432-5c23-4105-83ea-64eff4879ff5-kube-api-access-4v7rn\") pod \"5dbf7432-5c23-4105-83ea-64eff4879ff5\" (UID: \"5dbf7432-5c23-4105-83ea-64eff4879ff5\") " Jan 28 16:01:11 crc kubenswrapper[4811]: I0128 16:01:11.058651 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5dbf7432-5c23-4105-83ea-64eff4879ff5-catalog-content\") pod \"5dbf7432-5c23-4105-83ea-64eff4879ff5\" (UID: \"5dbf7432-5c23-4105-83ea-64eff4879ff5\") " Jan 28 16:01:11 crc kubenswrapper[4811]: I0128 16:01:11.061851 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5dbf7432-5c23-4105-83ea-64eff4879ff5-utilities" (OuterVolumeSpecName: "utilities") pod "5dbf7432-5c23-4105-83ea-64eff4879ff5" (UID: "5dbf7432-5c23-4105-83ea-64eff4879ff5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:01:11 crc kubenswrapper[4811]: I0128 16:01:11.067749 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5dbf7432-5c23-4105-83ea-64eff4879ff5-kube-api-access-4v7rn" (OuterVolumeSpecName: "kube-api-access-4v7rn") pod "5dbf7432-5c23-4105-83ea-64eff4879ff5" (UID: "5dbf7432-5c23-4105-83ea-64eff4879ff5"). InnerVolumeSpecName "kube-api-access-4v7rn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:01:11 crc kubenswrapper[4811]: I0128 16:01:11.108589 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5dbf7432-5c23-4105-83ea-64eff4879ff5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5dbf7432-5c23-4105-83ea-64eff4879ff5" (UID: "5dbf7432-5c23-4105-83ea-64eff4879ff5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:01:11 crc kubenswrapper[4811]: I0128 16:01:11.161608 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5dbf7432-5c23-4105-83ea-64eff4879ff5-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 16:01:11 crc kubenswrapper[4811]: I0128 16:01:11.161659 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4v7rn\" (UniqueName: \"kubernetes.io/projected/5dbf7432-5c23-4105-83ea-64eff4879ff5-kube-api-access-4v7rn\") on node \"crc\" DevicePath \"\"" Jan 28 16:01:11 crc kubenswrapper[4811]: I0128 16:01:11.161673 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5dbf7432-5c23-4105-83ea-64eff4879ff5-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 16:01:11 crc kubenswrapper[4811]: I0128 16:01:11.578319 4811 generic.go:334] "Generic (PLEG): container finished" podID="5dbf7432-5c23-4105-83ea-64eff4879ff5" containerID="eac5d7da741e887273c4029ce70355bcb0070a5422112ef51a55953f5f04cac9" exitCode=0 Jan 28 16:01:11 crc kubenswrapper[4811]: I0128 16:01:11.578367 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5shzv" Jan 28 16:01:11 crc kubenswrapper[4811]: I0128 16:01:11.578423 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5shzv" event={"ID":"5dbf7432-5c23-4105-83ea-64eff4879ff5","Type":"ContainerDied","Data":"eac5d7da741e887273c4029ce70355bcb0070a5422112ef51a55953f5f04cac9"} Jan 28 16:01:11 crc kubenswrapper[4811]: I0128 16:01:11.578489 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5shzv" event={"ID":"5dbf7432-5c23-4105-83ea-64eff4879ff5","Type":"ContainerDied","Data":"6cb6cb28dbd050523617191e4aa299a5e5bfba8a122298f3e606ff4d62f31052"} Jan 28 16:01:11 crc kubenswrapper[4811]: I0128 16:01:11.578508 4811 scope.go:117] "RemoveContainer" containerID="eac5d7da741e887273c4029ce70355bcb0070a5422112ef51a55953f5f04cac9" Jan 28 16:01:11 crc kubenswrapper[4811]: I0128 16:01:11.580643 4811 generic.go:334] "Generic (PLEG): container finished" podID="65b6bd46-8c0d-449a-a7b6-42c8c7933f64" containerID="523d589d0b999d586c84573a6f5e62b45af087397788dc09177344390bcf087d" exitCode=0 Jan 28 16:01:11 crc kubenswrapper[4811]: I0128 16:01:11.580688 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jn9r9" event={"ID":"65b6bd46-8c0d-449a-a7b6-42c8c7933f64","Type":"ContainerDied","Data":"523d589d0b999d586c84573a6f5e62b45af087397788dc09177344390bcf087d"} Jan 28 16:01:11 crc kubenswrapper[4811]: I0128 16:01:11.607879 4811 scope.go:117] "RemoveContainer" containerID="9c9f3b7b043057ac9f47113eebfc1637b14605f7cd249a45a31e1a5447d59094" Jan 28 16:01:11 crc kubenswrapper[4811]: I0128 16:01:11.643857 4811 scope.go:117] "RemoveContainer" containerID="c6b358782660896b262fcd79baa547e52359583bb512277737293297a648dd18" Jan 28 16:01:11 crc kubenswrapper[4811]: I0128 16:01:11.669536 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5shzv"] Jan 28 16:01:11 crc kubenswrapper[4811]: I0128 16:01:11.679019 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-5shzv"] Jan 28 16:01:11 crc kubenswrapper[4811]: I0128 16:01:11.716680 4811 scope.go:117] "RemoveContainer" 
containerID="eac5d7da741e887273c4029ce70355bcb0070a5422112ef51a55953f5f04cac9" Jan 28 16:01:11 crc kubenswrapper[4811]: E0128 16:01:11.720555 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eac5d7da741e887273c4029ce70355bcb0070a5422112ef51a55953f5f04cac9\": container with ID starting with eac5d7da741e887273c4029ce70355bcb0070a5422112ef51a55953f5f04cac9 not found: ID does not exist" containerID="eac5d7da741e887273c4029ce70355bcb0070a5422112ef51a55953f5f04cac9" Jan 28 16:01:11 crc kubenswrapper[4811]: I0128 16:01:11.720602 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eac5d7da741e887273c4029ce70355bcb0070a5422112ef51a55953f5f04cac9"} err="failed to get container status \"eac5d7da741e887273c4029ce70355bcb0070a5422112ef51a55953f5f04cac9\": rpc error: code = NotFound desc = could not find container \"eac5d7da741e887273c4029ce70355bcb0070a5422112ef51a55953f5f04cac9\": container with ID starting with eac5d7da741e887273c4029ce70355bcb0070a5422112ef51a55953f5f04cac9 not found: ID does not exist" Jan 28 16:01:11 crc kubenswrapper[4811]: I0128 16:01:11.720641 4811 scope.go:117] "RemoveContainer" containerID="9c9f3b7b043057ac9f47113eebfc1637b14605f7cd249a45a31e1a5447d59094" Jan 28 16:01:11 crc kubenswrapper[4811]: E0128 16:01:11.724590 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c9f3b7b043057ac9f47113eebfc1637b14605f7cd249a45a31e1a5447d59094\": container with ID starting with 9c9f3b7b043057ac9f47113eebfc1637b14605f7cd249a45a31e1a5447d59094 not found: ID does not exist" containerID="9c9f3b7b043057ac9f47113eebfc1637b14605f7cd249a45a31e1a5447d59094" Jan 28 16:01:11 crc kubenswrapper[4811]: I0128 16:01:11.724639 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c9f3b7b043057ac9f47113eebfc1637b14605f7cd249a45a31e1a5447d59094"} err="failed to get container status \"9c9f3b7b043057ac9f47113eebfc1637b14605f7cd249a45a31e1a5447d59094\": rpc error: code = NotFound desc = could not find container \"9c9f3b7b043057ac9f47113eebfc1637b14605f7cd249a45a31e1a5447d59094\": container with ID starting with 9c9f3b7b043057ac9f47113eebfc1637b14605f7cd249a45a31e1a5447d59094 not found: ID does not exist" Jan 28 16:01:11 crc kubenswrapper[4811]: I0128 16:01:11.724670 4811 scope.go:117] "RemoveContainer" containerID="c6b358782660896b262fcd79baa547e52359583bb512277737293297a648dd18" Jan 28 16:01:11 crc kubenswrapper[4811]: E0128 16:01:11.725420 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6b358782660896b262fcd79baa547e52359583bb512277737293297a648dd18\": container with ID starting with c6b358782660896b262fcd79baa547e52359583bb512277737293297a648dd18 not found: ID does not exist" containerID="c6b358782660896b262fcd79baa547e52359583bb512277737293297a648dd18" Jan 28 16:01:11 crc kubenswrapper[4811]: I0128 16:01:11.725466 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6b358782660896b262fcd79baa547e52359583bb512277737293297a648dd18"} err="failed to get container status \"c6b358782660896b262fcd79baa547e52359583bb512277737293297a648dd18\": rpc error: code = NotFound desc = could not find container \"c6b358782660896b262fcd79baa547e52359583bb512277737293297a648dd18\": container with ID starting with 
c6b358782660896b262fcd79baa547e52359583bb512277737293297a648dd18 not found: ID does not exist" Jan 28 16:01:12 crc kubenswrapper[4811]: I0128 16:01:12.354190 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5dbf7432-5c23-4105-83ea-64eff4879ff5" path="/var/lib/kubelet/pods/5dbf7432-5c23-4105-83ea-64eff4879ff5/volumes" Jan 28 16:01:12 crc kubenswrapper[4811]: I0128 16:01:12.591165 4811 generic.go:334] "Generic (PLEG): container finished" podID="65b6bd46-8c0d-449a-a7b6-42c8c7933f64" containerID="fad6db629ce0052e98d6c04822c092851afbf30b0d75a4056ebce19ddc0ca1e6" exitCode=0 Jan 28 16:01:12 crc kubenswrapper[4811]: I0128 16:01:12.591231 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jn9r9" event={"ID":"65b6bd46-8c0d-449a-a7b6-42c8c7933f64","Type":"ContainerDied","Data":"fad6db629ce0052e98d6c04822c092851afbf30b0d75a4056ebce19ddc0ca1e6"} Jan 28 16:01:13 crc kubenswrapper[4811]: I0128 16:01:13.394345 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-6968d8fdc4-br9ll" Jan 28 16:01:13 crc kubenswrapper[4811]: I0128 16:01:13.600381 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jn9r9" event={"ID":"65b6bd46-8c0d-449a-a7b6-42c8c7933f64","Type":"ContainerStarted","Data":"bdc6212802b46dfc91b9cdab76b2c40f1c686b9930ea35920d99c940de3d7d61"} Jan 28 16:01:13 crc kubenswrapper[4811]: I0128 16:01:13.600425 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jn9r9" event={"ID":"65b6bd46-8c0d-449a-a7b6-42c8c7933f64","Type":"ContainerStarted","Data":"305e5186655e103d3e4ecba8a8a5a935665059cb08d77a38ec6a3f5149a88610"} Jan 28 16:01:13 crc kubenswrapper[4811]: I0128 16:01:13.600456 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jn9r9" event={"ID":"65b6bd46-8c0d-449a-a7b6-42c8c7933f64","Type":"ContainerStarted","Data":"4f718409346f38ce32604c1e0de89f16f129a58a738f3554dda325e3b447f5c6"} Jan 28 16:01:13 crc kubenswrapper[4811]: I0128 16:01:13.600466 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jn9r9" event={"ID":"65b6bd46-8c0d-449a-a7b6-42c8c7933f64","Type":"ContainerStarted","Data":"dc88352037844290c2de46e41980b5d5f1a4e24b23cf91345d2691d9156d4e4c"} Jan 28 16:01:13 crc kubenswrapper[4811]: I0128 16:01:13.600476 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jn9r9" event={"ID":"65b6bd46-8c0d-449a-a7b6-42c8c7933f64","Type":"ContainerStarted","Data":"dbb8c77aa11a664524171581260694638db8edc649371a7c38c3fd3e87839583"} Jan 28 16:01:14 crc kubenswrapper[4811]: I0128 16:01:14.266052 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-642th" Jan 28 16:01:14 crc kubenswrapper[4811]: I0128 16:01:14.614176 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jn9r9" event={"ID":"65b6bd46-8c0d-449a-a7b6-42c8c7933f64","Type":"ContainerStarted","Data":"fb2afe7c82aeadb653dad1a1d8f018ff0bd09ac4fdcf39197e4a694d94c1a98e"} Jan 28 16:01:14 crc kubenswrapper[4811]: I0128 16:01:14.614871 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-jn9r9" Jan 28 16:01:15 crc kubenswrapper[4811]: I0128 16:01:15.528202 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-jn9r9" podStartSLOduration=6.151811373 podStartE2EDuration="13.528181373s" podCreationTimestamp="2026-01-28 16:01:02 +0000 UTC" 
firstStartedPulling="2026-01-28 16:01:02.816627399 +0000 UTC m=+955.570990982" lastFinishedPulling="2026-01-28 16:01:10.192997379 +0000 UTC m=+962.947360982" observedRunningTime="2026-01-28 16:01:14.648906672 +0000 UTC m=+967.403270315" watchObservedRunningTime="2026-01-28 16:01:15.528181373 +0000 UTC m=+968.282544966" Jan 28 16:01:15 crc kubenswrapper[4811]: I0128 16:01:15.529214 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb"] Jan 28 16:01:15 crc kubenswrapper[4811]: E0128 16:01:15.529507 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5dbf7432-5c23-4105-83ea-64eff4879ff5" containerName="extract-content" Jan 28 16:01:15 crc kubenswrapper[4811]: I0128 16:01:15.529528 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="5dbf7432-5c23-4105-83ea-64eff4879ff5" containerName="extract-content" Jan 28 16:01:15 crc kubenswrapper[4811]: E0128 16:01:15.529548 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5dbf7432-5c23-4105-83ea-64eff4879ff5" containerName="extract-utilities" Jan 28 16:01:15 crc kubenswrapper[4811]: I0128 16:01:15.529556 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="5dbf7432-5c23-4105-83ea-64eff4879ff5" containerName="extract-utilities" Jan 28 16:01:15 crc kubenswrapper[4811]: E0128 16:01:15.529567 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5dbf7432-5c23-4105-83ea-64eff4879ff5" containerName="registry-server" Jan 28 16:01:15 crc kubenswrapper[4811]: I0128 16:01:15.529574 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="5dbf7432-5c23-4105-83ea-64eff4879ff5" containerName="registry-server" Jan 28 16:01:15 crc kubenswrapper[4811]: I0128 16:01:15.529720 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="5dbf7432-5c23-4105-83ea-64eff4879ff5" containerName="registry-server" Jan 28 16:01:15 crc kubenswrapper[4811]: I0128 16:01:15.530656 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb" Jan 28 16:01:15 crc kubenswrapper[4811]: I0128 16:01:15.532564 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 28 16:01:15 crc kubenswrapper[4811]: I0128 16:01:15.540728 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb"] Jan 28 16:01:15 crc kubenswrapper[4811]: I0128 16:01:15.616884 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h285r\" (UniqueName: \"kubernetes.io/projected/720ab524-c1a3-4203-8533-1432535163fc-kube-api-access-h285r\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb\" (UID: \"720ab524-c1a3-4203-8533-1432535163fc\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb" Jan 28 16:01:15 crc kubenswrapper[4811]: I0128 16:01:15.616954 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/720ab524-c1a3-4203-8533-1432535163fc-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb\" (UID: \"720ab524-c1a3-4203-8533-1432535163fc\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb" Jan 28 16:01:15 crc kubenswrapper[4811]: I0128 16:01:15.617018 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/720ab524-c1a3-4203-8533-1432535163fc-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb\" (UID: \"720ab524-c1a3-4203-8533-1432535163fc\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb" Jan 28 16:01:15 crc kubenswrapper[4811]: I0128 16:01:15.717898 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h285r\" (UniqueName: \"kubernetes.io/projected/720ab524-c1a3-4203-8533-1432535163fc-kube-api-access-h285r\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb\" (UID: \"720ab524-c1a3-4203-8533-1432535163fc\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb" Jan 28 16:01:15 crc kubenswrapper[4811]: I0128 16:01:15.718023 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/720ab524-c1a3-4203-8533-1432535163fc-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb\" (UID: \"720ab524-c1a3-4203-8533-1432535163fc\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb" Jan 28 16:01:15 crc kubenswrapper[4811]: I0128 16:01:15.718270 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/720ab524-c1a3-4203-8533-1432535163fc-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb\" (UID: \"720ab524-c1a3-4203-8533-1432535163fc\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb" Jan 28 16:01:15 crc kubenswrapper[4811]: I0128 16:01:15.718793 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/720ab524-c1a3-4203-8533-1432535163fc-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb\" (UID: \"720ab524-c1a3-4203-8533-1432535163fc\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb" Jan 28 16:01:15 crc kubenswrapper[4811]: I0128 16:01:15.719101 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/720ab524-c1a3-4203-8533-1432535163fc-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb\" (UID: \"720ab524-c1a3-4203-8533-1432535163fc\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb" Jan 28 16:01:15 crc kubenswrapper[4811]: I0128 16:01:15.743791 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h285r\" (UniqueName: \"kubernetes.io/projected/720ab524-c1a3-4203-8533-1432535163fc-kube-api-access-h285r\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb\" (UID: \"720ab524-c1a3-4203-8533-1432535163fc\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb" Jan 28 16:01:15 crc kubenswrapper[4811]: I0128 16:01:15.857099 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb" Jan 28 16:01:16 crc kubenswrapper[4811]: I0128 16:01:16.058836 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb"] Jan 28 16:01:16 crc kubenswrapper[4811]: W0128 16:01:16.074592 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod720ab524_c1a3_4203_8533_1432535163fc.slice/crio-face709d421cb90dcb6cb5de25557c4e033eb67a4adf42d5baf9ab394694f6a1 WatchSource:0}: Error finding container face709d421cb90dcb6cb5de25557c4e033eb67a4adf42d5baf9ab394694f6a1: Status 404 returned error can't find the container with id face709d421cb90dcb6cb5de25557c4e033eb67a4adf42d5baf9ab394694f6a1 Jan 28 16:01:16 crc kubenswrapper[4811]: I0128 16:01:16.626367 4811 generic.go:334] "Generic (PLEG): container finished" podID="720ab524-c1a3-4203-8533-1432535163fc" containerID="338b06c505da6e1b6382ccfbbfa6383d37f9657535412dc86a0b8cf95ecb92b5" exitCode=0 Jan 28 16:01:16 crc kubenswrapper[4811]: I0128 16:01:16.626544 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb" event={"ID":"720ab524-c1a3-4203-8533-1432535163fc","Type":"ContainerDied","Data":"338b06c505da6e1b6382ccfbbfa6383d37f9657535412dc86a0b8cf95ecb92b5"} Jan 28 16:01:16 crc kubenswrapper[4811]: I0128 16:01:16.626751 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb" event={"ID":"720ab524-c1a3-4203-8533-1432535163fc","Type":"ContainerStarted","Data":"face709d421cb90dcb6cb5de25557c4e033eb67a4adf42d5baf9ab394694f6a1"} Jan 28 16:01:17 crc kubenswrapper[4811]: I0128 16:01:17.654701 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-jn9r9" Jan 28 16:01:17 crc kubenswrapper[4811]: I0128 16:01:17.695491 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-jn9r9" Jan 28 16:01:20 crc kubenswrapper[4811]: 
I0128 16:01:20.653194 4811 generic.go:334] "Generic (PLEG): container finished" podID="720ab524-c1a3-4203-8533-1432535163fc" containerID="be81265834210b81a3dcde4e37678abc9835b70217034736df72c8d7d0c6cdde" exitCode=0 Jan 28 16:01:20 crc kubenswrapper[4811]: I0128 16:01:20.653285 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb" event={"ID":"720ab524-c1a3-4203-8533-1432535163fc","Type":"ContainerDied","Data":"be81265834210b81a3dcde4e37678abc9835b70217034736df72c8d7d0c6cdde"} Jan 28 16:01:21 crc kubenswrapper[4811]: I0128 16:01:21.665422 4811 generic.go:334] "Generic (PLEG): container finished" podID="720ab524-c1a3-4203-8533-1432535163fc" containerID="ed6f15ba054db18258a482bb226ec71ee4bbcb0fe95e2ba47d6ef1cbe364d078" exitCode=0 Jan 28 16:01:21 crc kubenswrapper[4811]: I0128 16:01:21.665484 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb" event={"ID":"720ab524-c1a3-4203-8533-1432535163fc","Type":"ContainerDied","Data":"ed6f15ba054db18258a482bb226ec71ee4bbcb0fe95e2ba47d6ef1cbe364d078"} Jan 28 16:01:22 crc kubenswrapper[4811]: I0128 16:01:22.656473 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-jn9r9" Jan 28 16:01:22 crc kubenswrapper[4811]: I0128 16:01:22.670420 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-kxvh5" Jan 28 16:01:22 crc kubenswrapper[4811]: I0128 16:01:22.932978 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb" Jan 28 16:01:23 crc kubenswrapper[4811]: I0128 16:01:23.037651 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/720ab524-c1a3-4203-8533-1432535163fc-bundle\") pod \"720ab524-c1a3-4203-8533-1432535163fc\" (UID: \"720ab524-c1a3-4203-8533-1432535163fc\") " Jan 28 16:01:23 crc kubenswrapper[4811]: I0128 16:01:23.037774 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/720ab524-c1a3-4203-8533-1432535163fc-util\") pod \"720ab524-c1a3-4203-8533-1432535163fc\" (UID: \"720ab524-c1a3-4203-8533-1432535163fc\") " Jan 28 16:01:23 crc kubenswrapper[4811]: I0128 16:01:23.038971 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/720ab524-c1a3-4203-8533-1432535163fc-bundle" (OuterVolumeSpecName: "bundle") pod "720ab524-c1a3-4203-8533-1432535163fc" (UID: "720ab524-c1a3-4203-8533-1432535163fc"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:01:23 crc kubenswrapper[4811]: I0128 16:01:23.041598 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h285r\" (UniqueName: \"kubernetes.io/projected/720ab524-c1a3-4203-8533-1432535163fc-kube-api-access-h285r\") pod \"720ab524-c1a3-4203-8533-1432535163fc\" (UID: \"720ab524-c1a3-4203-8533-1432535163fc\") " Jan 28 16:01:23 crc kubenswrapper[4811]: I0128 16:01:23.042120 4811 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/720ab524-c1a3-4203-8533-1432535163fc-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:01:23 crc kubenswrapper[4811]: I0128 16:01:23.048243 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/720ab524-c1a3-4203-8533-1432535163fc-util" (OuterVolumeSpecName: "util") pod "720ab524-c1a3-4203-8533-1432535163fc" (UID: "720ab524-c1a3-4203-8533-1432535163fc"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:01:23 crc kubenswrapper[4811]: I0128 16:01:23.050155 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/720ab524-c1a3-4203-8533-1432535163fc-kube-api-access-h285r" (OuterVolumeSpecName: "kube-api-access-h285r") pod "720ab524-c1a3-4203-8533-1432535163fc" (UID: "720ab524-c1a3-4203-8533-1432535163fc"). InnerVolumeSpecName "kube-api-access-h285r". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:01:23 crc kubenswrapper[4811]: I0128 16:01:23.143871 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h285r\" (UniqueName: \"kubernetes.io/projected/720ab524-c1a3-4203-8533-1432535163fc-kube-api-access-h285r\") on node \"crc\" DevicePath \"\"" Jan 28 16:01:23 crc kubenswrapper[4811]: I0128 16:01:23.144127 4811 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/720ab524-c1a3-4203-8533-1432535163fc-util\") on node \"crc\" DevicePath \"\"" Jan 28 16:01:23 crc kubenswrapper[4811]: I0128 16:01:23.681157 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb" event={"ID":"720ab524-c1a3-4203-8533-1432535163fc","Type":"ContainerDied","Data":"face709d421cb90dcb6cb5de25557c4e033eb67a4adf42d5baf9ab394694f6a1"} Jan 28 16:01:23 crc kubenswrapper[4811]: I0128 16:01:23.681200 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb" Jan 28 16:01:23 crc kubenswrapper[4811]: I0128 16:01:23.681219 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="face709d421cb90dcb6cb5de25557c4e033eb67a4adf42d5baf9ab394694f6a1" Jan 28 16:01:28 crc kubenswrapper[4811]: I0128 16:01:28.168746 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-7dcgx"] Jan 28 16:01:28 crc kubenswrapper[4811]: E0128 16:01:28.169344 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="720ab524-c1a3-4203-8533-1432535163fc" containerName="extract" Jan 28 16:01:28 crc kubenswrapper[4811]: I0128 16:01:28.169359 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="720ab524-c1a3-4203-8533-1432535163fc" containerName="extract" Jan 28 16:01:28 crc kubenswrapper[4811]: E0128 16:01:28.169372 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="720ab524-c1a3-4203-8533-1432535163fc" containerName="pull" Jan 28 16:01:28 crc kubenswrapper[4811]: I0128 16:01:28.169380 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="720ab524-c1a3-4203-8533-1432535163fc" containerName="pull" Jan 28 16:01:28 crc kubenswrapper[4811]: E0128 16:01:28.169397 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="720ab524-c1a3-4203-8533-1432535163fc" containerName="util" Jan 28 16:01:28 crc kubenswrapper[4811]: I0128 16:01:28.169405 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="720ab524-c1a3-4203-8533-1432535163fc" containerName="util" Jan 28 16:01:28 crc kubenswrapper[4811]: I0128 16:01:28.169558 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="720ab524-c1a3-4203-8533-1432535163fc" containerName="extract" Jan 28 16:01:28 crc kubenswrapper[4811]: I0128 16:01:28.170084 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-7dcgx" Jan 28 16:01:28 crc kubenswrapper[4811]: I0128 16:01:28.173190 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Jan 28 16:01:28 crc kubenswrapper[4811]: I0128 16:01:28.173205 4811 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-s4fbb" Jan 28 16:01:28 crc kubenswrapper[4811]: I0128 16:01:28.175845 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Jan 28 16:01:28 crc kubenswrapper[4811]: I0128 16:01:28.182469 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-7dcgx"] Jan 28 16:01:28 crc kubenswrapper[4811]: I0128 16:01:28.219639 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vttg2\" (UniqueName: \"kubernetes.io/projected/ee6a59e4-962a-4b6a-afe4-cc2ba3fa674e-kube-api-access-vttg2\") pod \"cert-manager-operator-controller-manager-64cf6dff88-7dcgx\" (UID: \"ee6a59e4-962a-4b6a-afe4-cc2ba3fa674e\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-7dcgx" Jan 28 16:01:28 crc kubenswrapper[4811]: I0128 16:01:28.219699 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/ee6a59e4-962a-4b6a-afe4-cc2ba3fa674e-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-7dcgx\" (UID: \"ee6a59e4-962a-4b6a-afe4-cc2ba3fa674e\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-7dcgx" Jan 28 16:01:28 crc kubenswrapper[4811]: I0128 16:01:28.320656 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vttg2\" (UniqueName: \"kubernetes.io/projected/ee6a59e4-962a-4b6a-afe4-cc2ba3fa674e-kube-api-access-vttg2\") pod \"cert-manager-operator-controller-manager-64cf6dff88-7dcgx\" (UID: \"ee6a59e4-962a-4b6a-afe4-cc2ba3fa674e\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-7dcgx" Jan 28 16:01:28 crc kubenswrapper[4811]: I0128 16:01:28.320724 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/ee6a59e4-962a-4b6a-afe4-cc2ba3fa674e-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-7dcgx\" (UID: \"ee6a59e4-962a-4b6a-afe4-cc2ba3fa674e\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-7dcgx" Jan 28 16:01:28 crc kubenswrapper[4811]: I0128 16:01:28.321267 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/ee6a59e4-962a-4b6a-afe4-cc2ba3fa674e-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-7dcgx\" (UID: \"ee6a59e4-962a-4b6a-afe4-cc2ba3fa674e\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-7dcgx" Jan 28 16:01:28 crc kubenswrapper[4811]: I0128 16:01:28.342955 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vttg2\" (UniqueName: \"kubernetes.io/projected/ee6a59e4-962a-4b6a-afe4-cc2ba3fa674e-kube-api-access-vttg2\") pod \"cert-manager-operator-controller-manager-64cf6dff88-7dcgx\" (UID: \"ee6a59e4-962a-4b6a-afe4-cc2ba3fa674e\") " 
pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-7dcgx" Jan 28 16:01:28 crc kubenswrapper[4811]: I0128 16:01:28.486090 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-7dcgx" Jan 28 16:01:28 crc kubenswrapper[4811]: I0128 16:01:28.918496 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-7dcgx"] Jan 28 16:01:29 crc kubenswrapper[4811]: I0128 16:01:29.718899 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-7dcgx" event={"ID":"ee6a59e4-962a-4b6a-afe4-cc2ba3fa674e","Type":"ContainerStarted","Data":"69f6a5edc1431a8a49c06aa961ba7f7e1843de292f55b91a78d3f7d9ce89e795"} Jan 28 16:01:31 crc kubenswrapper[4811]: I0128 16:01:31.495861 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5kvmx"] Jan 28 16:01:31 crc kubenswrapper[4811]: I0128 16:01:31.501189 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5kvmx" Jan 28 16:01:31 crc kubenswrapper[4811]: I0128 16:01:31.528336 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5kvmx"] Jan 28 16:01:31 crc kubenswrapper[4811]: I0128 16:01:31.560713 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56ad18d3-c01d-49eb-9732-b14be97d42bb-catalog-content\") pod \"certified-operators-5kvmx\" (UID: \"56ad18d3-c01d-49eb-9732-b14be97d42bb\") " pod="openshift-marketplace/certified-operators-5kvmx" Jan 28 16:01:31 crc kubenswrapper[4811]: I0128 16:01:31.560790 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mq6nb\" (UniqueName: \"kubernetes.io/projected/56ad18d3-c01d-49eb-9732-b14be97d42bb-kube-api-access-mq6nb\") pod \"certified-operators-5kvmx\" (UID: \"56ad18d3-c01d-49eb-9732-b14be97d42bb\") " pod="openshift-marketplace/certified-operators-5kvmx" Jan 28 16:01:31 crc kubenswrapper[4811]: I0128 16:01:31.560820 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56ad18d3-c01d-49eb-9732-b14be97d42bb-utilities\") pod \"certified-operators-5kvmx\" (UID: \"56ad18d3-c01d-49eb-9732-b14be97d42bb\") " pod="openshift-marketplace/certified-operators-5kvmx" Jan 28 16:01:31 crc kubenswrapper[4811]: I0128 16:01:31.662015 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56ad18d3-c01d-49eb-9732-b14be97d42bb-catalog-content\") pod \"certified-operators-5kvmx\" (UID: \"56ad18d3-c01d-49eb-9732-b14be97d42bb\") " pod="openshift-marketplace/certified-operators-5kvmx" Jan 28 16:01:31 crc kubenswrapper[4811]: I0128 16:01:31.662092 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mq6nb\" (UniqueName: \"kubernetes.io/projected/56ad18d3-c01d-49eb-9732-b14be97d42bb-kube-api-access-mq6nb\") pod \"certified-operators-5kvmx\" (UID: \"56ad18d3-c01d-49eb-9732-b14be97d42bb\") " pod="openshift-marketplace/certified-operators-5kvmx" Jan 28 16:01:31 crc kubenswrapper[4811]: I0128 16:01:31.662127 4811 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56ad18d3-c01d-49eb-9732-b14be97d42bb-utilities\") pod \"certified-operators-5kvmx\" (UID: \"56ad18d3-c01d-49eb-9732-b14be97d42bb\") " pod="openshift-marketplace/certified-operators-5kvmx" Jan 28 16:01:31 crc kubenswrapper[4811]: I0128 16:01:31.662629 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56ad18d3-c01d-49eb-9732-b14be97d42bb-catalog-content\") pod \"certified-operators-5kvmx\" (UID: \"56ad18d3-c01d-49eb-9732-b14be97d42bb\") " pod="openshift-marketplace/certified-operators-5kvmx" Jan 28 16:01:31 crc kubenswrapper[4811]: I0128 16:01:31.662726 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56ad18d3-c01d-49eb-9732-b14be97d42bb-utilities\") pod \"certified-operators-5kvmx\" (UID: \"56ad18d3-c01d-49eb-9732-b14be97d42bb\") " pod="openshift-marketplace/certified-operators-5kvmx" Jan 28 16:01:31 crc kubenswrapper[4811]: I0128 16:01:31.683625 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mq6nb\" (UniqueName: \"kubernetes.io/projected/56ad18d3-c01d-49eb-9732-b14be97d42bb-kube-api-access-mq6nb\") pod \"certified-operators-5kvmx\" (UID: \"56ad18d3-c01d-49eb-9732-b14be97d42bb\") " pod="openshift-marketplace/certified-operators-5kvmx" Jan 28 16:01:31 crc kubenswrapper[4811]: I0128 16:01:31.835324 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5kvmx" Jan 28 16:01:36 crc kubenswrapper[4811]: I0128 16:01:36.890767 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-m8hts"] Jan 28 16:01:36 crc kubenswrapper[4811]: I0128 16:01:36.893223 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m8hts" Jan 28 16:01:36 crc kubenswrapper[4811]: I0128 16:01:36.901922 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-m8hts"] Jan 28 16:01:37 crc kubenswrapper[4811]: I0128 16:01:37.048831 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ea1ca4c-4c40-424d-b539-4e0330a9f5fe-catalog-content\") pod \"redhat-marketplace-m8hts\" (UID: \"2ea1ca4c-4c40-424d-b539-4e0330a9f5fe\") " pod="openshift-marketplace/redhat-marketplace-m8hts" Jan 28 16:01:37 crc kubenswrapper[4811]: I0128 16:01:37.048908 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ea1ca4c-4c40-424d-b539-4e0330a9f5fe-utilities\") pod \"redhat-marketplace-m8hts\" (UID: \"2ea1ca4c-4c40-424d-b539-4e0330a9f5fe\") " pod="openshift-marketplace/redhat-marketplace-m8hts" Jan 28 16:01:37 crc kubenswrapper[4811]: I0128 16:01:37.049122 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9psk\" (UniqueName: \"kubernetes.io/projected/2ea1ca4c-4c40-424d-b539-4e0330a9f5fe-kube-api-access-h9psk\") pod \"redhat-marketplace-m8hts\" (UID: \"2ea1ca4c-4c40-424d-b539-4e0330a9f5fe\") " pod="openshift-marketplace/redhat-marketplace-m8hts" Jan 28 16:01:37 crc kubenswrapper[4811]: I0128 16:01:37.092646 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5kvmx"] Jan 28 16:01:37 crc kubenswrapper[4811]: I0128 16:01:37.150790 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ea1ca4c-4c40-424d-b539-4e0330a9f5fe-catalog-content\") pod \"redhat-marketplace-m8hts\" (UID: \"2ea1ca4c-4c40-424d-b539-4e0330a9f5fe\") " pod="openshift-marketplace/redhat-marketplace-m8hts" Jan 28 16:01:37 crc kubenswrapper[4811]: I0128 16:01:37.150875 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ea1ca4c-4c40-424d-b539-4e0330a9f5fe-utilities\") pod \"redhat-marketplace-m8hts\" (UID: \"2ea1ca4c-4c40-424d-b539-4e0330a9f5fe\") " pod="openshift-marketplace/redhat-marketplace-m8hts" Jan 28 16:01:37 crc kubenswrapper[4811]: I0128 16:01:37.150921 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9psk\" (UniqueName: \"kubernetes.io/projected/2ea1ca4c-4c40-424d-b539-4e0330a9f5fe-kube-api-access-h9psk\") pod \"redhat-marketplace-m8hts\" (UID: \"2ea1ca4c-4c40-424d-b539-4e0330a9f5fe\") " pod="openshift-marketplace/redhat-marketplace-m8hts" Jan 28 16:01:37 crc kubenswrapper[4811]: I0128 16:01:37.151394 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ea1ca4c-4c40-424d-b539-4e0330a9f5fe-catalog-content\") pod \"redhat-marketplace-m8hts\" (UID: \"2ea1ca4c-4c40-424d-b539-4e0330a9f5fe\") " pod="openshift-marketplace/redhat-marketplace-m8hts" Jan 28 16:01:37 crc kubenswrapper[4811]: I0128 16:01:37.151532 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ea1ca4c-4c40-424d-b539-4e0330a9f5fe-utilities\") pod \"redhat-marketplace-m8hts\" (UID: 
\"2ea1ca4c-4c40-424d-b539-4e0330a9f5fe\") " pod="openshift-marketplace/redhat-marketplace-m8hts" Jan 28 16:01:37 crc kubenswrapper[4811]: I0128 16:01:37.173167 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9psk\" (UniqueName: \"kubernetes.io/projected/2ea1ca4c-4c40-424d-b539-4e0330a9f5fe-kube-api-access-h9psk\") pod \"redhat-marketplace-m8hts\" (UID: \"2ea1ca4c-4c40-424d-b539-4e0330a9f5fe\") " pod="openshift-marketplace/redhat-marketplace-m8hts" Jan 28 16:01:37 crc kubenswrapper[4811]: I0128 16:01:37.215588 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m8hts" Jan 28 16:01:37 crc kubenswrapper[4811]: I0128 16:01:37.633520 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-m8hts"] Jan 28 16:01:37 crc kubenswrapper[4811]: W0128 16:01:37.639641 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2ea1ca4c_4c40_424d_b539_4e0330a9f5fe.slice/crio-d4ef80f0be993c39ddfffaeb88cb99f4fe33cad311cfc328c715dba461a397ae WatchSource:0}: Error finding container d4ef80f0be993c39ddfffaeb88cb99f4fe33cad311cfc328c715dba461a397ae: Status 404 returned error can't find the container with id d4ef80f0be993c39ddfffaeb88cb99f4fe33cad311cfc328c715dba461a397ae Jan 28 16:01:37 crc kubenswrapper[4811]: I0128 16:01:37.784819 4811 generic.go:334] "Generic (PLEG): container finished" podID="2ea1ca4c-4c40-424d-b539-4e0330a9f5fe" containerID="dc430950962dbffeaac1a77915ee98387e0a4744147962d8414cd6864fb36463" exitCode=0 Jan 28 16:01:37 crc kubenswrapper[4811]: I0128 16:01:37.784876 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m8hts" event={"ID":"2ea1ca4c-4c40-424d-b539-4e0330a9f5fe","Type":"ContainerDied","Data":"dc430950962dbffeaac1a77915ee98387e0a4744147962d8414cd6864fb36463"} Jan 28 16:01:37 crc kubenswrapper[4811]: I0128 16:01:37.784900 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m8hts" event={"ID":"2ea1ca4c-4c40-424d-b539-4e0330a9f5fe","Type":"ContainerStarted","Data":"d4ef80f0be993c39ddfffaeb88cb99f4fe33cad311cfc328c715dba461a397ae"} Jan 28 16:01:37 crc kubenswrapper[4811]: I0128 16:01:37.789314 4811 generic.go:334] "Generic (PLEG): container finished" podID="56ad18d3-c01d-49eb-9732-b14be97d42bb" containerID="e04f4452f5d06c04948ec5bb83df48b65930e4dc072fd73410672d000db47200" exitCode=0 Jan 28 16:01:37 crc kubenswrapper[4811]: I0128 16:01:37.789364 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5kvmx" event={"ID":"56ad18d3-c01d-49eb-9732-b14be97d42bb","Type":"ContainerDied","Data":"e04f4452f5d06c04948ec5bb83df48b65930e4dc072fd73410672d000db47200"} Jan 28 16:01:37 crc kubenswrapper[4811]: I0128 16:01:37.789385 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5kvmx" event={"ID":"56ad18d3-c01d-49eb-9732-b14be97d42bb","Type":"ContainerStarted","Data":"17e6aca24c342de687e03a4d10e6cc042aca83a9dbbc17d93a5746a6868c8059"} Jan 28 16:01:37 crc kubenswrapper[4811]: I0128 16:01:37.791855 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-7dcgx" event={"ID":"ee6a59e4-962a-4b6a-afe4-cc2ba3fa674e","Type":"ContainerStarted","Data":"bd5d94406cdc8c10458ce496f78e501f5a51ff1017875d332265e0b23a254b3e"} 
Jan 28 16:01:38 crc kubenswrapper[4811]: I0128 16:01:38.800046 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5kvmx" event={"ID":"56ad18d3-c01d-49eb-9732-b14be97d42bb","Type":"ContainerStarted","Data":"0e132d74f3d0e212d902f1614ddcf93ebb4c4ee86793a19631c7da342a733fc9"}
Jan 28 16:01:38 crc kubenswrapper[4811]: I0128 16:01:38.802643 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m8hts" event={"ID":"2ea1ca4c-4c40-424d-b539-4e0330a9f5fe","Type":"ContainerStarted","Data":"e8c99315a9ae4eab7e5614adf029405b57dd92dcd1e2bfad02b77e7d9b2c1695"}
Jan 28 16:01:38 crc kubenswrapper[4811]: I0128 16:01:38.822524 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-7dcgx" podStartSLOduration=2.940421001 podStartE2EDuration="10.822505344s" podCreationTimestamp="2026-01-28 16:01:28 +0000 UTC" firstStartedPulling="2026-01-28 16:01:28.929673772 +0000 UTC m=+981.684037355" lastFinishedPulling="2026-01-28 16:01:36.811758115 +0000 UTC m=+989.566121698" observedRunningTime="2026-01-28 16:01:37.849883681 +0000 UTC m=+990.604247274" watchObservedRunningTime="2026-01-28 16:01:38.822505344 +0000 UTC m=+991.576868927"
Jan 28 16:01:39 crc kubenswrapper[4811]: I0128 16:01:39.810477 4811 generic.go:334] "Generic (PLEG): container finished" podID="2ea1ca4c-4c40-424d-b539-4e0330a9f5fe" containerID="e8c99315a9ae4eab7e5614adf029405b57dd92dcd1e2bfad02b77e7d9b2c1695" exitCode=0
Jan 28 16:01:39 crc kubenswrapper[4811]: I0128 16:01:39.810770 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m8hts" event={"ID":"2ea1ca4c-4c40-424d-b539-4e0330a9f5fe","Type":"ContainerDied","Data":"e8c99315a9ae4eab7e5614adf029405b57dd92dcd1e2bfad02b77e7d9b2c1695"}
Jan 28 16:01:39 crc kubenswrapper[4811]: I0128 16:01:39.815593 4811 generic.go:334] "Generic (PLEG): container finished" podID="56ad18d3-c01d-49eb-9732-b14be97d42bb" containerID="0e132d74f3d0e212d902f1614ddcf93ebb4c4ee86793a19631c7da342a733fc9" exitCode=0
Jan 28 16:01:39 crc kubenswrapper[4811]: I0128 16:01:39.815642 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5kvmx" event={"ID":"56ad18d3-c01d-49eb-9732-b14be97d42bb","Type":"ContainerDied","Data":"0e132d74f3d0e212d902f1614ddcf93ebb4c4ee86793a19631c7da342a733fc9"}
Jan 28 16:01:40 crc kubenswrapper[4811]: I0128 16:01:40.822982 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5kvmx" event={"ID":"56ad18d3-c01d-49eb-9732-b14be97d42bb","Type":"ContainerStarted","Data":"4c9b90d556ffe4f8afb6d7e9cd9e56dffa33c46f1abd4d2646f98e42a60ef64d"}
Jan 28 16:01:40 crc kubenswrapper[4811]: I0128 16:01:40.824627 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m8hts" event={"ID":"2ea1ca4c-4c40-424d-b539-4e0330a9f5fe","Type":"ContainerStarted","Data":"5b7eb549443a3de517394f02f8de3da132cc3fae2e0d949e5dd2adf3a826fdbd"}
Jan 28 16:01:40 crc kubenswrapper[4811]: I0128 16:01:40.842030 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5kvmx" podStartSLOduration=7.330525854 podStartE2EDuration="9.84200863s" podCreationTimestamp="2026-01-28 16:01:31 +0000 UTC" firstStartedPulling="2026-01-28 16:01:37.790528195 +0000 UTC m=+990.544891778" lastFinishedPulling="2026-01-28 16:01:40.302010971 +0000 UTC m=+993.056374554" observedRunningTime="2026-01-28 16:01:40.840077657 +0000 UTC m=+993.594441240" watchObservedRunningTime="2026-01-28 16:01:40.84200863 +0000 UTC m=+993.596372223"
Jan 28 16:01:40 crc kubenswrapper[4811]: I0128 16:01:40.860695 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-m8hts" podStartSLOduration=2.389657472 podStartE2EDuration="4.860674364s" podCreationTimestamp="2026-01-28 16:01:36 +0000 UTC" firstStartedPulling="2026-01-28 16:01:37.786143077 +0000 UTC m=+990.540506660" lastFinishedPulling="2026-01-28 16:01:40.257159969 +0000 UTC m=+993.011523552" observedRunningTime="2026-01-28 16:01:40.856353877 +0000 UTC m=+993.610717450" watchObservedRunningTime="2026-01-28 16:01:40.860674364 +0000 UTC m=+993.615037947"
Jan 28 16:01:41 crc kubenswrapper[4811]: I0128 16:01:41.317541 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-thcwr"]
Jan 28 16:01:41 crc kubenswrapper[4811]: I0128 16:01:41.318461 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-thcwr"
Jan 28 16:01:41 crc kubenswrapper[4811]: I0128 16:01:41.320368 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt"
Jan 28 16:01:41 crc kubenswrapper[4811]: I0128 16:01:41.320630 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt"
Jan 28 16:01:41 crc kubenswrapper[4811]: I0128 16:01:41.327548 4811 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-m5dw7"
Jan 28 16:01:41 crc kubenswrapper[4811]: I0128 16:01:41.335482 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-thcwr"]
Jan 28 16:01:41 crc kubenswrapper[4811]: I0128 16:01:41.406868 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rms9r\" (UniqueName: \"kubernetes.io/projected/68ad0985-ed35-40bc-8606-6ea7ad59be62-kube-api-access-rms9r\") pod \"cert-manager-webhook-f4fb5df64-thcwr\" (UID: \"68ad0985-ed35-40bc-8606-6ea7ad59be62\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-thcwr"
Jan 28 16:01:41 crc kubenswrapper[4811]: I0128 16:01:41.406930 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/68ad0985-ed35-40bc-8606-6ea7ad59be62-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-thcwr\" (UID: \"68ad0985-ed35-40bc-8606-6ea7ad59be62\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-thcwr"
Jan 28 16:01:41 crc kubenswrapper[4811]: I0128 16:01:41.508134 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/68ad0985-ed35-40bc-8606-6ea7ad59be62-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-thcwr\" (UID: \"68ad0985-ed35-40bc-8606-6ea7ad59be62\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-thcwr"
Jan 28 16:01:41 crc kubenswrapper[4811]: I0128 16:01:41.508553 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rms9r\" (UniqueName: \"kubernetes.io/projected/68ad0985-ed35-40bc-8606-6ea7ad59be62-kube-api-access-rms9r\") pod \"cert-manager-webhook-f4fb5df64-thcwr\" (UID: \"68ad0985-ed35-40bc-8606-6ea7ad59be62\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-thcwr"
Jan 28 16:01:41 crc kubenswrapper[4811]: I0128 16:01:41.530309 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rms9r\" (UniqueName: \"kubernetes.io/projected/68ad0985-ed35-40bc-8606-6ea7ad59be62-kube-api-access-rms9r\") pod \"cert-manager-webhook-f4fb5df64-thcwr\" (UID: \"68ad0985-ed35-40bc-8606-6ea7ad59be62\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-thcwr"
Jan 28 16:01:41 crc kubenswrapper[4811]: I0128 16:01:41.531896 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/68ad0985-ed35-40bc-8606-6ea7ad59be62-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-thcwr\" (UID: \"68ad0985-ed35-40bc-8606-6ea7ad59be62\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-thcwr"
Jan 28 16:01:41 crc kubenswrapper[4811]: I0128 16:01:41.618163 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-hz2ds"]
Jan 28 16:01:41 crc kubenswrapper[4811]: I0128 16:01:41.619086 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-hz2ds"
Jan 28 16:01:41 crc kubenswrapper[4811]: I0128 16:01:41.620693 4811 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-7vckd"
Jan 28 16:01:41 crc kubenswrapper[4811]: I0128 16:01:41.633187 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-hz2ds"]
Jan 28 16:01:41 crc kubenswrapper[4811]: I0128 16:01:41.633757 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-thcwr"
Jan 28 16:01:41 crc kubenswrapper[4811]: I0128 16:01:41.712150 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3cbb2167-726d-4f37-a7bf-865d4e49c4f8-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-hz2ds\" (UID: \"3cbb2167-726d-4f37-a7bf-865d4e49c4f8\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-hz2ds"
Jan 28 16:01:41 crc kubenswrapper[4811]: I0128 16:01:41.712215 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffzxl\" (UniqueName: \"kubernetes.io/projected/3cbb2167-726d-4f37-a7bf-865d4e49c4f8-kube-api-access-ffzxl\") pod \"cert-manager-cainjector-855d9ccff4-hz2ds\" (UID: \"3cbb2167-726d-4f37-a7bf-865d4e49c4f8\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-hz2ds"
Jan 28 16:01:41 crc kubenswrapper[4811]: I0128 16:01:41.813483 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffzxl\" (UniqueName: \"kubernetes.io/projected/3cbb2167-726d-4f37-a7bf-865d4e49c4f8-kube-api-access-ffzxl\") pod \"cert-manager-cainjector-855d9ccff4-hz2ds\" (UID: \"3cbb2167-726d-4f37-a7bf-865d4e49c4f8\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-hz2ds"
Jan 28 16:01:41 crc kubenswrapper[4811]: I0128 16:01:41.814286 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3cbb2167-726d-4f37-a7bf-865d4e49c4f8-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-hz2ds\" (UID: \"3cbb2167-726d-4f37-a7bf-865d4e49c4f8\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-hz2ds"
Jan 28 16:01:41 crc kubenswrapper[4811]: I0128 16:01:41.834342 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3cbb2167-726d-4f37-a7bf-865d4e49c4f8-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-hz2ds\" (UID: \"3cbb2167-726d-4f37-a7bf-865d4e49c4f8\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-hz2ds"
Jan 28 16:01:41 crc kubenswrapper[4811]: I0128 16:01:41.835164 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffzxl\" (UniqueName: \"kubernetes.io/projected/3cbb2167-726d-4f37-a7bf-865d4e49c4f8-kube-api-access-ffzxl\") pod \"cert-manager-cainjector-855d9ccff4-hz2ds\" (UID: \"3cbb2167-726d-4f37-a7bf-865d4e49c4f8\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-hz2ds"
Jan 28 16:01:41 crc kubenswrapper[4811]: I0128 16:01:41.835450 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5kvmx"
Jan 28 16:01:41 crc kubenswrapper[4811]: I0128 16:01:41.835644 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5kvmx"
Jan 28 16:01:41 crc kubenswrapper[4811]: I0128 16:01:41.938741 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-hz2ds"
Jan 28 16:01:42 crc kubenswrapper[4811]: I0128 16:01:42.030484 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-thcwr"]
Jan 28 16:01:42 crc kubenswrapper[4811]: I0128 16:01:42.436644 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-hz2ds"]
Jan 28 16:01:42 crc kubenswrapper[4811]: W0128 16:01:42.444125 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3cbb2167_726d_4f37_a7bf_865d4e49c4f8.slice/crio-ca2b21d408629379a69d753305b626e644bb0e8a51c2f2cea430f6740f6c9760 WatchSource:0}: Error finding container ca2b21d408629379a69d753305b626e644bb0e8a51c2f2cea430f6740f6c9760: Status 404 returned error can't find the container with id ca2b21d408629379a69d753305b626e644bb0e8a51c2f2cea430f6740f6c9760
Jan 28 16:01:42 crc kubenswrapper[4811]: I0128 16:01:42.845785 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-thcwr" event={"ID":"68ad0985-ed35-40bc-8606-6ea7ad59be62","Type":"ContainerStarted","Data":"6ef5ab427022e07deb76b30b2f4fb098154dd0ec1cec4964cac7ed15d782e727"}
Jan 28 16:01:42 crc kubenswrapper[4811]: I0128 16:01:42.848140 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-hz2ds" event={"ID":"3cbb2167-726d-4f37-a7bf-865d4e49c4f8","Type":"ContainerStarted","Data":"ca2b21d408629379a69d753305b626e644bb0e8a51c2f2cea430f6740f6c9760"}
Jan 28 16:01:42 crc kubenswrapper[4811]: I0128 16:01:42.882463 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-5kvmx" podUID="56ad18d3-c01d-49eb-9732-b14be97d42bb" containerName="registry-server" probeResult="failure" output=<
Jan 28 16:01:42 crc kubenswrapper[4811]: timeout: failed to connect service ":50051" within 1s
Jan 28 16:01:42 crc kubenswrapper[4811]: >
Jan 28 16:01:47 crc kubenswrapper[4811]: I0128 16:01:47.216376 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-m8hts"
Jan 28 16:01:47 crc kubenswrapper[4811]: I0128 16:01:47.217610 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-m8hts"
Jan 28 16:01:47 crc kubenswrapper[4811]: I0128 16:01:47.254260 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-m8hts"
Jan 28 16:01:47 crc kubenswrapper[4811]: I0128 16:01:47.918540 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-m8hts"
Jan 28 16:01:47 crc kubenswrapper[4811]: I0128 16:01:47.964212 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-m8hts"]
Jan 28 16:01:49 crc kubenswrapper[4811]: I0128 16:01:49.893369 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-m8hts" podUID="2ea1ca4c-4c40-424d-b539-4e0330a9f5fe" containerName="registry-server" containerID="cri-o://5b7eb549443a3de517394f02f8de3da132cc3fae2e0d949e5dd2adf3a826fdbd" gracePeriod=2
Jan 28 16:01:50 crc kubenswrapper[4811]: I0128 16:01:50.267918 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m8hts"
Jan 28 16:01:50 crc kubenswrapper[4811]: I0128 16:01:50.449007 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ea1ca4c-4c40-424d-b539-4e0330a9f5fe-utilities\") pod \"2ea1ca4c-4c40-424d-b539-4e0330a9f5fe\" (UID: \"2ea1ca4c-4c40-424d-b539-4e0330a9f5fe\") "
Jan 28 16:01:50 crc kubenswrapper[4811]: I0128 16:01:50.449068 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ea1ca4c-4c40-424d-b539-4e0330a9f5fe-catalog-content\") pod \"2ea1ca4c-4c40-424d-b539-4e0330a9f5fe\" (UID: \"2ea1ca4c-4c40-424d-b539-4e0330a9f5fe\") "
Jan 28 16:01:50 crc kubenswrapper[4811]: I0128 16:01:50.449087 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h9psk\" (UniqueName: \"kubernetes.io/projected/2ea1ca4c-4c40-424d-b539-4e0330a9f5fe-kube-api-access-h9psk\") pod \"2ea1ca4c-4c40-424d-b539-4e0330a9f5fe\" (UID: \"2ea1ca4c-4c40-424d-b539-4e0330a9f5fe\") "
Jan 28 16:01:50 crc kubenswrapper[4811]: I0128 16:01:50.449895 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ea1ca4c-4c40-424d-b539-4e0330a9f5fe-utilities" (OuterVolumeSpecName: "utilities") pod "2ea1ca4c-4c40-424d-b539-4e0330a9f5fe" (UID: "2ea1ca4c-4c40-424d-b539-4e0330a9f5fe"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 16:01:50 crc kubenswrapper[4811]: I0128 16:01:50.454827 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ea1ca4c-4c40-424d-b539-4e0330a9f5fe-kube-api-access-h9psk" (OuterVolumeSpecName: "kube-api-access-h9psk") pod "2ea1ca4c-4c40-424d-b539-4e0330a9f5fe" (UID: "2ea1ca4c-4c40-424d-b539-4e0330a9f5fe"). InnerVolumeSpecName "kube-api-access-h9psk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 16:01:50 crc kubenswrapper[4811]: I0128 16:01:50.470968 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ea1ca4c-4c40-424d-b539-4e0330a9f5fe-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2ea1ca4c-4c40-424d-b539-4e0330a9f5fe" (UID: "2ea1ca4c-4c40-424d-b539-4e0330a9f5fe"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 16:01:50 crc kubenswrapper[4811]: I0128 16:01:50.551595 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ea1ca4c-4c40-424d-b539-4e0330a9f5fe-utilities\") on node \"crc\" DevicePath \"\""
Jan 28 16:01:50 crc kubenswrapper[4811]: I0128 16:01:50.551633 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ea1ca4c-4c40-424d-b539-4e0330a9f5fe-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 28 16:01:50 crc kubenswrapper[4811]: I0128 16:01:50.551648 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h9psk\" (UniqueName: \"kubernetes.io/projected/2ea1ca4c-4c40-424d-b539-4e0330a9f5fe-kube-api-access-h9psk\") on node \"crc\" DevicePath \"\""
Jan 28 16:01:50 crc kubenswrapper[4811]: I0128 16:01:50.902188 4811 generic.go:334] "Generic (PLEG): container finished" podID="2ea1ca4c-4c40-424d-b539-4e0330a9f5fe" containerID="5b7eb549443a3de517394f02f8de3da132cc3fae2e0d949e5dd2adf3a826fdbd" exitCode=0
Jan 28 16:01:50 crc kubenswrapper[4811]: I0128 16:01:50.902272 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m8hts" event={"ID":"2ea1ca4c-4c40-424d-b539-4e0330a9f5fe","Type":"ContainerDied","Data":"5b7eb549443a3de517394f02f8de3da132cc3fae2e0d949e5dd2adf3a826fdbd"}
Jan 28 16:01:50 crc kubenswrapper[4811]: I0128 16:01:50.902480 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m8hts"
Jan 28 16:01:50 crc kubenswrapper[4811]: I0128 16:01:50.902539 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m8hts" event={"ID":"2ea1ca4c-4c40-424d-b539-4e0330a9f5fe","Type":"ContainerDied","Data":"d4ef80f0be993c39ddfffaeb88cb99f4fe33cad311cfc328c715dba461a397ae"}
Jan 28 16:01:50 crc kubenswrapper[4811]: I0128 16:01:50.902569 4811 scope.go:117] "RemoveContainer" containerID="5b7eb549443a3de517394f02f8de3da132cc3fae2e0d949e5dd2adf3a826fdbd"
Jan 28 16:01:50 crc kubenswrapper[4811]: I0128 16:01:50.904261 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-thcwr" event={"ID":"68ad0985-ed35-40bc-8606-6ea7ad59be62","Type":"ContainerStarted","Data":"dd069df198657fc6d9f42d346b1045a2835deed4a2b59eb02b03fea24a03a3ba"}
Jan 28 16:01:50 crc kubenswrapper[4811]: I0128 16:01:50.904439 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-f4fb5df64-thcwr"
Jan 28 16:01:50 crc kubenswrapper[4811]: I0128 16:01:50.905992 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-hz2ds" event={"ID":"3cbb2167-726d-4f37-a7bf-865d4e49c4f8","Type":"ContainerStarted","Data":"1d88cb27486306521eef14978a7df5259ea87b9171a221285582909f701898ab"}
Jan 28 16:01:50 crc kubenswrapper[4811]: I0128 16:01:50.921388 4811 scope.go:117] "RemoveContainer" containerID="e8c99315a9ae4eab7e5614adf029405b57dd92dcd1e2bfad02b77e7d9b2c1695"
Jan 28 16:01:50 crc kubenswrapper[4811]: I0128 16:01:50.934507 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-855d9ccff4-hz2ds" podStartSLOduration=2.622658622 podStartE2EDuration="9.934480971s" podCreationTimestamp="2026-01-28 16:01:41 +0000 UTC" firstStartedPulling="2026-01-28 16:01:42.446071394 +0000 UTC m=+995.200434977" lastFinishedPulling="2026-01-28 16:01:49.757893743 +0000 UTC m=+1002.512257326" observedRunningTime="2026-01-28 16:01:50.928979902 +0000 UTC m=+1003.683343495" watchObservedRunningTime="2026-01-28 16:01:50.934480971 +0000 UTC m=+1003.688844574"
Jan 28 16:01:50 crc kubenswrapper[4811]: I0128 16:01:50.942372 4811 scope.go:117] "RemoveContainer" containerID="dc430950962dbffeaac1a77915ee98387e0a4744147962d8414cd6864fb36463"
Jan 28 16:01:50 crc kubenswrapper[4811]: I0128 16:01:50.957113 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-f4fb5df64-thcwr" podStartSLOduration=2.326499156 podStartE2EDuration="9.957092542s" podCreationTimestamp="2026-01-28 16:01:41 +0000 UTC" firstStartedPulling="2026-01-28 16:01:42.088905628 +0000 UTC m=+994.843269211" lastFinishedPulling="2026-01-28 16:01:49.719499014 +0000 UTC m=+1002.473862597" observedRunningTime="2026-01-28 16:01:50.945446477 +0000 UTC m=+1003.699810070" watchObservedRunningTime="2026-01-28 16:01:50.957092542 +0000 UTC m=+1003.711456125"
Jan 28 16:01:50 crc kubenswrapper[4811]: I0128 16:01:50.973079 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-m8hts"]
Jan 28 16:01:50 crc kubenswrapper[4811]: I0128 16:01:50.977672 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-m8hts"]
Jan 28 16:01:50 crc kubenswrapper[4811]: I0128 16:01:50.994362 4811 scope.go:117] "RemoveContainer" containerID="5b7eb549443a3de517394f02f8de3da132cc3fae2e0d949e5dd2adf3a826fdbd"
Jan 28 16:01:50 crc kubenswrapper[4811]: E0128 16:01:50.995778 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b7eb549443a3de517394f02f8de3da132cc3fae2e0d949e5dd2adf3a826fdbd\": container with ID starting with 5b7eb549443a3de517394f02f8de3da132cc3fae2e0d949e5dd2adf3a826fdbd not found: ID does not exist" containerID="5b7eb549443a3de517394f02f8de3da132cc3fae2e0d949e5dd2adf3a826fdbd"
Jan 28 16:01:50 crc kubenswrapper[4811]: I0128 16:01:50.995824 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b7eb549443a3de517394f02f8de3da132cc3fae2e0d949e5dd2adf3a826fdbd"} err="failed to get container status \"5b7eb549443a3de517394f02f8de3da132cc3fae2e0d949e5dd2adf3a826fdbd\": rpc error: code = NotFound desc = could not find container \"5b7eb549443a3de517394f02f8de3da132cc3fae2e0d949e5dd2adf3a826fdbd\": container with ID starting with 5b7eb549443a3de517394f02f8de3da132cc3fae2e0d949e5dd2adf3a826fdbd not found: ID does not exist"
Jan 28 16:01:50 crc kubenswrapper[4811]: I0128 16:01:50.995853 4811 scope.go:117] "RemoveContainer" containerID="e8c99315a9ae4eab7e5614adf029405b57dd92dcd1e2bfad02b77e7d9b2c1695"
Jan 28 16:01:50 crc kubenswrapper[4811]: E0128 16:01:50.996122 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8c99315a9ae4eab7e5614adf029405b57dd92dcd1e2bfad02b77e7d9b2c1695\": container with ID starting with e8c99315a9ae4eab7e5614adf029405b57dd92dcd1e2bfad02b77e7d9b2c1695 not found: ID does not exist" containerID="e8c99315a9ae4eab7e5614adf029405b57dd92dcd1e2bfad02b77e7d9b2c1695"
Jan 28 16:01:50 crc kubenswrapper[4811]: I0128 16:01:50.996159 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8c99315a9ae4eab7e5614adf029405b57dd92dcd1e2bfad02b77e7d9b2c1695"} err="failed to get container status \"e8c99315a9ae4eab7e5614adf029405b57dd92dcd1e2bfad02b77e7d9b2c1695\": rpc error: code = NotFound desc = could not find container \"e8c99315a9ae4eab7e5614adf029405b57dd92dcd1e2bfad02b77e7d9b2c1695\": container with ID starting with e8c99315a9ae4eab7e5614adf029405b57dd92dcd1e2bfad02b77e7d9b2c1695 not found: ID does not exist"
Jan 28 16:01:50 crc kubenswrapper[4811]: I0128 16:01:50.996174 4811 scope.go:117] "RemoveContainer" containerID="dc430950962dbffeaac1a77915ee98387e0a4744147962d8414cd6864fb36463"
Jan 28 16:01:50 crc kubenswrapper[4811]: E0128 16:01:50.996484 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc430950962dbffeaac1a77915ee98387e0a4744147962d8414cd6864fb36463\": container with ID starting with dc430950962dbffeaac1a77915ee98387e0a4744147962d8414cd6864fb36463 not found: ID does not exist" containerID="dc430950962dbffeaac1a77915ee98387e0a4744147962d8414cd6864fb36463"
Jan 28 16:01:50 crc kubenswrapper[4811]: I0128 16:01:50.996526 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc430950962dbffeaac1a77915ee98387e0a4744147962d8414cd6864fb36463"} err="failed to get container status \"dc430950962dbffeaac1a77915ee98387e0a4744147962d8414cd6864fb36463\": rpc error: code = NotFound desc = could not find container \"dc430950962dbffeaac1a77915ee98387e0a4744147962d8414cd6864fb36463\": container with ID starting with dc430950962dbffeaac1a77915ee98387e0a4744147962d8414cd6864fb36463 not found: ID does not exist"
Jan 28 16:01:51 crc kubenswrapper[4811]: I0128 16:01:51.058330 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-86cb77c54b-ldbdm"]
Jan 28 16:01:51 crc kubenswrapper[4811]: E0128 16:01:51.058655 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ea1ca4c-4c40-424d-b539-4e0330a9f5fe" containerName="registry-server"
Jan 28 16:01:51 crc kubenswrapper[4811]: I0128 16:01:51.058676 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ea1ca4c-4c40-424d-b539-4e0330a9f5fe" containerName="registry-server"
Jan 28 16:01:51 crc kubenswrapper[4811]: E0128 16:01:51.058690 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ea1ca4c-4c40-424d-b539-4e0330a9f5fe" containerName="extract-utilities"
Jan 28 16:01:51 crc kubenswrapper[4811]: I0128 16:01:51.058698 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ea1ca4c-4c40-424d-b539-4e0330a9f5fe" containerName="extract-utilities"
Jan 28 16:01:51 crc kubenswrapper[4811]: E0128 16:01:51.058714 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ea1ca4c-4c40-424d-b539-4e0330a9f5fe" containerName="extract-content"
Jan 28 16:01:51 crc kubenswrapper[4811]: I0128 16:01:51.058722 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ea1ca4c-4c40-424d-b539-4e0330a9f5fe" containerName="extract-content"
Jan 28 16:01:51 crc kubenswrapper[4811]: I0128 16:01:51.058867 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ea1ca4c-4c40-424d-b539-4e0330a9f5fe" containerName="registry-server"
Jan 28 16:01:51 crc kubenswrapper[4811]: I0128 16:01:51.059358 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-ldbdm"
Jan 28 16:01:51 crc kubenswrapper[4811]: I0128 16:01:51.063734 4811 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-vmmmv"
Jan 28 16:01:51 crc kubenswrapper[4811]: I0128 16:01:51.082147 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-ldbdm"]
Jan 28 16:01:51 crc kubenswrapper[4811]: I0128 16:01:51.161426 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d01d6ff4-be2c-42c2-92a6-fe8550d989b8-bound-sa-token\") pod \"cert-manager-86cb77c54b-ldbdm\" (UID: \"d01d6ff4-be2c-42c2-92a6-fe8550d989b8\") " pod="cert-manager/cert-manager-86cb77c54b-ldbdm"
Jan 28 16:01:51 crc kubenswrapper[4811]: I0128 16:01:51.161837 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbhnz\" (UniqueName: \"kubernetes.io/projected/d01d6ff4-be2c-42c2-92a6-fe8550d989b8-kube-api-access-bbhnz\") pod \"cert-manager-86cb77c54b-ldbdm\" (UID: \"d01d6ff4-be2c-42c2-92a6-fe8550d989b8\") " pod="cert-manager/cert-manager-86cb77c54b-ldbdm"
Jan 28 16:01:51 crc kubenswrapper[4811]: I0128 16:01:51.263207 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbhnz\" (UniqueName: \"kubernetes.io/projected/d01d6ff4-be2c-42c2-92a6-fe8550d989b8-kube-api-access-bbhnz\") pod \"cert-manager-86cb77c54b-ldbdm\" (UID: \"d01d6ff4-be2c-42c2-92a6-fe8550d989b8\") " pod="cert-manager/cert-manager-86cb77c54b-ldbdm"
Jan 28 16:01:51 crc kubenswrapper[4811]: I0128 16:01:51.263259 4811 reconciler_common.go:218]
"operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d01d6ff4-be2c-42c2-92a6-fe8550d989b8-bound-sa-token\") pod \"cert-manager-86cb77c54b-ldbdm\" (UID: \"d01d6ff4-be2c-42c2-92a6-fe8550d989b8\") " pod="cert-manager/cert-manager-86cb77c54b-ldbdm" Jan 28 16:01:51 crc kubenswrapper[4811]: I0128 16:01:51.279030 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d01d6ff4-be2c-42c2-92a6-fe8550d989b8-bound-sa-token\") pod \"cert-manager-86cb77c54b-ldbdm\" (UID: \"d01d6ff4-be2c-42c2-92a6-fe8550d989b8\") " pod="cert-manager/cert-manager-86cb77c54b-ldbdm" Jan 28 16:01:51 crc kubenswrapper[4811]: I0128 16:01:51.279197 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbhnz\" (UniqueName: \"kubernetes.io/projected/d01d6ff4-be2c-42c2-92a6-fe8550d989b8-kube-api-access-bbhnz\") pod \"cert-manager-86cb77c54b-ldbdm\" (UID: \"d01d6ff4-be2c-42c2-92a6-fe8550d989b8\") " pod="cert-manager/cert-manager-86cb77c54b-ldbdm" Jan 28 16:01:51 crc kubenswrapper[4811]: I0128 16:01:51.388685 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-ldbdm" Jan 28 16:01:51 crc kubenswrapper[4811]: I0128 16:01:51.804413 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-ldbdm"] Jan 28 16:01:51 crc kubenswrapper[4811]: I0128 16:01:51.890816 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5kvmx" Jan 28 16:01:51 crc kubenswrapper[4811]: I0128 16:01:51.920678 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-ldbdm" event={"ID":"d01d6ff4-be2c-42c2-92a6-fe8550d989b8","Type":"ContainerStarted","Data":"b16893bc010fd5b5d782cbd793de7a93abd2563473f17f57d7389255ddae6e46"} Jan 28 16:01:51 crc kubenswrapper[4811]: I0128 16:01:51.930466 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5kvmx" Jan 28 16:01:52 crc kubenswrapper[4811]: I0128 16:01:52.365167 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ea1ca4c-4c40-424d-b539-4e0330a9f5fe" path="/var/lib/kubelet/pods/2ea1ca4c-4c40-424d-b539-4e0330a9f5fe/volumes" Jan 28 16:01:52 crc kubenswrapper[4811]: I0128 16:01:52.883716 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5kvmx"] Jan 28 16:01:52 crc kubenswrapper[4811]: I0128 16:01:52.932973 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-ldbdm" event={"ID":"d01d6ff4-be2c-42c2-92a6-fe8550d989b8","Type":"ContainerStarted","Data":"d65eab2baa3ad40d1c11257949de092af9dc7aec54799c53c1e0ab0757bcf610"} Jan 28 16:01:52 crc kubenswrapper[4811]: I0128 16:01:52.933172 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5kvmx" podUID="56ad18d3-c01d-49eb-9732-b14be97d42bb" containerName="registry-server" containerID="cri-o://4c9b90d556ffe4f8afb6d7e9cd9e56dffa33c46f1abd4d2646f98e42a60ef64d" gracePeriod=2 Jan 28 16:01:52 crc kubenswrapper[4811]: I0128 16:01:52.958122 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-86cb77c54b-ldbdm" podStartSLOduration=1.958095967 podStartE2EDuration="1.958095967s" podCreationTimestamp="2026-01-28 16:01:51 
+0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:01:52.952001422 +0000 UTC m=+1005.706365015" watchObservedRunningTime="2026-01-28 16:01:52.958095967 +0000 UTC m=+1005.712459560" Jan 28 16:01:53 crc kubenswrapper[4811]: I0128 16:01:53.349209 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5kvmx" Jan 28 16:01:53 crc kubenswrapper[4811]: I0128 16:01:53.500161 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56ad18d3-c01d-49eb-9732-b14be97d42bb-utilities\") pod \"56ad18d3-c01d-49eb-9732-b14be97d42bb\" (UID: \"56ad18d3-c01d-49eb-9732-b14be97d42bb\") " Jan 28 16:01:53 crc kubenswrapper[4811]: I0128 16:01:53.500274 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56ad18d3-c01d-49eb-9732-b14be97d42bb-catalog-content\") pod \"56ad18d3-c01d-49eb-9732-b14be97d42bb\" (UID: \"56ad18d3-c01d-49eb-9732-b14be97d42bb\") " Jan 28 16:01:53 crc kubenswrapper[4811]: I0128 16:01:53.500422 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mq6nb\" (UniqueName: \"kubernetes.io/projected/56ad18d3-c01d-49eb-9732-b14be97d42bb-kube-api-access-mq6nb\") pod \"56ad18d3-c01d-49eb-9732-b14be97d42bb\" (UID: \"56ad18d3-c01d-49eb-9732-b14be97d42bb\") " Jan 28 16:01:53 crc kubenswrapper[4811]: I0128 16:01:53.503978 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56ad18d3-c01d-49eb-9732-b14be97d42bb-utilities" (OuterVolumeSpecName: "utilities") pod "56ad18d3-c01d-49eb-9732-b14be97d42bb" (UID: "56ad18d3-c01d-49eb-9732-b14be97d42bb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:01:53 crc kubenswrapper[4811]: I0128 16:01:53.510912 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56ad18d3-c01d-49eb-9732-b14be97d42bb-kube-api-access-mq6nb" (OuterVolumeSpecName: "kube-api-access-mq6nb") pod "56ad18d3-c01d-49eb-9732-b14be97d42bb" (UID: "56ad18d3-c01d-49eb-9732-b14be97d42bb"). InnerVolumeSpecName "kube-api-access-mq6nb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:01:53 crc kubenswrapper[4811]: I0128 16:01:53.554560 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56ad18d3-c01d-49eb-9732-b14be97d42bb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "56ad18d3-c01d-49eb-9732-b14be97d42bb" (UID: "56ad18d3-c01d-49eb-9732-b14be97d42bb"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:01:53 crc kubenswrapper[4811]: I0128 16:01:53.602567 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56ad18d3-c01d-49eb-9732-b14be97d42bb-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 16:01:53 crc kubenswrapper[4811]: I0128 16:01:53.602621 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56ad18d3-c01d-49eb-9732-b14be97d42bb-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 16:01:53 crc kubenswrapper[4811]: I0128 16:01:53.602644 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mq6nb\" (UniqueName: \"kubernetes.io/projected/56ad18d3-c01d-49eb-9732-b14be97d42bb-kube-api-access-mq6nb\") on node \"crc\" DevicePath \"\"" Jan 28 16:01:53 crc kubenswrapper[4811]: I0128 16:01:53.942157 4811 generic.go:334] "Generic (PLEG): container finished" podID="56ad18d3-c01d-49eb-9732-b14be97d42bb" containerID="4c9b90d556ffe4f8afb6d7e9cd9e56dffa33c46f1abd4d2646f98e42a60ef64d" exitCode=0 Jan 28 16:01:53 crc kubenswrapper[4811]: I0128 16:01:53.943866 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5kvmx" event={"ID":"56ad18d3-c01d-49eb-9732-b14be97d42bb","Type":"ContainerDied","Data":"4c9b90d556ffe4f8afb6d7e9cd9e56dffa33c46f1abd4d2646f98e42a60ef64d"} Jan 28 16:01:53 crc kubenswrapper[4811]: I0128 16:01:53.943920 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5kvmx" event={"ID":"56ad18d3-c01d-49eb-9732-b14be97d42bb","Type":"ContainerDied","Data":"17e6aca24c342de687e03a4d10e6cc042aca83a9dbbc17d93a5746a6868c8059"} Jan 28 16:01:53 crc kubenswrapper[4811]: I0128 16:01:53.943927 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5kvmx" Jan 28 16:01:53 crc kubenswrapper[4811]: I0128 16:01:53.943953 4811 scope.go:117] "RemoveContainer" containerID="4c9b90d556ffe4f8afb6d7e9cd9e56dffa33c46f1abd4d2646f98e42a60ef64d" Jan 28 16:01:53 crc kubenswrapper[4811]: I0128 16:01:53.962254 4811 scope.go:117] "RemoveContainer" containerID="0e132d74f3d0e212d902f1614ddcf93ebb4c4ee86793a19631c7da342a733fc9" Jan 28 16:01:53 crc kubenswrapper[4811]: I0128 16:01:53.992355 4811 scope.go:117] "RemoveContainer" containerID="e04f4452f5d06c04948ec5bb83df48b65930e4dc072fd73410672d000db47200" Jan 28 16:01:53 crc kubenswrapper[4811]: I0128 16:01:53.994326 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5kvmx"] Jan 28 16:01:53 crc kubenswrapper[4811]: I0128 16:01:53.999564 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5kvmx"] Jan 28 16:01:54 crc kubenswrapper[4811]: I0128 16:01:54.018259 4811 scope.go:117] "RemoveContainer" containerID="4c9b90d556ffe4f8afb6d7e9cd9e56dffa33c46f1abd4d2646f98e42a60ef64d" Jan 28 16:01:54 crc kubenswrapper[4811]: E0128 16:01:54.018649 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c9b90d556ffe4f8afb6d7e9cd9e56dffa33c46f1abd4d2646f98e42a60ef64d\": container with ID starting with 4c9b90d556ffe4f8afb6d7e9cd9e56dffa33c46f1abd4d2646f98e42a60ef64d not found: ID does not exist" containerID="4c9b90d556ffe4f8afb6d7e9cd9e56dffa33c46f1abd4d2646f98e42a60ef64d" Jan 28 16:01:54 crc kubenswrapper[4811]: I0128 16:01:54.018684 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c9b90d556ffe4f8afb6d7e9cd9e56dffa33c46f1abd4d2646f98e42a60ef64d"} err="failed to get container status \"4c9b90d556ffe4f8afb6d7e9cd9e56dffa33c46f1abd4d2646f98e42a60ef64d\": rpc error: code = NotFound desc = could not find container \"4c9b90d556ffe4f8afb6d7e9cd9e56dffa33c46f1abd4d2646f98e42a60ef64d\": container with ID starting with 4c9b90d556ffe4f8afb6d7e9cd9e56dffa33c46f1abd4d2646f98e42a60ef64d not found: ID does not exist" Jan 28 16:01:54 crc kubenswrapper[4811]: I0128 16:01:54.018707 4811 scope.go:117] "RemoveContainer" containerID="0e132d74f3d0e212d902f1614ddcf93ebb4c4ee86793a19631c7da342a733fc9" Jan 28 16:01:54 crc kubenswrapper[4811]: E0128 16:01:54.018964 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e132d74f3d0e212d902f1614ddcf93ebb4c4ee86793a19631c7da342a733fc9\": container with ID starting with 0e132d74f3d0e212d902f1614ddcf93ebb4c4ee86793a19631c7da342a733fc9 not found: ID does not exist" containerID="0e132d74f3d0e212d902f1614ddcf93ebb4c4ee86793a19631c7da342a733fc9" Jan 28 16:01:54 crc kubenswrapper[4811]: I0128 16:01:54.019004 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e132d74f3d0e212d902f1614ddcf93ebb4c4ee86793a19631c7da342a733fc9"} err="failed to get container status \"0e132d74f3d0e212d902f1614ddcf93ebb4c4ee86793a19631c7da342a733fc9\": rpc error: code = NotFound desc = could not find container \"0e132d74f3d0e212d902f1614ddcf93ebb4c4ee86793a19631c7da342a733fc9\": container with ID starting with 0e132d74f3d0e212d902f1614ddcf93ebb4c4ee86793a19631c7da342a733fc9 not found: ID does not exist" Jan 28 16:01:54 crc kubenswrapper[4811]: I0128 16:01:54.019029 4811 scope.go:117] "RemoveContainer" 
containerID="e04f4452f5d06c04948ec5bb83df48b65930e4dc072fd73410672d000db47200" Jan 28 16:01:54 crc kubenswrapper[4811]: E0128 16:01:54.019246 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e04f4452f5d06c04948ec5bb83df48b65930e4dc072fd73410672d000db47200\": container with ID starting with e04f4452f5d06c04948ec5bb83df48b65930e4dc072fd73410672d000db47200 not found: ID does not exist" containerID="e04f4452f5d06c04948ec5bb83df48b65930e4dc072fd73410672d000db47200" Jan 28 16:01:54 crc kubenswrapper[4811]: I0128 16:01:54.019274 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e04f4452f5d06c04948ec5bb83df48b65930e4dc072fd73410672d000db47200"} err="failed to get container status \"e04f4452f5d06c04948ec5bb83df48b65930e4dc072fd73410672d000db47200\": rpc error: code = NotFound desc = could not find container \"e04f4452f5d06c04948ec5bb83df48b65930e4dc072fd73410672d000db47200\": container with ID starting with e04f4452f5d06c04948ec5bb83df48b65930e4dc072fd73410672d000db47200 not found: ID does not exist" Jan 28 16:01:54 crc kubenswrapper[4811]: I0128 16:01:54.349067 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56ad18d3-c01d-49eb-9732-b14be97d42bb" path="/var/lib/kubelet/pods/56ad18d3-c01d-49eb-9732-b14be97d42bb/volumes" Jan 28 16:01:56 crc kubenswrapper[4811]: I0128 16:01:56.637163 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-f4fb5df64-thcwr" Jan 28 16:02:00 crc kubenswrapper[4811]: I0128 16:02:00.021403 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-wtvcq"] Jan 28 16:02:00 crc kubenswrapper[4811]: E0128 16:02:00.021919 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56ad18d3-c01d-49eb-9732-b14be97d42bb" containerName="extract-utilities" Jan 28 16:02:00 crc kubenswrapper[4811]: I0128 16:02:00.021930 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="56ad18d3-c01d-49eb-9732-b14be97d42bb" containerName="extract-utilities" Jan 28 16:02:00 crc kubenswrapper[4811]: E0128 16:02:00.021944 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56ad18d3-c01d-49eb-9732-b14be97d42bb" containerName="extract-content" Jan 28 16:02:00 crc kubenswrapper[4811]: I0128 16:02:00.021949 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="56ad18d3-c01d-49eb-9732-b14be97d42bb" containerName="extract-content" Jan 28 16:02:00 crc kubenswrapper[4811]: E0128 16:02:00.021960 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56ad18d3-c01d-49eb-9732-b14be97d42bb" containerName="registry-server" Jan 28 16:02:00 crc kubenswrapper[4811]: I0128 16:02:00.021967 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="56ad18d3-c01d-49eb-9732-b14be97d42bb" containerName="registry-server" Jan 28 16:02:00 crc kubenswrapper[4811]: I0128 16:02:00.022074 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="56ad18d3-c01d-49eb-9732-b14be97d42bb" containerName="registry-server" Jan 28 16:02:00 crc kubenswrapper[4811]: I0128 16:02:00.022508 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-wtvcq" Jan 28 16:02:00 crc kubenswrapper[4811]: I0128 16:02:00.025721 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-rff58" Jan 28 16:02:00 crc kubenswrapper[4811]: I0128 16:02:00.025721 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Jan 28 16:02:00 crc kubenswrapper[4811]: I0128 16:02:00.026726 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Jan 28 16:02:00 crc kubenswrapper[4811]: I0128 16:02:00.034334 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-wtvcq"] Jan 28 16:02:00 crc kubenswrapper[4811]: I0128 16:02:00.188642 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pl7nc\" (UniqueName: \"kubernetes.io/projected/ebb1b765-ecd6-47db-89fe-3eb0b7a95b03-kube-api-access-pl7nc\") pod \"openstack-operator-index-wtvcq\" (UID: \"ebb1b765-ecd6-47db-89fe-3eb0b7a95b03\") " pod="openstack-operators/openstack-operator-index-wtvcq" Jan 28 16:02:00 crc kubenswrapper[4811]: I0128 16:02:00.289763 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pl7nc\" (UniqueName: \"kubernetes.io/projected/ebb1b765-ecd6-47db-89fe-3eb0b7a95b03-kube-api-access-pl7nc\") pod \"openstack-operator-index-wtvcq\" (UID: \"ebb1b765-ecd6-47db-89fe-3eb0b7a95b03\") " pod="openstack-operators/openstack-operator-index-wtvcq" Jan 28 16:02:00 crc kubenswrapper[4811]: I0128 16:02:00.308974 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pl7nc\" (UniqueName: \"kubernetes.io/projected/ebb1b765-ecd6-47db-89fe-3eb0b7a95b03-kube-api-access-pl7nc\") pod \"openstack-operator-index-wtvcq\" (UID: \"ebb1b765-ecd6-47db-89fe-3eb0b7a95b03\") " pod="openstack-operators/openstack-operator-index-wtvcq" Jan 28 16:02:00 crc kubenswrapper[4811]: I0128 16:02:00.337294 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-wtvcq" Jan 28 16:02:00 crc kubenswrapper[4811]: I0128 16:02:00.781121 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-wtvcq"] Jan 28 16:02:00 crc kubenswrapper[4811]: W0128 16:02:00.787961 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podebb1b765_ecd6_47db_89fe_3eb0b7a95b03.slice/crio-faf3a81002933703587b75dbba5bd7bdd98ed33026f63e968de11d47eef30899 WatchSource:0}: Error finding container faf3a81002933703587b75dbba5bd7bdd98ed33026f63e968de11d47eef30899: Status 404 returned error can't find the container with id faf3a81002933703587b75dbba5bd7bdd98ed33026f63e968de11d47eef30899 Jan 28 16:02:00 crc kubenswrapper[4811]: I0128 16:02:00.988661 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-wtvcq" event={"ID":"ebb1b765-ecd6-47db-89fe-3eb0b7a95b03","Type":"ContainerStarted","Data":"faf3a81002933703587b75dbba5bd7bdd98ed33026f63e968de11d47eef30899"} Jan 28 16:02:03 crc kubenswrapper[4811]: I0128 16:02:03.001354 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-wtvcq" event={"ID":"ebb1b765-ecd6-47db-89fe-3eb0b7a95b03","Type":"ContainerStarted","Data":"5451e73732b07761ebc2aab1b851645ed9742929e3cc290e3815608f92fb36dc"} Jan 28 16:02:03 crc kubenswrapper[4811]: I0128 16:02:03.021414 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-wtvcq" podStartSLOduration=2.839710514 podStartE2EDuration="4.02139478s" podCreationTimestamp="2026-01-28 16:01:59 +0000 UTC" firstStartedPulling="2026-01-28 16:02:00.792680118 +0000 UTC m=+1013.547043711" lastFinishedPulling="2026-01-28 16:02:01.974364394 +0000 UTC m=+1014.728727977" observedRunningTime="2026-01-28 16:02:03.015023367 +0000 UTC m=+1015.769386950" watchObservedRunningTime="2026-01-28 16:02:03.02139478 +0000 UTC m=+1015.775758373" Jan 28 16:02:03 crc kubenswrapper[4811]: I0128 16:02:03.992782 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-wtvcq"] Jan 28 16:02:04 crc kubenswrapper[4811]: I0128 16:02:04.800746 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-v8ftk"] Jan 28 16:02:04 crc kubenswrapper[4811]: I0128 16:02:04.803195 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-v8ftk" Jan 28 16:02:04 crc kubenswrapper[4811]: I0128 16:02:04.824572 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-v8ftk"] Jan 28 16:02:04 crc kubenswrapper[4811]: I0128 16:02:04.853194 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7c4qx\" (UniqueName: \"kubernetes.io/projected/249e1e57-08ef-4961-b1a0-f95207b38667-kube-api-access-7c4qx\") pod \"openstack-operator-index-v8ftk\" (UID: \"249e1e57-08ef-4961-b1a0-f95207b38667\") " pod="openstack-operators/openstack-operator-index-v8ftk" Jan 28 16:02:04 crc kubenswrapper[4811]: I0128 16:02:04.955241 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7c4qx\" (UniqueName: \"kubernetes.io/projected/249e1e57-08ef-4961-b1a0-f95207b38667-kube-api-access-7c4qx\") pod \"openstack-operator-index-v8ftk\" (UID: \"249e1e57-08ef-4961-b1a0-f95207b38667\") " pod="openstack-operators/openstack-operator-index-v8ftk" Jan 28 16:02:04 crc kubenswrapper[4811]: I0128 16:02:04.985651 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7c4qx\" (UniqueName: \"kubernetes.io/projected/249e1e57-08ef-4961-b1a0-f95207b38667-kube-api-access-7c4qx\") pod \"openstack-operator-index-v8ftk\" (UID: \"249e1e57-08ef-4961-b1a0-f95207b38667\") " pod="openstack-operators/openstack-operator-index-v8ftk" Jan 28 16:02:05 crc kubenswrapper[4811]: I0128 16:02:05.017369 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-wtvcq" podUID="ebb1b765-ecd6-47db-89fe-3eb0b7a95b03" containerName="registry-server" containerID="cri-o://5451e73732b07761ebc2aab1b851645ed9742929e3cc290e3815608f92fb36dc" gracePeriod=2 Jan 28 16:02:05 crc kubenswrapper[4811]: I0128 16:02:05.130589 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-v8ftk" Jan 28 16:02:05 crc kubenswrapper[4811]: I0128 16:02:05.512912 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-wtvcq" Jan 28 16:02:05 crc kubenswrapper[4811]: I0128 16:02:05.561032 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-v8ftk"] Jan 28 16:02:05 crc kubenswrapper[4811]: I0128 16:02:05.564987 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pl7nc\" (UniqueName: \"kubernetes.io/projected/ebb1b765-ecd6-47db-89fe-3eb0b7a95b03-kube-api-access-pl7nc\") pod \"ebb1b765-ecd6-47db-89fe-3eb0b7a95b03\" (UID: \"ebb1b765-ecd6-47db-89fe-3eb0b7a95b03\") " Jan 28 16:02:05 crc kubenswrapper[4811]: W0128 16:02:05.565119 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod249e1e57_08ef_4961_b1a0_f95207b38667.slice/crio-703c7b908a3048edb510802abcbf7be003a6cb7e4f0d54a03da1982f21c4c920 WatchSource:0}: Error finding container 703c7b908a3048edb510802abcbf7be003a6cb7e4f0d54a03da1982f21c4c920: Status 404 returned error can't find the container with id 703c7b908a3048edb510802abcbf7be003a6cb7e4f0d54a03da1982f21c4c920 Jan 28 16:02:05 crc kubenswrapper[4811]: I0128 16:02:05.570238 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebb1b765-ecd6-47db-89fe-3eb0b7a95b03-kube-api-access-pl7nc" (OuterVolumeSpecName: "kube-api-access-pl7nc") pod "ebb1b765-ecd6-47db-89fe-3eb0b7a95b03" (UID: "ebb1b765-ecd6-47db-89fe-3eb0b7a95b03"). InnerVolumeSpecName "kube-api-access-pl7nc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:02:05 crc kubenswrapper[4811]: I0128 16:02:05.666981 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pl7nc\" (UniqueName: \"kubernetes.io/projected/ebb1b765-ecd6-47db-89fe-3eb0b7a95b03-kube-api-access-pl7nc\") on node \"crc\" DevicePath \"\"" Jan 28 16:02:06 crc kubenswrapper[4811]: I0128 16:02:06.033016 4811 generic.go:334] "Generic (PLEG): container finished" podID="ebb1b765-ecd6-47db-89fe-3eb0b7a95b03" containerID="5451e73732b07761ebc2aab1b851645ed9742929e3cc290e3815608f92fb36dc" exitCode=0 Jan 28 16:02:06 crc kubenswrapper[4811]: I0128 16:02:06.033100 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-wtvcq" Jan 28 16:02:06 crc kubenswrapper[4811]: I0128 16:02:06.033176 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-wtvcq" event={"ID":"ebb1b765-ecd6-47db-89fe-3eb0b7a95b03","Type":"ContainerDied","Data":"5451e73732b07761ebc2aab1b851645ed9742929e3cc290e3815608f92fb36dc"} Jan 28 16:02:06 crc kubenswrapper[4811]: I0128 16:02:06.033237 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-wtvcq" event={"ID":"ebb1b765-ecd6-47db-89fe-3eb0b7a95b03","Type":"ContainerDied","Data":"faf3a81002933703587b75dbba5bd7bdd98ed33026f63e968de11d47eef30899"} Jan 28 16:02:06 crc kubenswrapper[4811]: I0128 16:02:06.033292 4811 scope.go:117] "RemoveContainer" containerID="5451e73732b07761ebc2aab1b851645ed9742929e3cc290e3815608f92fb36dc" Jan 28 16:02:06 crc kubenswrapper[4811]: I0128 16:02:06.034728 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-v8ftk" event={"ID":"249e1e57-08ef-4961-b1a0-f95207b38667","Type":"ContainerStarted","Data":"703c7b908a3048edb510802abcbf7be003a6cb7e4f0d54a03da1982f21c4c920"} Jan 28 16:02:06 crc kubenswrapper[4811]: I0128 16:02:06.078220 4811 scope.go:117] "RemoveContainer" containerID="5451e73732b07761ebc2aab1b851645ed9742929e3cc290e3815608f92fb36dc" Jan 28 16:02:06 crc kubenswrapper[4811]: E0128 16:02:06.079205 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5451e73732b07761ebc2aab1b851645ed9742929e3cc290e3815608f92fb36dc\": container with ID starting with 5451e73732b07761ebc2aab1b851645ed9742929e3cc290e3815608f92fb36dc not found: ID does not exist" containerID="5451e73732b07761ebc2aab1b851645ed9742929e3cc290e3815608f92fb36dc" Jan 28 16:02:06 crc kubenswrapper[4811]: I0128 16:02:06.079265 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5451e73732b07761ebc2aab1b851645ed9742929e3cc290e3815608f92fb36dc"} err="failed to get container status \"5451e73732b07761ebc2aab1b851645ed9742929e3cc290e3815608f92fb36dc\": rpc error: code = NotFound desc = could not find container \"5451e73732b07761ebc2aab1b851645ed9742929e3cc290e3815608f92fb36dc\": container with ID starting with 5451e73732b07761ebc2aab1b851645ed9742929e3cc290e3815608f92fb36dc not found: ID does not exist" Jan 28 16:02:06 crc kubenswrapper[4811]: I0128 16:02:06.080843 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-wtvcq"] Jan 28 16:02:06 crc kubenswrapper[4811]: I0128 16:02:06.085530 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-wtvcq"] Jan 28 16:02:06 crc kubenswrapper[4811]: I0128 16:02:06.348657 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebb1b765-ecd6-47db-89fe-3eb0b7a95b03" path="/var/lib/kubelet/pods/ebb1b765-ecd6-47db-89fe-3eb0b7a95b03/volumes" Jan 28 16:02:07 crc kubenswrapper[4811]: I0128 16:02:07.042765 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-v8ftk" event={"ID":"249e1e57-08ef-4961-b1a0-f95207b38667","Type":"ContainerStarted","Data":"2743d0a9dc967ed8c67918d5429719cccdc71cd6d44e51a9945ecaa1b2a1917a"} Jan 28 16:02:07 crc kubenswrapper[4811]: I0128 16:02:07.056935 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/openstack-operator-index-v8ftk" podStartSLOduration=2.523578348 podStartE2EDuration="3.056914707s" podCreationTimestamp="2026-01-28 16:02:04 +0000 UTC" firstStartedPulling="2026-01-28 16:02:05.569050103 +0000 UTC m=+1018.323413686" lastFinishedPulling="2026-01-28 16:02:06.102386462 +0000 UTC m=+1018.856750045" observedRunningTime="2026-01-28 16:02:07.054931702 +0000 UTC m=+1019.809295285" watchObservedRunningTime="2026-01-28 16:02:07.056914707 +0000 UTC m=+1019.811278290" Jan 28 16:02:15 crc kubenswrapper[4811]: I0128 16:02:15.130933 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-v8ftk" Jan 28 16:02:15 crc kubenswrapper[4811]: I0128 16:02:15.132084 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-v8ftk" Jan 28 16:02:15 crc kubenswrapper[4811]: I0128 16:02:15.175634 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-v8ftk" Jan 28 16:02:16 crc kubenswrapper[4811]: I0128 16:02:16.160251 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-v8ftk" Jan 28 16:02:21 crc kubenswrapper[4811]: I0128 16:02:21.653682 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc"] Jan 28 16:02:21 crc kubenswrapper[4811]: E0128 16:02:21.654321 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebb1b765-ecd6-47db-89fe-3eb0b7a95b03" containerName="registry-server" Jan 28 16:02:21 crc kubenswrapper[4811]: I0128 16:02:21.654338 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebb1b765-ecd6-47db-89fe-3eb0b7a95b03" containerName="registry-server" Jan 28 16:02:21 crc kubenswrapper[4811]: I0128 16:02:21.654527 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebb1b765-ecd6-47db-89fe-3eb0b7a95b03" containerName="registry-server" Jan 28 16:02:21 crc kubenswrapper[4811]: I0128 16:02:21.655774 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc" Jan 28 16:02:21 crc kubenswrapper[4811]: I0128 16:02:21.658283 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-rqg7h" Jan 28 16:02:21 crc kubenswrapper[4811]: I0128 16:02:21.670088 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc"] Jan 28 16:02:21 crc kubenswrapper[4811]: I0128 16:02:21.811396 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/93b3aa9c-9717-4f9d-b535-eceb93b7eb9b-util\") pod \"c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc\" (UID: \"93b3aa9c-9717-4f9d-b535-eceb93b7eb9b\") " pod="openstack-operators/c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc" Jan 28 16:02:21 crc kubenswrapper[4811]: I0128 16:02:21.811525 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/93b3aa9c-9717-4f9d-b535-eceb93b7eb9b-bundle\") pod \"c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc\" (UID: \"93b3aa9c-9717-4f9d-b535-eceb93b7eb9b\") " pod="openstack-operators/c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc" Jan 28 16:02:21 crc kubenswrapper[4811]: I0128 16:02:21.811570 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-km4b7\" (UniqueName: \"kubernetes.io/projected/93b3aa9c-9717-4f9d-b535-eceb93b7eb9b-kube-api-access-km4b7\") pod \"c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc\" (UID: \"93b3aa9c-9717-4f9d-b535-eceb93b7eb9b\") " pod="openstack-operators/c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc" Jan 28 16:02:21 crc kubenswrapper[4811]: I0128 16:02:21.913173 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/93b3aa9c-9717-4f9d-b535-eceb93b7eb9b-util\") pod \"c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc\" (UID: \"93b3aa9c-9717-4f9d-b535-eceb93b7eb9b\") " pod="openstack-operators/c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc" Jan 28 16:02:21 crc kubenswrapper[4811]: I0128 16:02:21.913234 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/93b3aa9c-9717-4f9d-b535-eceb93b7eb9b-bundle\") pod \"c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc\" (UID: \"93b3aa9c-9717-4f9d-b535-eceb93b7eb9b\") " pod="openstack-operators/c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc" Jan 28 16:02:21 crc kubenswrapper[4811]: I0128 16:02:21.913278 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-km4b7\" (UniqueName: \"kubernetes.io/projected/93b3aa9c-9717-4f9d-b535-eceb93b7eb9b-kube-api-access-km4b7\") pod \"c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc\" (UID: \"93b3aa9c-9717-4f9d-b535-eceb93b7eb9b\") " pod="openstack-operators/c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc" Jan 28 16:02:21 crc kubenswrapper[4811]: I0128 16:02:21.913978 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/93b3aa9c-9717-4f9d-b535-eceb93b7eb9b-bundle\") pod \"c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc\" (UID: \"93b3aa9c-9717-4f9d-b535-eceb93b7eb9b\") " pod="openstack-operators/c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc" Jan 28 16:02:21 crc kubenswrapper[4811]: I0128 16:02:21.913980 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/93b3aa9c-9717-4f9d-b535-eceb93b7eb9b-util\") pod \"c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc\" (UID: \"93b3aa9c-9717-4f9d-b535-eceb93b7eb9b\") " pod="openstack-operators/c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc" Jan 28 16:02:21 crc kubenswrapper[4811]: I0128 16:02:21.938347 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-km4b7\" (UniqueName: \"kubernetes.io/projected/93b3aa9c-9717-4f9d-b535-eceb93b7eb9b-kube-api-access-km4b7\") pod \"c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc\" (UID: \"93b3aa9c-9717-4f9d-b535-eceb93b7eb9b\") " pod="openstack-operators/c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc" Jan 28 16:02:21 crc kubenswrapper[4811]: I0128 16:02:21.973524 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc" Jan 28 16:02:22 crc kubenswrapper[4811]: I0128 16:02:22.398867 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc"] Jan 28 16:02:23 crc kubenswrapper[4811]: I0128 16:02:23.186907 4811 generic.go:334] "Generic (PLEG): container finished" podID="93b3aa9c-9717-4f9d-b535-eceb93b7eb9b" containerID="51b9a4892996e18c627fc4bb8a9bd05b5832b5c97305398cd7f98bed807923c8" exitCode=0 Jan 28 16:02:23 crc kubenswrapper[4811]: I0128 16:02:23.186992 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc" event={"ID":"93b3aa9c-9717-4f9d-b535-eceb93b7eb9b","Type":"ContainerDied","Data":"51b9a4892996e18c627fc4bb8a9bd05b5832b5c97305398cd7f98bed807923c8"} Jan 28 16:02:23 crc kubenswrapper[4811]: I0128 16:02:23.187050 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc" event={"ID":"93b3aa9c-9717-4f9d-b535-eceb93b7eb9b","Type":"ContainerStarted","Data":"9bf890fcfb17837df30dea795327f7c755cad219d86e128109781d49a5da748f"} Jan 28 16:02:24 crc kubenswrapper[4811]: I0128 16:02:24.194709 4811 generic.go:334] "Generic (PLEG): container finished" podID="93b3aa9c-9717-4f9d-b535-eceb93b7eb9b" containerID="ebd809bbaa69ddf13fd3b5c854a19107d509a0ad9d3d05b06fb5be70fe79b2ac" exitCode=0 Jan 28 16:02:24 crc kubenswrapper[4811]: I0128 16:02:24.194771 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc" event={"ID":"93b3aa9c-9717-4f9d-b535-eceb93b7eb9b","Type":"ContainerDied","Data":"ebd809bbaa69ddf13fd3b5c854a19107d509a0ad9d3d05b06fb5be70fe79b2ac"} Jan 28 16:02:25 crc kubenswrapper[4811]: I0128 16:02:25.202742 4811 generic.go:334] "Generic (PLEG): container finished" podID="93b3aa9c-9717-4f9d-b535-eceb93b7eb9b" containerID="e1893b7208047c06c7b1f7d03643c0cb27a079dfae081ae181b56c1ffa48eb40" exitCode=0 Jan 28 16:02:25 crc kubenswrapper[4811]: I0128 16:02:25.202780 4811 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc" event={"ID":"93b3aa9c-9717-4f9d-b535-eceb93b7eb9b","Type":"ContainerDied","Data":"e1893b7208047c06c7b1f7d03643c0cb27a079dfae081ae181b56c1ffa48eb40"} Jan 28 16:02:26 crc kubenswrapper[4811]: I0128 16:02:26.488980 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc" Jan 28 16:02:26 crc kubenswrapper[4811]: I0128 16:02:26.683404 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-km4b7\" (UniqueName: \"kubernetes.io/projected/93b3aa9c-9717-4f9d-b535-eceb93b7eb9b-kube-api-access-km4b7\") pod \"93b3aa9c-9717-4f9d-b535-eceb93b7eb9b\" (UID: \"93b3aa9c-9717-4f9d-b535-eceb93b7eb9b\") " Jan 28 16:02:26 crc kubenswrapper[4811]: I0128 16:02:26.683562 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/93b3aa9c-9717-4f9d-b535-eceb93b7eb9b-util\") pod \"93b3aa9c-9717-4f9d-b535-eceb93b7eb9b\" (UID: \"93b3aa9c-9717-4f9d-b535-eceb93b7eb9b\") " Jan 28 16:02:26 crc kubenswrapper[4811]: I0128 16:02:26.683606 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/93b3aa9c-9717-4f9d-b535-eceb93b7eb9b-bundle\") pod \"93b3aa9c-9717-4f9d-b535-eceb93b7eb9b\" (UID: \"93b3aa9c-9717-4f9d-b535-eceb93b7eb9b\") " Jan 28 16:02:26 crc kubenswrapper[4811]: I0128 16:02:26.684977 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/93b3aa9c-9717-4f9d-b535-eceb93b7eb9b-bundle" (OuterVolumeSpecName: "bundle") pod "93b3aa9c-9717-4f9d-b535-eceb93b7eb9b" (UID: "93b3aa9c-9717-4f9d-b535-eceb93b7eb9b"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:02:26 crc kubenswrapper[4811]: I0128 16:02:26.688776 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93b3aa9c-9717-4f9d-b535-eceb93b7eb9b-kube-api-access-km4b7" (OuterVolumeSpecName: "kube-api-access-km4b7") pod "93b3aa9c-9717-4f9d-b535-eceb93b7eb9b" (UID: "93b3aa9c-9717-4f9d-b535-eceb93b7eb9b"). InnerVolumeSpecName "kube-api-access-km4b7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:02:26 crc kubenswrapper[4811]: I0128 16:02:26.697589 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/93b3aa9c-9717-4f9d-b535-eceb93b7eb9b-util" (OuterVolumeSpecName: "util") pod "93b3aa9c-9717-4f9d-b535-eceb93b7eb9b" (UID: "93b3aa9c-9717-4f9d-b535-eceb93b7eb9b"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:02:26 crc kubenswrapper[4811]: I0128 16:02:26.784778 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-km4b7\" (UniqueName: \"kubernetes.io/projected/93b3aa9c-9717-4f9d-b535-eceb93b7eb9b-kube-api-access-km4b7\") on node \"crc\" DevicePath \"\"" Jan 28 16:02:26 crc kubenswrapper[4811]: I0128 16:02:26.785093 4811 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/93b3aa9c-9717-4f9d-b535-eceb93b7eb9b-util\") on node \"crc\" DevicePath \"\"" Jan 28 16:02:26 crc kubenswrapper[4811]: I0128 16:02:26.785105 4811 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/93b3aa9c-9717-4f9d-b535-eceb93b7eb9b-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:02:27 crc kubenswrapper[4811]: I0128 16:02:27.217777 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc" event={"ID":"93b3aa9c-9717-4f9d-b535-eceb93b7eb9b","Type":"ContainerDied","Data":"9bf890fcfb17837df30dea795327f7c755cad219d86e128109781d49a5da748f"} Jan 28 16:02:27 crc kubenswrapper[4811]: I0128 16:02:27.217835 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9bf890fcfb17837df30dea795327f7c755cad219d86e128109781d49a5da748f" Jan 28 16:02:27 crc kubenswrapper[4811]: I0128 16:02:27.217836 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc" Jan 28 16:02:33 crc kubenswrapper[4811]: I0128 16:02:33.809871 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-init-554f878768-n49vx"] Jan 28 16:02:33 crc kubenswrapper[4811]: E0128 16:02:33.810497 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93b3aa9c-9717-4f9d-b535-eceb93b7eb9b" containerName="extract" Jan 28 16:02:33 crc kubenswrapper[4811]: I0128 16:02:33.810513 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="93b3aa9c-9717-4f9d-b535-eceb93b7eb9b" containerName="extract" Jan 28 16:02:33 crc kubenswrapper[4811]: E0128 16:02:33.810527 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93b3aa9c-9717-4f9d-b535-eceb93b7eb9b" containerName="pull" Jan 28 16:02:33 crc kubenswrapper[4811]: I0128 16:02:33.810536 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="93b3aa9c-9717-4f9d-b535-eceb93b7eb9b" containerName="pull" Jan 28 16:02:33 crc kubenswrapper[4811]: E0128 16:02:33.810562 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93b3aa9c-9717-4f9d-b535-eceb93b7eb9b" containerName="util" Jan 28 16:02:33 crc kubenswrapper[4811]: I0128 16:02:33.810572 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="93b3aa9c-9717-4f9d-b535-eceb93b7eb9b" containerName="util" Jan 28 16:02:33 crc kubenswrapper[4811]: I0128 16:02:33.810716 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="93b3aa9c-9717-4f9d-b535-eceb93b7eb9b" containerName="extract" Jan 28 16:02:33 crc kubenswrapper[4811]: I0128 16:02:33.811200 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-554f878768-n49vx" Jan 28 16:02:33 crc kubenswrapper[4811]: I0128 16:02:33.813701 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-init-dockercfg-76hxp" Jan 28 16:02:33 crc kubenswrapper[4811]: I0128 16:02:33.852262 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-554f878768-n49vx"] Jan 28 16:02:33 crc kubenswrapper[4811]: I0128 16:02:33.928345 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9dx72\" (UniqueName: \"kubernetes.io/projected/aa18955f-899e-417a-8bd5-19a0b7e809e3-kube-api-access-9dx72\") pod \"openstack-operator-controller-init-554f878768-n49vx\" (UID: \"aa18955f-899e-417a-8bd5-19a0b7e809e3\") " pod="openstack-operators/openstack-operator-controller-init-554f878768-n49vx" Jan 28 16:02:34 crc kubenswrapper[4811]: I0128 16:02:34.029677 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9dx72\" (UniqueName: \"kubernetes.io/projected/aa18955f-899e-417a-8bd5-19a0b7e809e3-kube-api-access-9dx72\") pod \"openstack-operator-controller-init-554f878768-n49vx\" (UID: \"aa18955f-899e-417a-8bd5-19a0b7e809e3\") " pod="openstack-operators/openstack-operator-controller-init-554f878768-n49vx" Jan 28 16:02:34 crc kubenswrapper[4811]: I0128 16:02:34.050210 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9dx72\" (UniqueName: \"kubernetes.io/projected/aa18955f-899e-417a-8bd5-19a0b7e809e3-kube-api-access-9dx72\") pod \"openstack-operator-controller-init-554f878768-n49vx\" (UID: \"aa18955f-899e-417a-8bd5-19a0b7e809e3\") " pod="openstack-operators/openstack-operator-controller-init-554f878768-n49vx" Jan 28 16:02:34 crc kubenswrapper[4811]: I0128 16:02:34.129691 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-554f878768-n49vx" Jan 28 16:02:34 crc kubenswrapper[4811]: I0128 16:02:34.447789 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-554f878768-n49vx"] Jan 28 16:02:35 crc kubenswrapper[4811]: I0128 16:02:35.276872 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-554f878768-n49vx" event={"ID":"aa18955f-899e-417a-8bd5-19a0b7e809e3","Type":"ContainerStarted","Data":"6eb5c0c98cb9ea80baedb12f3d691e96aef5b68c56723dbd4206c8b2678bf397"} Jan 28 16:02:39 crc kubenswrapper[4811]: I0128 16:02:39.320328 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-554f878768-n49vx" event={"ID":"aa18955f-899e-417a-8bd5-19a0b7e809e3","Type":"ContainerStarted","Data":"fd3365d3112c7af3b975934edb5f12e518c163c0c7fad9b784d2ef963a49f7be"} Jan 28 16:02:39 crc kubenswrapper[4811]: I0128 16:02:39.321025 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-init-554f878768-n49vx" Jan 28 16:02:39 crc kubenswrapper[4811]: I0128 16:02:39.351193 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-init-554f878768-n49vx" podStartSLOduration=2.194874056 podStartE2EDuration="6.351174949s" podCreationTimestamp="2026-01-28 16:02:33 +0000 UTC" firstStartedPulling="2026-01-28 16:02:34.460125093 +0000 UTC m=+1047.214488676" lastFinishedPulling="2026-01-28 16:02:38.616425986 +0000 UTC m=+1051.370789569" observedRunningTime="2026-01-28 16:02:39.347101589 +0000 UTC m=+1052.101465182" watchObservedRunningTime="2026-01-28 16:02:39.351174949 +0000 UTC m=+1052.105538542" Jan 28 16:02:44 crc kubenswrapper[4811]: I0128 16:02:44.132907 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-init-554f878768-n49vx" Jan 28 16:03:03 crc kubenswrapper[4811]: I0128 16:03:03.088138 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 16:03:03 crc kubenswrapper[4811]: I0128 16:03:03.088776 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 16:03:21 crc kubenswrapper[4811]: I0128 16:03:21.889355 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7f86f8796f-flj9q"] Jan 28 16:03:21 crc kubenswrapper[4811]: I0128 16:03:21.890594 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7f86f8796f-flj9q" Jan 28 16:03:21 crc kubenswrapper[4811]: I0128 16:03:21.893371 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-qpbjs" Jan 28 16:03:21 crc kubenswrapper[4811]: I0128 16:03:21.900459 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-7478f7dbf9-zqk5g"] Jan 28 16:03:21 crc kubenswrapper[4811]: I0128 16:03:21.901268 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-7478f7dbf9-zqk5g" Jan 28 16:03:21 crc kubenswrapper[4811]: I0128 16:03:21.902976 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-fgrxq" Jan 28 16:03:21 crc kubenswrapper[4811]: I0128 16:03:21.906147 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7f86f8796f-flj9q"] Jan 28 16:03:21 crc kubenswrapper[4811]: I0128 16:03:21.915159 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-7478f7dbf9-zqk5g"] Jan 28 16:03:21 crc kubenswrapper[4811]: I0128 16:03:21.934125 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-b45d7bf98-9k7l8"] Jan 28 16:03:21 crc kubenswrapper[4811]: I0128 16:03:21.935107 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-9k7l8" Jan 28 16:03:21 crc kubenswrapper[4811]: I0128 16:03:21.938260 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-2vjgz" Jan 28 16:03:21 crc kubenswrapper[4811]: I0128 16:03:21.938477 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-b45d7bf98-9k7l8"] Jan 28 16:03:21 crc kubenswrapper[4811]: I0128 16:03:21.956918 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-78fdd796fd-drtqb"] Jan 28 16:03:21 crc kubenswrapper[4811]: I0128 16:03:21.957922 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-drtqb" Jan 28 16:03:21 crc kubenswrapper[4811]: I0128 16:03:21.962017 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-mjw7r" Jan 28 16:03:21 crc kubenswrapper[4811]: I0128 16:03:21.969820 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-594c8c9d5d-t4nkf"] Jan 28 16:03:21 crc kubenswrapper[4811]: I0128 16:03:21.970814 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-t4nkf" Jan 28 16:03:21 crc kubenswrapper[4811]: I0128 16:03:21.973285 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-j6hqw" Jan 28 16:03:21 crc kubenswrapper[4811]: I0128 16:03:21.977187 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jv5f\" (UniqueName: \"kubernetes.io/projected/5d2909d9-5499-4119-886c-d389257e85fa-kube-api-access-6jv5f\") pod \"cinder-operator-controller-manager-7478f7dbf9-zqk5g\" (UID: \"5d2909d9-5499-4119-886c-d389257e85fa\") " pod="openstack-operators/cinder-operator-controller-manager-7478f7dbf9-zqk5g" Jan 28 16:03:21 crc kubenswrapper[4811]: I0128 16:03:21.977233 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcbpg\" (UniqueName: \"kubernetes.io/projected/4621fc20-e68f-4c29-a01b-e62b2eda190c-kube-api-access-tcbpg\") pod \"glance-operator-controller-manager-78fdd796fd-drtqb\" (UID: \"4621fc20-e68f-4c29-a01b-e62b2eda190c\") " pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-drtqb" Jan 28 16:03:21 crc kubenswrapper[4811]: I0128 16:03:21.977294 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mz726\" (UniqueName: \"kubernetes.io/projected/f8bca6a7-6aca-4dd1-8c9d-a358f932eab3-kube-api-access-mz726\") pod \"designate-operator-controller-manager-b45d7bf98-9k7l8\" (UID: \"f8bca6a7-6aca-4dd1-8c9d-a358f932eab3\") " pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-9k7l8" Jan 28 16:03:21 crc kubenswrapper[4811]: I0128 16:03:21.977322 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rrlj8\" (UniqueName: \"kubernetes.io/projected/0cf7363c-9c82-4ab6-bd1c-d8cff679a3b3-kube-api-access-rrlj8\") pod \"barbican-operator-controller-manager-7f86f8796f-flj9q\" (UID: \"0cf7363c-9c82-4ab6-bd1c-d8cff679a3b3\") " pod="openstack-operators/barbican-operator-controller-manager-7f86f8796f-flj9q" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.004512 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-78fdd796fd-drtqb"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.013560 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-594c8c9d5d-t4nkf"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.026339 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-694cf4f878-92chv"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.027351 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-694cf4f878-92chv" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.031721 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.032491 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-hgkqv" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.041947 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-77d5c5b54f-q5dc6"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.042739 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-q5dc6" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.047381 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-kdndt" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.052173 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-598f7747c9-v4znb"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.053256 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-598f7747c9-v4znb" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.056251 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-xnp6f" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.062417 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-b8b6d4659-jxcrp"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.064333 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-jxcrp" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.066417 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-694cf4f878-92chv"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.070205 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-nzm2q" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.074655 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-77d5c5b54f-q5dc6"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.078983 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6h4vl\" (UniqueName: \"kubernetes.io/projected/f6539037-4781-4a9d-b13a-80e9c7f178ac-kube-api-access-6h4vl\") pod \"horizon-operator-controller-manager-77d5c5b54f-q5dc6\" (UID: \"f6539037-4781-4a9d-b13a-80e9c7f178ac\") " pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-q5dc6" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.079034 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jv5f\" (UniqueName: \"kubernetes.io/projected/5d2909d9-5499-4119-886c-d389257e85fa-kube-api-access-6jv5f\") pod \"cinder-operator-controller-manager-7478f7dbf9-zqk5g\" (UID: \"5d2909d9-5499-4119-886c-d389257e85fa\") " pod="openstack-operators/cinder-operator-controller-manager-7478f7dbf9-zqk5g" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.079061 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcbpg\" (UniqueName: \"kubernetes.io/projected/4621fc20-e68f-4c29-a01b-e62b2eda190c-kube-api-access-tcbpg\") pod \"glance-operator-controller-manager-78fdd796fd-drtqb\" (UID: \"4621fc20-e68f-4c29-a01b-e62b2eda190c\") " pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-drtqb" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.079095 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e9614b09-655d-4426-8b74-772aa777cf4d-cert\") pod \"infra-operator-controller-manager-694cf4f878-92chv\" (UID: \"e9614b09-655d-4426-8b74-772aa777cf4d\") " pod="openstack-operators/infra-operator-controller-manager-694cf4f878-92chv" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.079135 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mz726\" (UniqueName: \"kubernetes.io/projected/f8bca6a7-6aca-4dd1-8c9d-a358f932eab3-kube-api-access-mz726\") pod \"designate-operator-controller-manager-b45d7bf98-9k7l8\" (UID: \"f8bca6a7-6aca-4dd1-8c9d-a358f932eab3\") " pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-9k7l8" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.079158 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bl9fk\" (UniqueName: \"kubernetes.io/projected/5e5c9061-59c7-4984-a93f-a61ef1b582ba-kube-api-access-bl9fk\") pod \"keystone-operator-controller-manager-b8b6d4659-jxcrp\" (UID: \"5e5c9061-59c7-4984-a93f-a61ef1b582ba\") " pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-jxcrp" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.079181 4811 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rrlj8\" (UniqueName: \"kubernetes.io/projected/0cf7363c-9c82-4ab6-bd1c-d8cff679a3b3-kube-api-access-rrlj8\") pod \"barbican-operator-controller-manager-7f86f8796f-flj9q\" (UID: \"0cf7363c-9c82-4ab6-bd1c-d8cff679a3b3\") " pod="openstack-operators/barbican-operator-controller-manager-7f86f8796f-flj9q" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.079230 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvq56\" (UniqueName: \"kubernetes.io/projected/da7d2484-74a9-4915-b825-9c35586331e4-kube-api-access-xvq56\") pod \"ironic-operator-controller-manager-598f7747c9-v4znb\" (UID: \"da7d2484-74a9-4915-b825-9c35586331e4\") " pod="openstack-operators/ironic-operator-controller-manager-598f7747c9-v4znb" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.079270 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btgb9\" (UniqueName: \"kubernetes.io/projected/e9614b09-655d-4426-8b74-772aa777cf4d-kube-api-access-btgb9\") pod \"infra-operator-controller-manager-694cf4f878-92chv\" (UID: \"e9614b09-655d-4426-8b74-772aa777cf4d\") " pod="openstack-operators/infra-operator-controller-manager-694cf4f878-92chv" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.079295 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzw7t\" (UniqueName: \"kubernetes.io/projected/4fe875df-573b-4f98-bcaa-2f72a2a409cb-kube-api-access-tzw7t\") pod \"heat-operator-controller-manager-594c8c9d5d-t4nkf\" (UID: \"4fe875df-573b-4f98-bcaa-2f72a2a409cb\") " pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-t4nkf" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.083424 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-b8b6d4659-jxcrp"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.107770 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-598f7747c9-v4znb"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.116484 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-78c6999f6f-dr9ff"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.117293 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-dr9ff" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.125036 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jv5f\" (UniqueName: \"kubernetes.io/projected/5d2909d9-5499-4119-886c-d389257e85fa-kube-api-access-6jv5f\") pod \"cinder-operator-controller-manager-7478f7dbf9-zqk5g\" (UID: \"5d2909d9-5499-4119-886c-d389257e85fa\") " pod="openstack-operators/cinder-operator-controller-manager-7478f7dbf9-zqk5g" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.125481 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mz726\" (UniqueName: \"kubernetes.io/projected/f8bca6a7-6aca-4dd1-8c9d-a358f932eab3-kube-api-access-mz726\") pod \"designate-operator-controller-manager-b45d7bf98-9k7l8\" (UID: \"f8bca6a7-6aca-4dd1-8c9d-a358f932eab3\") " pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-9k7l8" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.126851 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rrlj8\" (UniqueName: \"kubernetes.io/projected/0cf7363c-9c82-4ab6-bd1c-d8cff679a3b3-kube-api-access-rrlj8\") pod \"barbican-operator-controller-manager-7f86f8796f-flj9q\" (UID: \"0cf7363c-9c82-4ab6-bd1c-d8cff679a3b3\") " pod="openstack-operators/barbican-operator-controller-manager-7f86f8796f-flj9q" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.127581 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcbpg\" (UniqueName: \"kubernetes.io/projected/4621fc20-e68f-4c29-a01b-e62b2eda190c-kube-api-access-tcbpg\") pod \"glance-operator-controller-manager-78fdd796fd-drtqb\" (UID: \"4621fc20-e68f-4c29-a01b-e62b2eda190c\") " pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-drtqb" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.130796 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-bmsb5" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.146238 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-trvzh"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.147235 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-trvzh" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.170161 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-4ccnj" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.170306 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-78c6999f6f-dr9ff"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.184391 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btgb9\" (UniqueName: \"kubernetes.io/projected/e9614b09-655d-4426-8b74-772aa777cf4d-kube-api-access-btgb9\") pod \"infra-operator-controller-manager-694cf4f878-92chv\" (UID: \"e9614b09-655d-4426-8b74-772aa777cf4d\") " pod="openstack-operators/infra-operator-controller-manager-694cf4f878-92chv" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.184446 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzw7t\" (UniqueName: \"kubernetes.io/projected/4fe875df-573b-4f98-bcaa-2f72a2a409cb-kube-api-access-tzw7t\") pod \"heat-operator-controller-manager-594c8c9d5d-t4nkf\" (UID: \"4fe875df-573b-4f98-bcaa-2f72a2a409cb\") " pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-t4nkf" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.184476 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gmdbf\" (UniqueName: \"kubernetes.io/projected/66f46681-a51b-4c9e-9cc0-0a604a8ce3c0-kube-api-access-gmdbf\") pod \"manila-operator-controller-manager-78c6999f6f-dr9ff\" (UID: \"66f46681-a51b-4c9e-9cc0-0a604a8ce3c0\") " pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-dr9ff" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.184498 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6h4vl\" (UniqueName: \"kubernetes.io/projected/f6539037-4781-4a9d-b13a-80e9c7f178ac-kube-api-access-6h4vl\") pod \"horizon-operator-controller-manager-77d5c5b54f-q5dc6\" (UID: \"f6539037-4781-4a9d-b13a-80e9c7f178ac\") " pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-q5dc6" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.184529 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dh89l\" (UniqueName: \"kubernetes.io/projected/787f4d88-cda8-4515-a1b8-db763a44554e-kube-api-access-dh89l\") pod \"mariadb-operator-controller-manager-6b9fb5fdcb-trvzh\" (UID: \"787f4d88-cda8-4515-a1b8-db763a44554e\") " pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-trvzh" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.184548 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e9614b09-655d-4426-8b74-772aa777cf4d-cert\") pod \"infra-operator-controller-manager-694cf4f878-92chv\" (UID: \"e9614b09-655d-4426-8b74-772aa777cf4d\") " pod="openstack-operators/infra-operator-controller-manager-694cf4f878-92chv" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.184583 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bl9fk\" (UniqueName: \"kubernetes.io/projected/5e5c9061-59c7-4984-a93f-a61ef1b582ba-kube-api-access-bl9fk\") pod 
\"keystone-operator-controller-manager-b8b6d4659-jxcrp\" (UID: \"5e5c9061-59c7-4984-a93f-a61ef1b582ba\") " pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-jxcrp" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.184621 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvq56\" (UniqueName: \"kubernetes.io/projected/da7d2484-74a9-4915-b825-9c35586331e4-kube-api-access-xvq56\") pod \"ironic-operator-controller-manager-598f7747c9-v4znb\" (UID: \"da7d2484-74a9-4915-b825-9c35586331e4\") " pod="openstack-operators/ironic-operator-controller-manager-598f7747c9-v4znb" Jan 28 16:03:22 crc kubenswrapper[4811]: E0128 16:03:22.185352 4811 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 28 16:03:22 crc kubenswrapper[4811]: E0128 16:03:22.185400 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e9614b09-655d-4426-8b74-772aa777cf4d-cert podName:e9614b09-655d-4426-8b74-772aa777cf4d nodeName:}" failed. No retries permitted until 2026-01-28 16:03:22.685383483 +0000 UTC m=+1095.439747066 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e9614b09-655d-4426-8b74-772aa777cf4d-cert") pod "infra-operator-controller-manager-694cf4f878-92chv" (UID: "e9614b09-655d-4426-8b74-772aa777cf4d") : secret "infra-operator-webhook-server-cert" not found Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.207091 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bl9fk\" (UniqueName: \"kubernetes.io/projected/5e5c9061-59c7-4984-a93f-a61ef1b582ba-kube-api-access-bl9fk\") pod \"keystone-operator-controller-manager-b8b6d4659-jxcrp\" (UID: \"5e5c9061-59c7-4984-a93f-a61ef1b582ba\") " pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-jxcrp" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.217859 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzw7t\" (UniqueName: \"kubernetes.io/projected/4fe875df-573b-4f98-bcaa-2f72a2a409cb-kube-api-access-tzw7t\") pod \"heat-operator-controller-manager-594c8c9d5d-t4nkf\" (UID: \"4fe875df-573b-4f98-bcaa-2f72a2a409cb\") " pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-t4nkf" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.219058 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7f86f8796f-flj9q" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.222921 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvq56\" (UniqueName: \"kubernetes.io/projected/da7d2484-74a9-4915-b825-9c35586331e4-kube-api-access-xvq56\") pod \"ironic-operator-controller-manager-598f7747c9-v4znb\" (UID: \"da7d2484-74a9-4915-b825-9c35586331e4\") " pod="openstack-operators/ironic-operator-controller-manager-598f7747c9-v4znb" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.223813 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btgb9\" (UniqueName: \"kubernetes.io/projected/e9614b09-655d-4426-8b74-772aa777cf4d-kube-api-access-btgb9\") pod \"infra-operator-controller-manager-694cf4f878-92chv\" (UID: \"e9614b09-655d-4426-8b74-772aa777cf4d\") " pod="openstack-operators/infra-operator-controller-manager-694cf4f878-92chv" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.224507 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6h4vl\" (UniqueName: \"kubernetes.io/projected/f6539037-4781-4a9d-b13a-80e9c7f178ac-kube-api-access-6h4vl\") pod \"horizon-operator-controller-manager-77d5c5b54f-q5dc6\" (UID: \"f6539037-4781-4a9d-b13a-80e9c7f178ac\") " pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-q5dc6" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.224564 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-trvzh"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.232794 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-7478f7dbf9-zqk5g" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.255384 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-9k7l8" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.267535 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-78d58447c5-9x8xg"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.278708 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-78d58447c5-9x8xg" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.282481 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-ptkxr" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.285712 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtvhs\" (UniqueName: \"kubernetes.io/projected/fc9fd651-c991-4202-911d-26e9b037c636-kube-api-access-wtvhs\") pod \"neutron-operator-controller-manager-78d58447c5-9x8xg\" (UID: \"fc9fd651-c991-4202-911d-26e9b037c636\") " pod="openstack-operators/neutron-operator-controller-manager-78d58447c5-9x8xg" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.285795 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gmdbf\" (UniqueName: \"kubernetes.io/projected/66f46681-a51b-4c9e-9cc0-0a604a8ce3c0-kube-api-access-gmdbf\") pod \"manila-operator-controller-manager-78c6999f6f-dr9ff\" (UID: \"66f46681-a51b-4c9e-9cc0-0a604a8ce3c0\") " pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-dr9ff" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.285828 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dh89l\" (UniqueName: \"kubernetes.io/projected/787f4d88-cda8-4515-a1b8-db763a44554e-kube-api-access-dh89l\") pod \"mariadb-operator-controller-manager-6b9fb5fdcb-trvzh\" (UID: \"787f4d88-cda8-4515-a1b8-db763a44554e\") " pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-trvzh" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.289503 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-drtqb" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.306652 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-t4nkf" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.309320 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gmdbf\" (UniqueName: \"kubernetes.io/projected/66f46681-a51b-4c9e-9cc0-0a604a8ce3c0-kube-api-access-gmdbf\") pod \"manila-operator-controller-manager-78c6999f6f-dr9ff\" (UID: \"66f46681-a51b-4c9e-9cc0-0a604a8ce3c0\") " pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-dr9ff" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.310010 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dh89l\" (UniqueName: \"kubernetes.io/projected/787f4d88-cda8-4515-a1b8-db763a44554e-kube-api-access-dh89l\") pod \"mariadb-operator-controller-manager-6b9fb5fdcb-trvzh\" (UID: \"787f4d88-cda8-4515-a1b8-db763a44554e\") " pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-trvzh" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.322913 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-78d58447c5-9x8xg"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.335951 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-7bdb645866-mv6x7"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.336848 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-7bdb645866-mv6x7" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.340780 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-8w55c" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.354374 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-5f4cd88d46-5kczk"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.355384 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-7bdb645866-mv6x7"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.355409 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-5f4cd88d46-5kczk"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.355573 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-5f4cd88d46-5kczk" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.360491 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.361404 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.363838 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-xbjjf" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.364788 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.364807 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-x4sg8" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.389005 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-q5dc6" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.397951 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-598f7747c9-v4znb" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.398260 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtvhs\" (UniqueName: \"kubernetes.io/projected/fc9fd651-c991-4202-911d-26e9b037c636-kube-api-access-wtvhs\") pod \"neutron-operator-controller-manager-78d58447c5-9x8xg\" (UID: \"fc9fd651-c991-4202-911d-26e9b037c636\") " pod="openstack-operators/neutron-operator-controller-manager-78d58447c5-9x8xg" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.402938 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-6f75f45d54-jxjn5"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.415798 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-79d5ccc684-znx6t"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.416713 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-znx6t" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.417191 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-jxjn5" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.431218 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-cr2mb" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.431658 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-z8tjb" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.431680 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-6f75f45d54-jxjn5"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.464052 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wtvhs\" (UniqueName: \"kubernetes.io/projected/fc9fd651-c991-4202-911d-26e9b037c636-kube-api-access-wtvhs\") pod \"neutron-operator-controller-manager-78d58447c5-9x8xg\" (UID: \"fc9fd651-c991-4202-911d-26e9b037c636\") " pod="openstack-operators/neutron-operator-controller-manager-78d58447c5-9x8xg" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.470800 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-jxcrp" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.501173 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/59c4af9d-14e6-4d10-aa40-5f300ca20242-cert\") pod \"openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd\" (UID: \"59c4af9d-14e6-4d10-aa40-5f300ca20242\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.501235 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ddq42\" (UniqueName: \"kubernetes.io/projected/59c4af9d-14e6-4d10-aa40-5f300ca20242-kube-api-access-ddq42\") pod \"openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd\" (UID: \"59c4af9d-14e6-4d10-aa40-5f300ca20242\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.501327 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cnsgz\" (UniqueName: \"kubernetes.io/projected/12256989-9520-4379-9e55-3c11ab97993c-kube-api-access-cnsgz\") pod \"octavia-operator-controller-manager-5f4cd88d46-5kczk\" (UID: \"12256989-9520-4379-9e55-3c11ab97993c\") " pod="openstack-operators/octavia-operator-controller-manager-5f4cd88d46-5kczk" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.501380 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4x66x\" (UniqueName: \"kubernetes.io/projected/7b0b495e-25ab-48c9-9ee7-494295ed7316-kube-api-access-4x66x\") pod \"nova-operator-controller-manager-7bdb645866-mv6x7\" (UID: \"7b0b495e-25ab-48c9-9ee7-494295ed7316\") " pod="openstack-operators/nova-operator-controller-manager-7bdb645866-mv6x7" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.502134 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-dr9ff" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.502603 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.525794 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-79d5ccc684-znx6t"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.539962 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-547cbdb99f-r7j2f"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.540779 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-r7j2f" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.552541 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-547cbdb99f-r7j2f"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.557770 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-4g74j" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.565017 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-85cd9769bb-gkhtq"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.566100 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-gkhtq" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.577625 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-n4b57" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.604604 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cnsgz\" (UniqueName: \"kubernetes.io/projected/12256989-9520-4379-9e55-3c11ab97993c-kube-api-access-cnsgz\") pod \"octavia-operator-controller-manager-5f4cd88d46-5kczk\" (UID: \"12256989-9520-4379-9e55-3c11ab97993c\") " pod="openstack-operators/octavia-operator-controller-manager-5f4cd88d46-5kczk" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.604850 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsbll\" (UniqueName: \"kubernetes.io/projected/ec09ccdf-ab1a-46ef-9a34-0d25f21e5205-kube-api-access-nsbll\") pod \"ovn-operator-controller-manager-6f75f45d54-jxjn5\" (UID: \"ec09ccdf-ab1a-46ef-9a34-0d25f21e5205\") " pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-jxjn5" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.604966 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4x66x\" (UniqueName: \"kubernetes.io/projected/7b0b495e-25ab-48c9-9ee7-494295ed7316-kube-api-access-4x66x\") pod \"nova-operator-controller-manager-7bdb645866-mv6x7\" (UID: \"7b0b495e-25ab-48c9-9ee7-494295ed7316\") " pod="openstack-operators/nova-operator-controller-manager-7bdb645866-mv6x7" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.605059 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lf64g\" (UniqueName: \"kubernetes.io/projected/c2dcc00d-c6c6-4546-8aed-b1ccceb9534f-kube-api-access-lf64g\") pod \"placement-operator-controller-manager-79d5ccc684-znx6t\" (UID: \"c2dcc00d-c6c6-4546-8aed-b1ccceb9534f\") " pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-znx6t" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.605142 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/59c4af9d-14e6-4d10-aa40-5f300ca20242-cert\") pod \"openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd\" (UID: \"59c4af9d-14e6-4d10-aa40-5f300ca20242\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.605241 4811 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-ddq42\" (UniqueName: \"kubernetes.io/projected/59c4af9d-14e6-4d10-aa40-5f300ca20242-kube-api-access-ddq42\") pod \"openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd\" (UID: \"59c4af9d-14e6-4d10-aa40-5f300ca20242\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd" Jan 28 16:03:22 crc kubenswrapper[4811]: E0128 16:03:22.605984 4811 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 28 16:03:22 crc kubenswrapper[4811]: E0128 16:03:22.606038 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/59c4af9d-14e6-4d10-aa40-5f300ca20242-cert podName:59c4af9d-14e6-4d10-aa40-5f300ca20242 nodeName:}" failed. No retries permitted until 2026-01-28 16:03:23.106020486 +0000 UTC m=+1095.860384069 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/59c4af9d-14e6-4d10-aa40-5f300ca20242-cert") pod "openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd" (UID: "59c4af9d-14e6-4d10-aa40-5f300ca20242") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.606237 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-trvzh" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.610512 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-85cd9769bb-gkhtq"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.646792 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-78d58447c5-9x8xg" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.651093 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-69797bbcbd-22cf4"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.652005 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-22cf4" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.654762 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-wbxfp" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.658133 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4x66x\" (UniqueName: \"kubernetes.io/projected/7b0b495e-25ab-48c9-9ee7-494295ed7316-kube-api-access-4x66x\") pod \"nova-operator-controller-manager-7bdb645866-mv6x7\" (UID: \"7b0b495e-25ab-48c9-9ee7-494295ed7316\") " pod="openstack-operators/nova-operator-controller-manager-7bdb645866-mv6x7" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.674581 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-69797bbcbd-22cf4"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.676148 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cnsgz\" (UniqueName: \"kubernetes.io/projected/12256989-9520-4379-9e55-3c11ab97993c-kube-api-access-cnsgz\") pod \"octavia-operator-controller-manager-5f4cd88d46-5kczk\" (UID: \"12256989-9520-4379-9e55-3c11ab97993c\") " pod="openstack-operators/octavia-operator-controller-manager-5f4cd88d46-5kczk" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.678392 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ddq42\" (UniqueName: \"kubernetes.io/projected/59c4af9d-14e6-4d10-aa40-5f300ca20242-kube-api-access-ddq42\") pod \"openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd\" (UID: \"59c4af9d-14e6-4d10-aa40-5f300ca20242\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.685617 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-7bdb645866-mv6x7" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.699746 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-5f4cd88d46-5kczk" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.702318 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-564965969-lxmgs"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.703314 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-564965969-lxmgs" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.705879 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-4jqfh" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.706398 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42z2f\" (UniqueName: \"kubernetes.io/projected/def156ed-ad39-4f23-aa51-bd1d36b35543-kube-api-access-42z2f\") pod \"telemetry-operator-controller-manager-85cd9769bb-gkhtq\" (UID: \"def156ed-ad39-4f23-aa51-bd1d36b35543\") " pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-gkhtq" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.706566 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsbll\" (UniqueName: \"kubernetes.io/projected/ec09ccdf-ab1a-46ef-9a34-0d25f21e5205-kube-api-access-nsbll\") pod \"ovn-operator-controller-manager-6f75f45d54-jxjn5\" (UID: \"ec09ccdf-ab1a-46ef-9a34-0d25f21e5205\") " pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-jxjn5" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.706663 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-plqxf\" (UniqueName: \"kubernetes.io/projected/6fe8a24f-c7b1-4b27-908e-996e66803e0a-kube-api-access-plqxf\") pod \"swift-operator-controller-manager-547cbdb99f-r7j2f\" (UID: \"6fe8a24f-c7b1-4b27-908e-996e66803e0a\") " pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-r7j2f" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.706751 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lf64g\" (UniqueName: \"kubernetes.io/projected/c2dcc00d-c6c6-4546-8aed-b1ccceb9534f-kube-api-access-lf64g\") pod \"placement-operator-controller-manager-79d5ccc684-znx6t\" (UID: \"c2dcc00d-c6c6-4546-8aed-b1ccceb9534f\") " pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-znx6t" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.706866 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e9614b09-655d-4426-8b74-772aa777cf4d-cert\") pod \"infra-operator-controller-manager-694cf4f878-92chv\" (UID: \"e9614b09-655d-4426-8b74-772aa777cf4d\") " pod="openstack-operators/infra-operator-controller-manager-694cf4f878-92chv" Jan 28 16:03:22 crc kubenswrapper[4811]: E0128 16:03:22.707182 4811 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 28 16:03:22 crc kubenswrapper[4811]: E0128 16:03:22.707301 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e9614b09-655d-4426-8b74-772aa777cf4d-cert podName:e9614b09-655d-4426-8b74-772aa777cf4d nodeName:}" failed. No retries permitted until 2026-01-28 16:03:23.707283986 +0000 UTC m=+1096.461647569 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e9614b09-655d-4426-8b74-772aa777cf4d-cert") pod "infra-operator-controller-manager-694cf4f878-92chv" (UID: "e9614b09-655d-4426-8b74-772aa777cf4d") : secret "infra-operator-webhook-server-cert" not found Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.723498 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-564965969-lxmgs"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.746776 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lf64g\" (UniqueName: \"kubernetes.io/projected/c2dcc00d-c6c6-4546-8aed-b1ccceb9534f-kube-api-access-lf64g\") pod \"placement-operator-controller-manager-79d5ccc684-znx6t\" (UID: \"c2dcc00d-c6c6-4546-8aed-b1ccceb9534f\") " pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-znx6t" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.748022 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nsbll\" (UniqueName: \"kubernetes.io/projected/ec09ccdf-ab1a-46ef-9a34-0d25f21e5205-kube-api-access-nsbll\") pod \"ovn-operator-controller-manager-6f75f45d54-jxjn5\" (UID: \"ec09ccdf-ab1a-46ef-9a34-0d25f21e5205\") " pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-jxjn5" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.799342 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-znx6t" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.810766 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-jxjn5" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.824967 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-plqxf\" (UniqueName: \"kubernetes.io/projected/6fe8a24f-c7b1-4b27-908e-996e66803e0a-kube-api-access-plqxf\") pod \"swift-operator-controller-manager-547cbdb99f-r7j2f\" (UID: \"6fe8a24f-c7b1-4b27-908e-996e66803e0a\") " pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-r7j2f" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.825130 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xp66w\" (UniqueName: \"kubernetes.io/projected/003629ab-e0b0-49b2-a4ab-f9cf7b67b588-kube-api-access-xp66w\") pod \"watcher-operator-controller-manager-564965969-lxmgs\" (UID: \"003629ab-e0b0-49b2-a4ab-f9cf7b67b588\") " pod="openstack-operators/watcher-operator-controller-manager-564965969-lxmgs" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.825706 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gnmc\" (UniqueName: \"kubernetes.io/projected/548c9a9e-14a4-44a0-a7d0-f4ccca0bcdd0-kube-api-access-6gnmc\") pod \"test-operator-controller-manager-69797bbcbd-22cf4\" (UID: \"548c9a9e-14a4-44a0-a7d0-f4ccca0bcdd0\") " pod="openstack-operators/test-operator-controller-manager-69797bbcbd-22cf4" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.825784 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42z2f\" (UniqueName: \"kubernetes.io/projected/def156ed-ad39-4f23-aa51-bd1d36b35543-kube-api-access-42z2f\") pod 
\"telemetry-operator-controller-manager-85cd9769bb-gkhtq\" (UID: \"def156ed-ad39-4f23-aa51-bd1d36b35543\") " pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-gkhtq" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.849113 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-9f67d7-pqwwm"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.851134 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-9f67d7-pqwwm" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.852862 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42z2f\" (UniqueName: \"kubernetes.io/projected/def156ed-ad39-4f23-aa51-bd1d36b35543-kube-api-access-42z2f\") pod \"telemetry-operator-controller-manager-85cd9769bb-gkhtq\" (UID: \"def156ed-ad39-4f23-aa51-bd1d36b35543\") " pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-gkhtq" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.853329 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.860239 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.860574 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-pz96d" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.872831 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-plqxf\" (UniqueName: \"kubernetes.io/projected/6fe8a24f-c7b1-4b27-908e-996e66803e0a-kube-api-access-plqxf\") pod \"swift-operator-controller-manager-547cbdb99f-r7j2f\" (UID: \"6fe8a24f-c7b1-4b27-908e-996e66803e0a\") " pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-r7j2f" Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.883183 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-9f67d7-pqwwm"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.890730 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-8vvwj"] Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.897017 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-8vvwj"
Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.906657 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-gfjnz"
Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.923986 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-8vvwj"]
Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.926729 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-metrics-certs\") pod \"openstack-operator-controller-manager-9f67d7-pqwwm\" (UID: \"1218f34b-3146-4128-a086-4848855a5c92\") " pod="openstack-operators/openstack-operator-controller-manager-9f67d7-pqwwm"
Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.926764 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sb98r\" (UniqueName: \"kubernetes.io/projected/1218f34b-3146-4128-a086-4848855a5c92-kube-api-access-sb98r\") pod \"openstack-operator-controller-manager-9f67d7-pqwwm\" (UID: \"1218f34b-3146-4128-a086-4848855a5c92\") " pod="openstack-operators/openstack-operator-controller-manager-9f67d7-pqwwm"
Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.926813 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-webhook-certs\") pod \"openstack-operator-controller-manager-9f67d7-pqwwm\" (UID: \"1218f34b-3146-4128-a086-4848855a5c92\") " pod="openstack-operators/openstack-operator-controller-manager-9f67d7-pqwwm"
Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.926834 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xp66w\" (UniqueName: \"kubernetes.io/projected/003629ab-e0b0-49b2-a4ab-f9cf7b67b588-kube-api-access-xp66w\") pod \"watcher-operator-controller-manager-564965969-lxmgs\" (UID: \"003629ab-e0b0-49b2-a4ab-f9cf7b67b588\") " pod="openstack-operators/watcher-operator-controller-manager-564965969-lxmgs"
Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.926870 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gnmc\" (UniqueName: \"kubernetes.io/projected/548c9a9e-14a4-44a0-a7d0-f4ccca0bcdd0-kube-api-access-6gnmc\") pod \"test-operator-controller-manager-69797bbcbd-22cf4\" (UID: \"548c9a9e-14a4-44a0-a7d0-f4ccca0bcdd0\") " pod="openstack-operators/test-operator-controller-manager-69797bbcbd-22cf4"
Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.926892 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5t5wg\" (UniqueName: \"kubernetes.io/projected/7808e709-5b75-45bb-bf62-5756261a4279-kube-api-access-5t5wg\") pod \"rabbitmq-cluster-operator-manager-668c99d594-8vvwj\" (UID: \"7808e709-5b75-45bb-bf62-5756261a4279\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-8vvwj"
Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.947638 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xp66w\" (UniqueName: \"kubernetes.io/projected/003629ab-e0b0-49b2-a4ab-f9cf7b67b588-kube-api-access-xp66w\") pod \"watcher-operator-controller-manager-564965969-lxmgs\" (UID: \"003629ab-e0b0-49b2-a4ab-f9cf7b67b588\") " pod="openstack-operators/watcher-operator-controller-manager-564965969-lxmgs"
Jan 28 16:03:22 crc kubenswrapper[4811]: I0128 16:03:22.947884 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gnmc\" (UniqueName: \"kubernetes.io/projected/548c9a9e-14a4-44a0-a7d0-f4ccca0bcdd0-kube-api-access-6gnmc\") pod \"test-operator-controller-manager-69797bbcbd-22cf4\" (UID: \"548c9a9e-14a4-44a0-a7d0-f4ccca0bcdd0\") " pod="openstack-operators/test-operator-controller-manager-69797bbcbd-22cf4"
Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.032070 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-webhook-certs\") pod \"openstack-operator-controller-manager-9f67d7-pqwwm\" (UID: \"1218f34b-3146-4128-a086-4848855a5c92\") " pod="openstack-operators/openstack-operator-controller-manager-9f67d7-pqwwm"
Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.032149 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5t5wg\" (UniqueName: \"kubernetes.io/projected/7808e709-5b75-45bb-bf62-5756261a4279-kube-api-access-5t5wg\") pod \"rabbitmq-cluster-operator-manager-668c99d594-8vvwj\" (UID: \"7808e709-5b75-45bb-bf62-5756261a4279\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-8vvwj"
Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.032196 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-metrics-certs\") pod \"openstack-operator-controller-manager-9f67d7-pqwwm\" (UID: \"1218f34b-3146-4128-a086-4848855a5c92\") " pod="openstack-operators/openstack-operator-controller-manager-9f67d7-pqwwm"
Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.032216 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sb98r\" (UniqueName: \"kubernetes.io/projected/1218f34b-3146-4128-a086-4848855a5c92-kube-api-access-sb98r\") pod \"openstack-operator-controller-manager-9f67d7-pqwwm\" (UID: \"1218f34b-3146-4128-a086-4848855a5c92\") " pod="openstack-operators/openstack-operator-controller-manager-9f67d7-pqwwm"
Jan 28 16:03:23 crc kubenswrapper[4811]: E0128 16:03:23.032212 4811 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Jan 28 16:03:23 crc kubenswrapper[4811]: E0128 16:03:23.032315 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-webhook-certs podName:1218f34b-3146-4128-a086-4848855a5c92 nodeName:}" failed. No retries permitted until 2026-01-28 16:03:23.53229876 +0000 UTC m=+1096.286662343 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-webhook-certs") pod "openstack-operator-controller-manager-9f67d7-pqwwm" (UID: "1218f34b-3146-4128-a086-4848855a5c92") : secret "webhook-server-cert" not found
Jan 28 16:03:23 crc kubenswrapper[4811]: E0128 16:03:23.032256 4811 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Jan 28 16:03:23 crc kubenswrapper[4811]: E0128 16:03:23.032473 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-metrics-certs podName:1218f34b-3146-4128-a086-4848855a5c92 nodeName:}" failed. No retries permitted until 2026-01-28 16:03:23.532466524 +0000 UTC m=+1096.286830107 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-metrics-certs") pod "openstack-operator-controller-manager-9f67d7-pqwwm" (UID: "1218f34b-3146-4128-a086-4848855a5c92") : secret "metrics-server-cert" not found
Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.056165 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5t5wg\" (UniqueName: \"kubernetes.io/projected/7808e709-5b75-45bb-bf62-5756261a4279-kube-api-access-5t5wg\") pod \"rabbitmq-cluster-operator-manager-668c99d594-8vvwj\" (UID: \"7808e709-5b75-45bb-bf62-5756261a4279\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-8vvwj"
Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.056225 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sb98r\" (UniqueName: \"kubernetes.io/projected/1218f34b-3146-4128-a086-4848855a5c92-kube-api-access-sb98r\") pod \"openstack-operator-controller-manager-9f67d7-pqwwm\" (UID: \"1218f34b-3146-4128-a086-4848855a5c92\") " pod="openstack-operators/openstack-operator-controller-manager-9f67d7-pqwwm"
Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.087244 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-r7j2f"
Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.114703 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-gkhtq"
Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.134079 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/59c4af9d-14e6-4d10-aa40-5f300ca20242-cert\") pod \"openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd\" (UID: \"59c4af9d-14e6-4d10-aa40-5f300ca20242\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd"
Jan 28 16:03:23 crc kubenswrapper[4811]: E0128 16:03:23.134350 4811 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Jan 28 16:03:23 crc kubenswrapper[4811]: E0128 16:03:23.134409 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/59c4af9d-14e6-4d10-aa40-5f300ca20242-cert podName:59c4af9d-14e6-4d10-aa40-5f300ca20242 nodeName:}" failed. No retries permitted until 2026-01-28 16:03:24.134392923 +0000 UTC m=+1096.888756506 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/59c4af9d-14e6-4d10-aa40-5f300ca20242-cert") pod "openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd" (UID: "59c4af9d-14e6-4d10-aa40-5f300ca20242") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/59c4af9d-14e6-4d10-aa40-5f300ca20242-cert") pod "openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd" (UID: "59c4af9d-14e6-4d10-aa40-5f300ca20242") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.151776 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-22cf4" Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.188731 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-564965969-lxmgs" Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.239292 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-8vvwj" Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.366411 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7f86f8796f-flj9q"] Jan 28 16:03:23 crc kubenswrapper[4811]: W0128 16:03:23.376712 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5d2909d9_5499_4119_886c_d389257e85fa.slice/crio-fa21916ccf2f4e00318a983e25cb1232aef233d653bbc6180b7b7718726ed84c WatchSource:0}: Error finding container fa21916ccf2f4e00318a983e25cb1232aef233d653bbc6180b7b7718726ed84c: Status 404 returned error can't find the container with id fa21916ccf2f4e00318a983e25cb1232aef233d653bbc6180b7b7718726ed84c Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.388895 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-b45d7bf98-9k7l8"] Jan 28 16:03:23 crc kubenswrapper[4811]: W0128 16:03:23.394899 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf8bca6a7_6aca_4dd1_8c9d_a358f932eab3.slice/crio-db305a318f79eaa237dfd0ddfa08f719b1ca5e23a77dcfbb7ab90dbbeb3f1553 WatchSource:0}: Error finding container db305a318f79eaa237dfd0ddfa08f719b1ca5e23a77dcfbb7ab90dbbeb3f1553: Status 404 returned error can't find the container with id db305a318f79eaa237dfd0ddfa08f719b1ca5e23a77dcfbb7ab90dbbeb3f1553 Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.399974 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-7478f7dbf9-zqk5g"] Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.477410 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-78fdd796fd-drtqb"] Jan 28 16:03:23 crc kubenswrapper[4811]: W0128 16:03:23.486903 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4621fc20_e68f_4c29_a01b_e62b2eda190c.slice/crio-4a127f51b5a9c36fbf4d23911817dd92059144dee899b4ae8f2d0de58a7beafd WatchSource:0}: Error finding container 4a127f51b5a9c36fbf4d23911817dd92059144dee899b4ae8f2d0de58a7beafd: Status 404 returned error can't find the container with id 4a127f51b5a9c36fbf4d23911817dd92059144dee899b4ae8f2d0de58a7beafd Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.491408 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-operators/heat-operator-controller-manager-594c8c9d5d-t4nkf"] Jan 28 16:03:23 crc kubenswrapper[4811]: W0128 16:03:23.499565 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4fe875df_573b_4f98_bcaa_2f72a2a409cb.slice/crio-6b797933b97e1ae0bc5fe4ca1f742fe01c8a20119259beb2a580dfa10f16b0ee WatchSource:0}: Error finding container 6b797933b97e1ae0bc5fe4ca1f742fe01c8a20119259beb2a580dfa10f16b0ee: Status 404 returned error can't find the container with id 6b797933b97e1ae0bc5fe4ca1f742fe01c8a20119259beb2a580dfa10f16b0ee Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.540117 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-metrics-certs\") pod \"openstack-operator-controller-manager-9f67d7-pqwwm\" (UID: \"1218f34b-3146-4128-a086-4848855a5c92\") " pod="openstack-operators/openstack-operator-controller-manager-9f67d7-pqwwm" Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.540196 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-webhook-certs\") pod \"openstack-operator-controller-manager-9f67d7-pqwwm\" (UID: \"1218f34b-3146-4128-a086-4848855a5c92\") " pod="openstack-operators/openstack-operator-controller-manager-9f67d7-pqwwm" Jan 28 16:03:23 crc kubenswrapper[4811]: E0128 16:03:23.540241 4811 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 28 16:03:23 crc kubenswrapper[4811]: E0128 16:03:23.540317 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-metrics-certs podName:1218f34b-3146-4128-a086-4848855a5c92 nodeName:}" failed. No retries permitted until 2026-01-28 16:03:24.540299828 +0000 UTC m=+1097.294663411 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-metrics-certs") pod "openstack-operator-controller-manager-9f67d7-pqwwm" (UID: "1218f34b-3146-4128-a086-4848855a5c92") : secret "metrics-server-cert" not found Jan 28 16:03:23 crc kubenswrapper[4811]: E0128 16:03:23.540326 4811 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 28 16:03:23 crc kubenswrapper[4811]: E0128 16:03:23.540375 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-webhook-certs podName:1218f34b-3146-4128-a086-4848855a5c92 nodeName:}" failed. No retries permitted until 2026-01-28 16:03:24.540358969 +0000 UTC m=+1097.294722612 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-webhook-certs") pod "openstack-operator-controller-manager-9f67d7-pqwwm" (UID: "1218f34b-3146-4128-a086-4848855a5c92") : secret "webhook-server-cert" not found Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.626399 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-drtqb" event={"ID":"4621fc20-e68f-4c29-a01b-e62b2eda190c","Type":"ContainerStarted","Data":"4a127f51b5a9c36fbf4d23911817dd92059144dee899b4ae8f2d0de58a7beafd"} Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.632870 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-9k7l8" event={"ID":"f8bca6a7-6aca-4dd1-8c9d-a358f932eab3","Type":"ContainerStarted","Data":"db305a318f79eaa237dfd0ddfa08f719b1ca5e23a77dcfbb7ab90dbbeb3f1553"} Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.634876 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-t4nkf" event={"ID":"4fe875df-573b-4f98-bcaa-2f72a2a409cb","Type":"ContainerStarted","Data":"6b797933b97e1ae0bc5fe4ca1f742fe01c8a20119259beb2a580dfa10f16b0ee"} Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.635947 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7f86f8796f-flj9q" event={"ID":"0cf7363c-9c82-4ab6-bd1c-d8cff679a3b3","Type":"ContainerStarted","Data":"9a5e86e7aa84d0a4229a5eb388740038b925d727f516a39199e4a1b9da13446b"} Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.637304 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-7478f7dbf9-zqk5g" event={"ID":"5d2909d9-5499-4119-886c-d389257e85fa","Type":"ContainerStarted","Data":"fa21916ccf2f4e00318a983e25cb1232aef233d653bbc6180b7b7718726ed84c"} Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.742765 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e9614b09-655d-4426-8b74-772aa777cf4d-cert\") pod \"infra-operator-controller-manager-694cf4f878-92chv\" (UID: \"e9614b09-655d-4426-8b74-772aa777cf4d\") " pod="openstack-operators/infra-operator-controller-manager-694cf4f878-92chv" Jan 28 16:03:23 crc kubenswrapper[4811]: E0128 16:03:23.743026 4811 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 28 16:03:23 crc kubenswrapper[4811]: E0128 16:03:23.743118 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e9614b09-655d-4426-8b74-772aa777cf4d-cert podName:e9614b09-655d-4426-8b74-772aa777cf4d nodeName:}" failed. No retries permitted until 2026-01-28 16:03:25.743094986 +0000 UTC m=+1098.497458569 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e9614b09-655d-4426-8b74-772aa777cf4d-cert") pod "infra-operator-controller-manager-694cf4f878-92chv" (UID: "e9614b09-655d-4426-8b74-772aa777cf4d") : secret "infra-operator-webhook-server-cert" not found Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.869633 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-78d58447c5-9x8xg"] Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.876402 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-598f7747c9-v4znb"] Jan 28 16:03:23 crc kubenswrapper[4811]: W0128 16:03:23.900291 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podda7d2484_74a9_4915_b825_9c35586331e4.slice/crio-06c481b9d532dd014240a4f47ac49f8b9ccaad7cb7805a0ef25f898c3a66204f WatchSource:0}: Error finding container 06c481b9d532dd014240a4f47ac49f8b9ccaad7cb7805a0ef25f898c3a66204f: Status 404 returned error can't find the container with id 06c481b9d532dd014240a4f47ac49f8b9ccaad7cb7805a0ef25f898c3a66204f Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.925591 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-7bdb645866-mv6x7"] Jan 28 16:03:23 crc kubenswrapper[4811]: W0128 16:03:23.938789 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podec09ccdf_ab1a_46ef_9a34_0d25f21e5205.slice/crio-66441af7bc7d8909d653a011f8036bf4e3289d473d845a01cb43c8cc25049417 WatchSource:0}: Error finding container 66441af7bc7d8909d653a011f8036bf4e3289d473d845a01cb43c8cc25049417: Status 404 returned error can't find the container with id 66441af7bc7d8909d653a011f8036bf4e3289d473d845a01cb43c8cc25049417 Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.955277 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-b8b6d4659-jxcrp"] Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.963440 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-78c6999f6f-dr9ff"] Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.967823 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-6f75f45d54-jxjn5"] Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.973635 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-79d5ccc684-znx6t"] Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.979274 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-5f4cd88d46-5kczk"] Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.983630 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-77d5c5b54f-q5dc6"] Jan 28 16:03:23 crc kubenswrapper[4811]: E0128 16:03:23.985141 4811 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:445e951df2f21df6d33a466f75917e0f6103052ae751ae11887136e8ab165922,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-plqxf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-547cbdb99f-r7j2f_openstack-operators(6fe8a24f-c7b1-4b27-908e-996e66803e0a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 28 16:03:23 crc kubenswrapper[4811]: E0128 16:03:23.986617 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-r7j2f" podUID="6fe8a24f-c7b1-4b27-908e-996e66803e0a" Jan 28 16:03:23 crc kubenswrapper[4811]: E0128 16:03:23.987383 4811 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:b673f00227298dcfa89abb46f8296a0825add42da41e8a4bf4dd13367c738d84,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dh89l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-6b9fb5fdcb-trvzh_openstack-operators(787f4d88-cda8-4515-a1b8-db763a44554e): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.988173 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-trvzh"] Jan 28 16:03:23 crc kubenswrapper[4811]: E0128 16:03:23.988533 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-trvzh" podUID="787f4d88-cda8-4515-a1b8-db763a44554e" Jan 28 16:03:23 crc kubenswrapper[4811]: E0128 16:03:23.990714 4811 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:013c0ad82d21a21c7eece5cd4b5d5c4b8eb410b6671ac33a6f3fb78c8510811d,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lf64g,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-79d5ccc684-znx6t_openstack-operators(c2dcc00d-c6c6-4546-8aed-b1ccceb9534f): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 28 16:03:23 crc kubenswrapper[4811]: E0128 16:03:23.992902 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-znx6t" podUID="c2dcc00d-c6c6-4546-8aed-b1ccceb9534f" Jan 28 16:03:23 crc kubenswrapper[4811]: I0128 16:03:23.992964 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-85cd9769bb-gkhtq"] Jan 28 16:03:23 crc kubenswrapper[4811]: E0128 16:03:23.996094 4811 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:7869203f6f97de780368d507636031090fed3b658d2f7771acbd4481bdfc870b,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xp66w,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-564965969-lxmgs_openstack-operators(003629ab-e0b0-49b2-a4ab-f9cf7b67b588): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 28 16:03:24 crc kubenswrapper[4811]: E0128 16:03:23.997530 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-564965969-lxmgs" podUID="003629ab-e0b0-49b2-a4ab-f9cf7b67b588" Jan 28 16:03:24 crc kubenswrapper[4811]: I0128 16:03:23.997591 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-547cbdb99f-r7j2f"] Jan 28 16:03:24 crc kubenswrapper[4811]: I0128 16:03:24.002320 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-564965969-lxmgs"] Jan 28 16:03:24 crc kubenswrapper[4811]: I0128 16:03:24.007925 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-69797bbcbd-22cf4"] Jan 28 16:03:24 crc kubenswrapper[4811]: E0128 16:03:24.010230 4811 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:c8dde42dafd41026ed2e4cfc26efc0fff63c4ba9d31326ae7dc644ccceaafa9d,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6gnmc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-69797bbcbd-22cf4_openstack-operators(548c9a9e-14a4-44a0-a7d0-f4ccca0bcdd0): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 28 16:03:24 crc kubenswrapper[4811]: E0128 16:03:24.010265 4811 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:e02722d7581bfe1c5fc13e2fa6811d8665102ba86635c77547abf6b933cde127,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-42z2f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-85cd9769bb-gkhtq_openstack-operators(def156ed-ad39-4f23-aa51-bd1d36b35543): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 28 16:03:24 crc 
kubenswrapper[4811]: E0128 16:03:24.010336 4811 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5t5wg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-8vvwj_openstack-operators(7808e709-5b75-45bb-bf62-5756261a4279): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 28 16:03:24 crc kubenswrapper[4811]: E0128 16:03:24.011731 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-8vvwj" podUID="7808e709-5b75-45bb-bf62-5756261a4279" Jan 28 16:03:24 crc kubenswrapper[4811]: E0128 16:03:24.011817 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-22cf4" podUID="548c9a9e-14a4-44a0-a7d0-f4ccca0bcdd0" Jan 28 16:03:24 crc kubenswrapper[4811]: I0128 16:03:24.011869 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-8vvwj"] Jan 28 16:03:24 crc kubenswrapper[4811]: E0128 16:03:24.011876 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-gkhtq" podUID="def156ed-ad39-4f23-aa51-bd1d36b35543" Jan 28 16:03:24 crc kubenswrapper[4811]: I0128 16:03:24.150344 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: 
\"kubernetes.io/secret/59c4af9d-14e6-4d10-aa40-5f300ca20242-cert\") pod \"openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd\" (UID: \"59c4af9d-14e6-4d10-aa40-5f300ca20242\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd" Jan 28 16:03:24 crc kubenswrapper[4811]: E0128 16:03:24.150566 4811 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 28 16:03:24 crc kubenswrapper[4811]: E0128 16:03:24.151693 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/59c4af9d-14e6-4d10-aa40-5f300ca20242-cert podName:59c4af9d-14e6-4d10-aa40-5f300ca20242 nodeName:}" failed. No retries permitted until 2026-01-28 16:03:26.151671113 +0000 UTC m=+1098.906034696 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/59c4af9d-14e6-4d10-aa40-5f300ca20242-cert") pod "openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd" (UID: "59c4af9d-14e6-4d10-aa40-5f300ca20242") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 28 16:03:24 crc kubenswrapper[4811]: I0128 16:03:24.555009 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-metrics-certs\") pod \"openstack-operator-controller-manager-9f67d7-pqwwm\" (UID: \"1218f34b-3146-4128-a086-4848855a5c92\") " pod="openstack-operators/openstack-operator-controller-manager-9f67d7-pqwwm" Jan 28 16:03:24 crc kubenswrapper[4811]: I0128 16:03:24.555083 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-webhook-certs\") pod \"openstack-operator-controller-manager-9f67d7-pqwwm\" (UID: \"1218f34b-3146-4128-a086-4848855a5c92\") " pod="openstack-operators/openstack-operator-controller-manager-9f67d7-pqwwm" Jan 28 16:03:24 crc kubenswrapper[4811]: E0128 16:03:24.555224 4811 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 28 16:03:24 crc kubenswrapper[4811]: E0128 16:03:24.555316 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-metrics-certs podName:1218f34b-3146-4128-a086-4848855a5c92 nodeName:}" failed. No retries permitted until 2026-01-28 16:03:26.555295247 +0000 UTC m=+1099.309658880 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-metrics-certs") pod "openstack-operator-controller-manager-9f67d7-pqwwm" (UID: "1218f34b-3146-4128-a086-4848855a5c92") : secret "metrics-server-cert" not found Jan 28 16:03:24 crc kubenswrapper[4811]: E0128 16:03:24.555228 4811 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 28 16:03:24 crc kubenswrapper[4811]: E0128 16:03:24.555385 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-webhook-certs podName:1218f34b-3146-4128-a086-4848855a5c92 nodeName:}" failed. No retries permitted until 2026-01-28 16:03:26.555371749 +0000 UTC m=+1099.309735332 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-webhook-certs") pod "openstack-operator-controller-manager-9f67d7-pqwwm" (UID: "1218f34b-3146-4128-a086-4848855a5c92") : secret "webhook-server-cert" not found Jan 28 16:03:24 crc kubenswrapper[4811]: I0128 16:03:24.646856 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-jxjn5" event={"ID":"ec09ccdf-ab1a-46ef-9a34-0d25f21e5205","Type":"ContainerStarted","Data":"66441af7bc7d8909d653a011f8036bf4e3289d473d845a01cb43c8cc25049417"} Jan 28 16:03:24 crc kubenswrapper[4811]: I0128 16:03:24.647759 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-7bdb645866-mv6x7" event={"ID":"7b0b495e-25ab-48c9-9ee7-494295ed7316","Type":"ContainerStarted","Data":"93e6f0ffca0fe362a84344ea7ccfdf214aa73747fc95acc9438ca4e6f434be04"} Jan 28 16:03:24 crc kubenswrapper[4811]: I0128 16:03:24.649005 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-598f7747c9-v4znb" event={"ID":"da7d2484-74a9-4915-b825-9c35586331e4","Type":"ContainerStarted","Data":"06c481b9d532dd014240a4f47ac49f8b9ccaad7cb7805a0ef25f898c3a66204f"} Jan 28 16:03:24 crc kubenswrapper[4811]: I0128 16:03:24.654074 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-jxcrp" event={"ID":"5e5c9061-59c7-4984-a93f-a61ef1b582ba","Type":"ContainerStarted","Data":"e13280d78fda5d2b8ebe0d184411a94c08dfa4cba7a793e968fd540bdd00c89e"} Jan 28 16:03:24 crc kubenswrapper[4811]: I0128 16:03:24.656161 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-trvzh" event={"ID":"787f4d88-cda8-4515-a1b8-db763a44554e","Type":"ContainerStarted","Data":"b7461fec7ad7fe318ea575cc1e7a714fd82f1435b365e4143c2228cde323dffb"} Jan 28 16:03:24 crc kubenswrapper[4811]: I0128 16:03:24.657597 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-r7j2f" event={"ID":"6fe8a24f-c7b1-4b27-908e-996e66803e0a","Type":"ContainerStarted","Data":"5d256250e2c057ce7871ed86eff09d5025cb098a117028e71bbe69b5d0b5b52f"} Jan 28 16:03:24 crc kubenswrapper[4811]: E0128 16:03:24.659779 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:445e951df2f21df6d33a466f75917e0f6103052ae751ae11887136e8ab165922\\\"\"" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-r7j2f" podUID="6fe8a24f-c7b1-4b27-908e-996e66803e0a" Jan 28 16:03:24 crc kubenswrapper[4811]: I0128 16:03:24.661446 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-22cf4" event={"ID":"548c9a9e-14a4-44a0-a7d0-f4ccca0bcdd0","Type":"ContainerStarted","Data":"ddde27de641e045530cb0ebdcb2052474ee6c42b3ea05dcfa44b9707d797d78b"} Jan 28 16:03:24 crc kubenswrapper[4811]: E0128 16:03:24.675824 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:b673f00227298dcfa89abb46f8296a0825add42da41e8a4bf4dd13367c738d84\\\"\"" 
pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-trvzh" podUID="787f4d88-cda8-4515-a1b8-db763a44554e" Jan 28 16:03:24 crc kubenswrapper[4811]: I0128 16:03:24.686780 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-5f4cd88d46-5kczk" event={"ID":"12256989-9520-4379-9e55-3c11ab97993c","Type":"ContainerStarted","Data":"38b95845a3a5b2bd5ce1f2e6c4bab918cf542572b58f0ce6bf7ea8a32f30f51a"} Jan 28 16:03:24 crc kubenswrapper[4811]: E0128 16:03:24.688335 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:c8dde42dafd41026ed2e4cfc26efc0fff63c4ba9d31326ae7dc644ccceaafa9d\\\"\"" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-22cf4" podUID="548c9a9e-14a4-44a0-a7d0-f4ccca0bcdd0" Jan 28 16:03:24 crc kubenswrapper[4811]: I0128 16:03:24.688891 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-dr9ff" event={"ID":"66f46681-a51b-4c9e-9cc0-0a604a8ce3c0","Type":"ContainerStarted","Data":"a3f5ce2c39ed25f9fc8ec10f2749856ad3c3e234e60065c704603b2a5a126484"} Jan 28 16:03:24 crc kubenswrapper[4811]: I0128 16:03:24.704743 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-8vvwj" event={"ID":"7808e709-5b75-45bb-bf62-5756261a4279","Type":"ContainerStarted","Data":"afae23ff10903313684755010e0edeb28b9fd79f97815c4f13f99e97c48f8c38"} Jan 28 16:03:24 crc kubenswrapper[4811]: E0128 16:03:24.706548 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-8vvwj" podUID="7808e709-5b75-45bb-bf62-5756261a4279" Jan 28 16:03:24 crc kubenswrapper[4811]: I0128 16:03:24.710642 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-q5dc6" event={"ID":"f6539037-4781-4a9d-b13a-80e9c7f178ac","Type":"ContainerStarted","Data":"723f6af776c764bea34be1dbd21f921e6bb2c50595695283f9717590f4c825c8"} Jan 28 16:03:24 crc kubenswrapper[4811]: I0128 16:03:24.715098 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-78d58447c5-9x8xg" event={"ID":"fc9fd651-c991-4202-911d-26e9b037c636","Type":"ContainerStarted","Data":"665c7343f2e734eb52159a9bfe166dbc8393b83ec3cf7bdd37ab8cb7bbc8d0ab"} Jan 28 16:03:24 crc kubenswrapper[4811]: I0128 16:03:24.719948 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-gkhtq" event={"ID":"def156ed-ad39-4f23-aa51-bd1d36b35543","Type":"ContainerStarted","Data":"10fdbbad8deef194e678162bfa232a3b3baae1b201dcb84a57340a045bead8d9"} Jan 28 16:03:24 crc kubenswrapper[4811]: E0128 16:03:24.721372 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:e02722d7581bfe1c5fc13e2fa6811d8665102ba86635c77547abf6b933cde127\\\"\"" 
pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-gkhtq" podUID="def156ed-ad39-4f23-aa51-bd1d36b35543" Jan 28 16:03:24 crc kubenswrapper[4811]: I0128 16:03:24.721991 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-znx6t" event={"ID":"c2dcc00d-c6c6-4546-8aed-b1ccceb9534f","Type":"ContainerStarted","Data":"39b2ba55745eecb322d3979ceec98f98458324f809d3f7c9a4a6747114e8ee69"} Jan 28 16:03:24 crc kubenswrapper[4811]: E0128 16:03:24.726947 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:013c0ad82d21a21c7eece5cd4b5d5c4b8eb410b6671ac33a6f3fb78c8510811d\\\"\"" pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-znx6t" podUID="c2dcc00d-c6c6-4546-8aed-b1ccceb9534f" Jan 28 16:03:24 crc kubenswrapper[4811]: I0128 16:03:24.730068 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-564965969-lxmgs" event={"ID":"003629ab-e0b0-49b2-a4ab-f9cf7b67b588","Type":"ContainerStarted","Data":"fe1777fa69949b12f7bc3c3076874908dce909fdc693a072f1c7f87952c7e24b"} Jan 28 16:03:24 crc kubenswrapper[4811]: E0128 16:03:24.737979 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:7869203f6f97de780368d507636031090fed3b658d2f7771acbd4481bdfc870b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-564965969-lxmgs" podUID="003629ab-e0b0-49b2-a4ab-f9cf7b67b588" Jan 28 16:03:25 crc kubenswrapper[4811]: E0128 16:03:25.768690 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:b673f00227298dcfa89abb46f8296a0825add42da41e8a4bf4dd13367c738d84\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-trvzh" podUID="787f4d88-cda8-4515-a1b8-db763a44554e" Jan 28 16:03:25 crc kubenswrapper[4811]: E0128 16:03:25.768715 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:7869203f6f97de780368d507636031090fed3b658d2f7771acbd4481bdfc870b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-564965969-lxmgs" podUID="003629ab-e0b0-49b2-a4ab-f9cf7b67b588" Jan 28 16:03:25 crc kubenswrapper[4811]: E0128 16:03:25.768720 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:c8dde42dafd41026ed2e4cfc26efc0fff63c4ba9d31326ae7dc644ccceaafa9d\\\"\"" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-22cf4" podUID="548c9a9e-14a4-44a0-a7d0-f4ccca0bcdd0" Jan 28 16:03:25 crc kubenswrapper[4811]: E0128 16:03:25.768755 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:013c0ad82d21a21c7eece5cd4b5d5c4b8eb410b6671ac33a6f3fb78c8510811d\\\"\"" 
pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-znx6t" podUID="c2dcc00d-c6c6-4546-8aed-b1ccceb9534f" Jan 28 16:03:25 crc kubenswrapper[4811]: E0128 16:03:25.768812 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:e02722d7581bfe1c5fc13e2fa6811d8665102ba86635c77547abf6b933cde127\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-gkhtq" podUID="def156ed-ad39-4f23-aa51-bd1d36b35543" Jan 28 16:03:25 crc kubenswrapper[4811]: E0128 16:03:25.768884 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-8vvwj" podUID="7808e709-5b75-45bb-bf62-5756261a4279" Jan 28 16:03:25 crc kubenswrapper[4811]: E0128 16:03:25.773242 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:445e951df2f21df6d33a466f75917e0f6103052ae751ae11887136e8ab165922\\\"\"" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-r7j2f" podUID="6fe8a24f-c7b1-4b27-908e-996e66803e0a" Jan 28 16:03:25 crc kubenswrapper[4811]: I0128 16:03:25.791383 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e9614b09-655d-4426-8b74-772aa777cf4d-cert\") pod \"infra-operator-controller-manager-694cf4f878-92chv\" (UID: \"e9614b09-655d-4426-8b74-772aa777cf4d\") " pod="openstack-operators/infra-operator-controller-manager-694cf4f878-92chv" Jan 28 16:03:25 crc kubenswrapper[4811]: E0128 16:03:25.792088 4811 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 28 16:03:25 crc kubenswrapper[4811]: E0128 16:03:25.792240 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e9614b09-655d-4426-8b74-772aa777cf4d-cert podName:e9614b09-655d-4426-8b74-772aa777cf4d nodeName:}" failed. No retries permitted until 2026-01-28 16:03:29.792188309 +0000 UTC m=+1102.546551912 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e9614b09-655d-4426-8b74-772aa777cf4d-cert") pod "infra-operator-controller-manager-694cf4f878-92chv" (UID: "e9614b09-655d-4426-8b74-772aa777cf4d") : secret "infra-operator-webhook-server-cert" not found Jan 28 16:03:26 crc kubenswrapper[4811]: I0128 16:03:26.199052 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/59c4af9d-14e6-4d10-aa40-5f300ca20242-cert\") pod \"openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd\" (UID: \"59c4af9d-14e6-4d10-aa40-5f300ca20242\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd" Jan 28 16:03:26 crc kubenswrapper[4811]: E0128 16:03:26.199267 4811 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 28 16:03:26 crc kubenswrapper[4811]: E0128 16:03:26.199352 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/59c4af9d-14e6-4d10-aa40-5f300ca20242-cert podName:59c4af9d-14e6-4d10-aa40-5f300ca20242 nodeName:}" failed. No retries permitted until 2026-01-28 16:03:30.199330788 +0000 UTC m=+1102.953694371 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/59c4af9d-14e6-4d10-aa40-5f300ca20242-cert") pod "openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd" (UID: "59c4af9d-14e6-4d10-aa40-5f300ca20242") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 28 16:03:26 crc kubenswrapper[4811]: I0128 16:03:26.604590 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-webhook-certs\") pod \"openstack-operator-controller-manager-9f67d7-pqwwm\" (UID: \"1218f34b-3146-4128-a086-4848855a5c92\") " pod="openstack-operators/openstack-operator-controller-manager-9f67d7-pqwwm" Jan 28 16:03:26 crc kubenswrapper[4811]: E0128 16:03:26.604857 4811 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 28 16:03:26 crc kubenswrapper[4811]: E0128 16:03:26.604917 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-webhook-certs podName:1218f34b-3146-4128-a086-4848855a5c92 nodeName:}" failed. No retries permitted until 2026-01-28 16:03:30.604898534 +0000 UTC m=+1103.359262117 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-webhook-certs") pod "openstack-operator-controller-manager-9f67d7-pqwwm" (UID: "1218f34b-3146-4128-a086-4848855a5c92") : secret "webhook-server-cert" not found Jan 28 16:03:26 crc kubenswrapper[4811]: E0128 16:03:26.605016 4811 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 28 16:03:26 crc kubenswrapper[4811]: E0128 16:03:26.605092 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-metrics-certs podName:1218f34b-3146-4128-a086-4848855a5c92 nodeName:}" failed. No retries permitted until 2026-01-28 16:03:30.605074569 +0000 UTC m=+1103.359438152 (durationBeforeRetry 4s). 
Jan 28 16:03:26 crc kubenswrapper[4811]: I0128 16:03:26.605243 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-metrics-certs\") pod \"openstack-operator-controller-manager-9f67d7-pqwwm\" (UID: \"1218f34b-3146-4128-a086-4848855a5c92\") " pod="openstack-operators/openstack-operator-controller-manager-9f67d7-pqwwm"
Jan 28 16:03:29 crc kubenswrapper[4811]: I0128 16:03:29.865584 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e9614b09-655d-4426-8b74-772aa777cf4d-cert\") pod \"infra-operator-controller-manager-694cf4f878-92chv\" (UID: \"e9614b09-655d-4426-8b74-772aa777cf4d\") " pod="openstack-operators/infra-operator-controller-manager-694cf4f878-92chv"
Jan 28 16:03:29 crc kubenswrapper[4811]: E0128 16:03:29.865803 4811 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Jan 28 16:03:29 crc kubenswrapper[4811]: E0128 16:03:29.866133 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e9614b09-655d-4426-8b74-772aa777cf4d-cert podName:e9614b09-655d-4426-8b74-772aa777cf4d nodeName:}" failed. No retries permitted until 2026-01-28 16:03:37.866110831 +0000 UTC m=+1110.620474494 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e9614b09-655d-4426-8b74-772aa777cf4d-cert") pod "infra-operator-controller-manager-694cf4f878-92chv" (UID: "e9614b09-655d-4426-8b74-772aa777cf4d") : secret "infra-operator-webhook-server-cert" not found
Jan 28 16:03:30 crc kubenswrapper[4811]: I0128 16:03:30.285088 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/59c4af9d-14e6-4d10-aa40-5f300ca20242-cert\") pod \"openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd\" (UID: \"59c4af9d-14e6-4d10-aa40-5f300ca20242\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd"
Jan 28 16:03:30 crc kubenswrapper[4811]: E0128 16:03:30.285364 4811 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Jan 28 16:03:30 crc kubenswrapper[4811]: E0128 16:03:30.285462 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/59c4af9d-14e6-4d10-aa40-5f300ca20242-cert podName:59c4af9d-14e6-4d10-aa40-5f300ca20242 nodeName:}" failed. No retries permitted until 2026-01-28 16:03:38.285423518 +0000 UTC m=+1111.039787101 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/59c4af9d-14e6-4d10-aa40-5f300ca20242-cert") pod "openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd" (UID: "59c4af9d-14e6-4d10-aa40-5f300ca20242") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Jan 28 16:03:30 crc kubenswrapper[4811]: I0128 16:03:30.691665 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-webhook-certs\") pod \"openstack-operator-controller-manager-9f67d7-pqwwm\" (UID: \"1218f34b-3146-4128-a086-4848855a5c92\") " pod="openstack-operators/openstack-operator-controller-manager-9f67d7-pqwwm"
Jan 28 16:03:30 crc kubenswrapper[4811]: E0128 16:03:30.691837 4811 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Jan 28 16:03:30 crc kubenswrapper[4811]: I0128 16:03:30.692150 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-metrics-certs\") pod \"openstack-operator-controller-manager-9f67d7-pqwwm\" (UID: \"1218f34b-3146-4128-a086-4848855a5c92\") " pod="openstack-operators/openstack-operator-controller-manager-9f67d7-pqwwm"
Jan 28 16:03:30 crc kubenswrapper[4811]: E0128 16:03:30.692216 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-webhook-certs podName:1218f34b-3146-4128-a086-4848855a5c92 nodeName:}" failed. No retries permitted until 2026-01-28 16:03:38.692196826 +0000 UTC m=+1111.446560399 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-webhook-certs") pod "openstack-operator-controller-manager-9f67d7-pqwwm" (UID: "1218f34b-3146-4128-a086-4848855a5c92") : secret "webhook-server-cert" not found
Jan 28 16:03:30 crc kubenswrapper[4811]: E0128 16:03:30.692274 4811 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Jan 28 16:03:30 crc kubenswrapper[4811]: E0128 16:03:30.692343 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-metrics-certs podName:1218f34b-3146-4128-a086-4848855a5c92 nodeName:}" failed. No retries permitted until 2026-01-28 16:03:38.692326649 +0000 UTC m=+1111.446690232 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-metrics-certs") pod "openstack-operator-controller-manager-9f67d7-pqwwm" (UID: "1218f34b-3146-4128-a086-4848855a5c92") : secret "metrics-server-cert" not found
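Note the retry schedule in the mount failures above: durationBeforeRetry is 4s for the 16:03:25-26 attempts and 8s for the 16:03:29-30 attempts, and it reaches 16s further down the log, i.e. the per-volume retry delay doubles after each consecutive failure. A minimal sketch of that doubling policy (an assumed shape with an arbitrary cap, not the kubelet's actual implementation):

    package main

    import (
        "fmt"
        "time"
    )

    // nextDelay doubles the retry delay after each failure, up to a limit,
    // mirroring the 4s -> 8s -> 16s progression seen in this log.
    func nextDelay(current, limit time.Duration) time.Duration {
        next := 2 * current
        if next > limit {
            return limit
        }
        return next
    }

    func main() {
        d := 4 * time.Second
        for i := 0; i < 4; i++ {
            fmt.Println(d) // 4s, 8s, 16s, ...
            d = nextDelay(d, 2*time.Minute)
        }
    }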
Jan 28 16:03:33 crc kubenswrapper[4811]: I0128 16:03:33.087228 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 16:03:33 crc kubenswrapper[4811]: I0128 16:03:33.087898 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
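The two lines above are the kubelet's HTTP liveness probe against the machine-config-daemon: an HTTP GET to http://127.0.0.1:8798/health that fails with "connection refused" because nothing is listening at that moment. For orientation, a handler of the following shape is what such a probe expects to reach (path and port taken from the log; the handler itself is an illustrative sketch):

    package main

    import "net/http"

    func main() {
        // Serve the /health path the probe in the log is requesting.
        http.HandleFunc("/health", func(w http.ResponseWriter, r *http.Request) {
            w.WriteHeader(http.StatusOK) // any status in [200,400) counts as probe success
        })
        // Listen where the kubelet dials above; until this is running,
        // probes fail with "connect: connection refused".
        http.ListenAndServe("127.0.0.1:8798", nil)
    }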
Jan 28 16:03:35 crc kubenswrapper[4811]: E0128 16:03:35.660842 4811 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/octavia-operator@sha256:ed489f21a0c72557d2da5a271808f19b7c7b85ef32fd9f4aa91bdbfc5bca3bdd"
Jan 28 16:03:35 crc kubenswrapper[4811]: E0128 16:03:35.661345 4811 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:ed489f21a0c72557d2da5a271808f19b7c7b85ef32fd9f4aa91bdbfc5bca3bdd,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cnsgz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-5f4cd88d46-5kczk_openstack-operators(12256989-9520-4379-9e55-3c11ab97993c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 28 16:03:35 crc kubenswrapper[4811]: E0128 16:03:35.662989 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/octavia-operator-controller-manager-5f4cd88d46-5kczk" podUID="12256989-9520-4379-9e55-3c11ab97993c"
Jan 28 16:03:35 crc kubenswrapper[4811]: E0128 16:03:35.822276 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:ed489f21a0c72557d2da5a271808f19b7c7b85ef32fd9f4aa91bdbfc5bca3bdd\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-5f4cd88d46-5kczk" podUID="12256989-9520-4379-9e55-3c11ab97993c"
Jan 28 16:03:36 crc kubenswrapper[4811]: E0128 16:03:36.877048 4811 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/glance-operator@sha256:9caae9b3ee328df678baa26454e45e47693acdadb27f9c635680597aaec43337"
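The &Container{...} dump above prints resource quantities in their internal form: "cpu: {{500 -3} {} 500m DecimalSI}" is an unscaled value of 500 at decimal scale -3, i.e. 0.5 CPU (500m), and "memory: {{536870912 0} {} BinarySI}" is 536870912 bytes = 512Mi (the requests, 10m and 268435456 = 256Mi, read the same way). A small check with the apimachinery resource package (illustrative, not code from this log):

    package main

    import (
        "fmt"

        "k8s.io/apimachinery/pkg/api/resource"
    )

    func main() {
        cpu := resource.MustParse("500m")
        mem := resource.MustParse("512Mi")
        // 500m CPU is stored as unscaled 500 at scale -3, matching the
        // "{{500 -3} {} 500m DecimalSI}" rendering in the dump above.
        fmt.Println(cpu.MilliValue()) // 500
        // 512Mi is 536870912 bytes, matching "{{536870912 0} {} BinarySI}".
        fmt.Println(mem.Value()) // 536870912
    }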
Jan 28 16:03:36 crc kubenswrapper[4811]: E0128 16:03:36.877256 4811 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:9caae9b3ee328df678baa26454e45e47693acdadb27f9c635680597aaec43337,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tcbpg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-78fdd796fd-drtqb_openstack-operators(4621fc20-e68f-4c29-a01b-e62b2eda190c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 28 16:03:36 crc kubenswrapper[4811]: E0128 16:03:36.878491 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-drtqb" podUID="4621fc20-e68f-4c29-a01b-e62b2eda190c"
Jan 28 16:03:37 crc kubenswrapper[4811]: E0128 16:03:37.532922 4811 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:8abfbec47f0119a6c22c61a0ff80a4b1c6c14439a327bc75d4c529c5d8f59658"
Jan 28 16:03:37 crc kubenswrapper[4811]: E0128 16:03:37.533099 4811 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:8abfbec47f0119a6c22c61a0ff80a4b1c6c14439a327bc75d4c529c5d8f59658,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4x66x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-7bdb645866-mv6x7_openstack-operators(7b0b495e-25ab-48c9-9ee7-494295ed7316): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 28 16:03:37 crc kubenswrapper[4811]: E0128 16:03:37.534915 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-7bdb645866-mv6x7" podUID="7b0b495e-25ab-48c9-9ee7-494295ed7316"
Jan 28 16:03:37 crc kubenswrapper[4811]: I0128 16:03:37.833830 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-598f7747c9-v4znb" event={"ID":"da7d2484-74a9-4915-b825-9c35586331e4","Type":"ContainerStarted","Data":"784e034e8904e378be3ee5c576bf954ae70ca3739b7a3a651f1650bb47690e30"}
Jan 28 16:03:37 crc kubenswrapper[4811]: E0128 16:03:37.835476 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:8abfbec47f0119a6c22c61a0ff80a4b1c6c14439a327bc75d4c529c5d8f59658\\\"\"" pod="openstack-operators/nova-operator-controller-manager-7bdb645866-mv6x7" podUID="7b0b495e-25ab-48c9-9ee7-494295ed7316"
Jan 28 16:03:37 crc kubenswrapper[4811]: E0128 16:03:37.836346 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/glance-operator@sha256:9caae9b3ee328df678baa26454e45e47693acdadb27f9c635680597aaec43337\\\"\"" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-drtqb" podUID="4621fc20-e68f-4c29-a01b-e62b2eda190c"
Jan 28 16:03:37 crc kubenswrapper[4811]: I0128 16:03:37.877548 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-598f7747c9-v4znb" podStartSLOduration=2.291812873 podStartE2EDuration="15.877527785s" podCreationTimestamp="2026-01-28 16:03:22 +0000 UTC" firstStartedPulling="2026-01-28 16:03:23.918723592 +0000 UTC m=+1096.673087175" lastFinishedPulling="2026-01-28 16:03:37.504438504 +0000 UTC m=+1110.258802087" observedRunningTime="2026-01-28 16:03:37.877014911 +0000 UTC m=+1110.631378494" watchObservedRunningTime="2026-01-28 16:03:37.877527785 +0000 UTC m=+1110.631891368"
Jan 28 16:03:37 crc kubenswrapper[4811]: I0128 16:03:37.907078 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e9614b09-655d-4426-8b74-772aa777cf4d-cert\") pod \"infra-operator-controller-manager-694cf4f878-92chv\" (UID: \"e9614b09-655d-4426-8b74-772aa777cf4d\") " pod="openstack-operators/infra-operator-controller-manager-694cf4f878-92chv"
Jan 28 16:03:37 crc kubenswrapper[4811]: E0128 16:03:37.907250 4811 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Jan 28 16:03:37 crc kubenswrapper[4811]: E0128 16:03:37.907298 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e9614b09-655d-4426-8b74-772aa777cf4d-cert podName:e9614b09-655d-4426-8b74-772aa777cf4d nodeName:}" failed. No retries permitted until 2026-01-28 16:03:53.907282777 +0000 UTC m=+1126.661646360 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e9614b09-655d-4426-8b74-772aa777cf4d-cert") pod "infra-operator-controller-manager-694cf4f878-92chv" (UID: "e9614b09-655d-4426-8b74-772aa777cf4d") : secret "infra-operator-webhook-server-cert" not found
Jan 28 16:03:38 crc kubenswrapper[4811]: I0128 16:03:38.314743 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/59c4af9d-14e6-4d10-aa40-5f300ca20242-cert\") pod \"openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd\" (UID: \"59c4af9d-14e6-4d10-aa40-5f300ca20242\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd"
Jan 28 16:03:38 crc kubenswrapper[4811]: E0128 16:03:38.314916 4811 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Jan 28 16:03:38 crc kubenswrapper[4811]: E0128 16:03:38.314984 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/59c4af9d-14e6-4d10-aa40-5f300ca20242-cert podName:59c4af9d-14e6-4d10-aa40-5f300ca20242 nodeName:}" failed. No retries permitted until 2026-01-28 16:03:54.31496611 +0000 UTC m=+1127.069329693 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/59c4af9d-14e6-4d10-aa40-5f300ca20242-cert") pod "openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd" (UID: "59c4af9d-14e6-4d10-aa40-5f300ca20242") : secret "openstack-baremetal-operator-webhook-server-cert" not found
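The ironic-operator latency record above decomposes cleanly: podStartE2EDuration = watchObservedRunningTime - podCreationTimestamp = 16:03:37.877527785 - 16:03:22 = 15.877527785s, and podStartSLOduration excludes the image pull window (lastFinishedPulling - firstStartedPulling = 13.585714912s), leaving 15.877527785 - 13.585714912 = 2.291812873s, exactly the logged value. The same arithmetic in Go (timestamps copied from the record above):

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        parse := func(s string) time.Time {
            t, err := time.Parse(time.RFC3339Nano, s)
            if err != nil {
                panic(err)
            }
            return t
        }
        created := parse("2026-01-28T16:03:22Z")
        running := parse("2026-01-28T16:03:37.877527785Z")
        pullStart := parse("2026-01-28T16:03:23.918723592Z")
        pullEnd := parse("2026-01-28T16:03:37.504438504Z")

        e2e := running.Sub(created)         // podStartE2EDuration
        slo := e2e - pullEnd.Sub(pullStart) // image pull time excluded
        fmt.Println(e2e, slo)               // 15.877527785s 2.291812873s
    }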
Jan 28 16:03:38 crc kubenswrapper[4811]: I0128 16:03:38.719924 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-webhook-certs\") pod \"openstack-operator-controller-manager-9f67d7-pqwwm\" (UID: \"1218f34b-3146-4128-a086-4848855a5c92\") " pod="openstack-operators/openstack-operator-controller-manager-9f67d7-pqwwm"
Jan 28 16:03:38 crc kubenswrapper[4811]: E0128 16:03:38.720224 4811 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Jan 28 16:03:38 crc kubenswrapper[4811]: I0128 16:03:38.720374 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-metrics-certs\") pod \"openstack-operator-controller-manager-9f67d7-pqwwm\" (UID: \"1218f34b-3146-4128-a086-4848855a5c92\") " pod="openstack-operators/openstack-operator-controller-manager-9f67d7-pqwwm"
Jan 28 16:03:38 crc kubenswrapper[4811]: E0128 16:03:38.720548 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-webhook-certs podName:1218f34b-3146-4128-a086-4848855a5c92 nodeName:}" failed. No retries permitted until 2026-01-28 16:03:54.720524746 +0000 UTC m=+1127.474888329 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-webhook-certs") pod "openstack-operator-controller-manager-9f67d7-pqwwm" (UID: "1218f34b-3146-4128-a086-4848855a5c92") : secret "webhook-server-cert" not found
Jan 28 16:03:38 crc kubenswrapper[4811]: I0128 16:03:38.730824 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-metrics-certs\") pod \"openstack-operator-controller-manager-9f67d7-pqwwm\" (UID: \"1218f34b-3146-4128-a086-4848855a5c92\") " pod="openstack-operators/openstack-operator-controller-manager-9f67d7-pqwwm"
Jan 28 16:03:38 crc kubenswrapper[4811]: I0128 16:03:38.848846 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-jxjn5" event={"ID":"ec09ccdf-ab1a-46ef-9a34-0d25f21e5205","Type":"ContainerStarted","Data":"1a27a28b935934038be33dd6fdd7fe0c70a0ac0e21ebd9f3015a9c723b4b836a"}
Jan 28 16:03:38 crc kubenswrapper[4811]: I0128 16:03:38.848934 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-jxjn5"
Jan 28 16:03:38 crc kubenswrapper[4811]: I0128 16:03:38.863964 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-dr9ff" event={"ID":"66f46681-a51b-4c9e-9cc0-0a604a8ce3c0","Type":"ContainerStarted","Data":"93e114da7627d07e34d56d34a3f1ffe86958b5ab1e1a6d5cea1ef002e196d72b"}
Jan 28 16:03:38 crc kubenswrapper[4811]: I0128 16:03:38.864705 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-dr9ff"
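After the long run of "secret ... not found" failures, the metrics-certs mount finally succeeds at 16:03:38.730824, which means the metrics-server-cert secret had just appeared in the openstack-operators namespace (such webhook/metrics cert secrets are typically created asynchronously by cert-manager or the operator installer; that is an inference, the log does not say who created it). A client-go sketch of waiting for such a secret, with namespace and name taken from the log and the kubeconfig path as a placeholder:

    package main

    import (
        "context"
        "fmt"
        "time"

        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig")
        if err != nil {
            panic(err)
        }
        cs := kubernetes.NewForConfigOrDie(cfg)
        // Poll until the secret whose absence caused the MountVolume
        // failures above exists; the kubelet retries the mount on its
        // own backoff once the secret is present.
        for {
            _, err := cs.CoreV1().Secrets("openstack-operators").
                Get(context.TODO(), "metrics-server-cert", metav1.GetOptions{})
            if err == nil {
                fmt.Println("secret present; mounts can proceed")
                return
            }
            time.Sleep(2 * time.Second)
        }
    }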
pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-dr9ff" Jan 28 16:03:38 crc kubenswrapper[4811]: I0128 16:03:38.866839 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-trvzh" event={"ID":"787f4d88-cda8-4515-a1b8-db763a44554e","Type":"ContainerStarted","Data":"def021660894cea3c5f71f691e846432c656bc5fd39c979a924dd4958d838f20"} Jan 28 16:03:38 crc kubenswrapper[4811]: I0128 16:03:38.867094 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-trvzh" Jan 28 16:03:38 crc kubenswrapper[4811]: I0128 16:03:38.871717 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-9k7l8" event={"ID":"f8bca6a7-6aca-4dd1-8c9d-a358f932eab3","Type":"ContainerStarted","Data":"6c7477f13ec0fc4f745c9ec57f9a7349c9c11a10c45b16eba58855978f8bb77c"} Jan 28 16:03:38 crc kubenswrapper[4811]: I0128 16:03:38.872204 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-9k7l8" Jan 28 16:03:38 crc kubenswrapper[4811]: I0128 16:03:38.879112 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-jxjn5" podStartSLOduration=3.249489475 podStartE2EDuration="16.879095621s" podCreationTimestamp="2026-01-28 16:03:22 +0000 UTC" firstStartedPulling="2026-01-28 16:03:23.943253693 +0000 UTC m=+1096.697617276" lastFinishedPulling="2026-01-28 16:03:37.572859839 +0000 UTC m=+1110.327223422" observedRunningTime="2026-01-28 16:03:38.873578612 +0000 UTC m=+1111.627942195" watchObservedRunningTime="2026-01-28 16:03:38.879095621 +0000 UTC m=+1111.633459194" Jan 28 16:03:38 crc kubenswrapper[4811]: I0128 16:03:38.891801 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-7478f7dbf9-zqk5g" event={"ID":"5d2909d9-5499-4119-886c-d389257e85fa","Type":"ContainerStarted","Data":"ec29bc8066251efc6a786e7540361e068ce83504a96933733de704c14c625661"} Jan 28 16:03:38 crc kubenswrapper[4811]: I0128 16:03:38.891964 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-7478f7dbf9-zqk5g" Jan 28 16:03:38 crc kubenswrapper[4811]: I0128 16:03:38.898769 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-q5dc6" event={"ID":"f6539037-4781-4a9d-b13a-80e9c7f178ac","Type":"ContainerStarted","Data":"6535cfcbf85832cdb9e33c021e3fe658902cf7175107af3caa494e707084b80a"} Jan 28 16:03:38 crc kubenswrapper[4811]: I0128 16:03:38.899452 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-q5dc6" Jan 28 16:03:38 crc kubenswrapper[4811]: I0128 16:03:38.901985 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-78d58447c5-9x8xg" event={"ID":"fc9fd651-c991-4202-911d-26e9b037c636","Type":"ContainerStarted","Data":"885ae373b5f053d89aa6092fdceb32a99b35465614d752146aba2bbf67ac0105"} Jan 28 16:03:38 crc kubenswrapper[4811]: I0128 16:03:38.902409 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-78d58447c5-9x8xg" Jan 28 16:03:38 crc 
Jan 28 16:03:38 crc kubenswrapper[4811]: I0128 16:03:38.909584 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-trvzh" podStartSLOduration=2.463387278 podStartE2EDuration="16.909565453s" podCreationTimestamp="2026-01-28 16:03:22 +0000 UTC" firstStartedPulling="2026-01-28 16:03:23.98728041 +0000 UTC m=+1096.741643983" lastFinishedPulling="2026-01-28 16:03:38.433458585 +0000 UTC m=+1111.187822158" observedRunningTime="2026-01-28 16:03:38.904471535 +0000 UTC m=+1111.658835118" watchObservedRunningTime="2026-01-28 16:03:38.909565453 +0000 UTC m=+1111.663929036"
Jan 28 16:03:38 crc kubenswrapper[4811]: I0128 16:03:38.913261 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-t4nkf" event={"ID":"4fe875df-573b-4f98-bcaa-2f72a2a409cb","Type":"ContainerStarted","Data":"7eb1b1615f784aff9863b60ceaa5c68ece902d045ada1e196511073a586571c9"}
Jan 28 16:03:38 crc kubenswrapper[4811]: I0128 16:03:38.913897 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-t4nkf"
Jan 28 16:03:38 crc kubenswrapper[4811]: I0128 16:03:38.916301 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-jxcrp" event={"ID":"5e5c9061-59c7-4984-a93f-a61ef1b582ba","Type":"ContainerStarted","Data":"988784b8b946f62a23145ee3bc254b4f78380a0fe6080d6d1697ae68a98b6fae"}
Jan 28 16:03:38 crc kubenswrapper[4811]: I0128 16:03:38.916676 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-jxcrp"
Jan 28 16:03:38 crc kubenswrapper[4811]: I0128 16:03:38.934225 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7f86f8796f-flj9q" event={"ID":"0cf7363c-9c82-4ab6-bd1c-d8cff679a3b3","Type":"ContainerStarted","Data":"bba1236c7401e354ef20225055fb4bfedc8294dadf3b1e5dcaaf542bb5e5cd94"}
Jan 28 16:03:38 crc kubenswrapper[4811]: I0128 16:03:38.934267 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-598f7747c9-v4znb"
Jan 28 16:03:38 crc kubenswrapper[4811]: I0128 16:03:38.934504 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7f86f8796f-flj9q"
Jan 28 16:03:38 crc kubenswrapper[4811]: I0128 16:03:38.946398 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-9k7l8" podStartSLOduration=3.7790218490000003 podStartE2EDuration="17.946383746s" podCreationTimestamp="2026-01-28 16:03:21 +0000 UTC" firstStartedPulling="2026-01-28 16:03:23.40504807 +0000 UTC m=+1096.159411653" lastFinishedPulling="2026-01-28 16:03:37.572409967 +0000 UTC m=+1110.326773550" observedRunningTime="2026-01-28 16:03:38.945770099 +0000 UTC m=+1111.700133682" watchObservedRunningTime="2026-01-28 16:03:38.946383746 +0000 UTC m=+1111.700747319"
Jan 28 16:03:39 crc kubenswrapper[4811]: I0128 16:03:39.003176 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-dr9ff" podStartSLOduration=3.421774342 podStartE2EDuration="17.003159657s" podCreationTimestamp="2026-01-28 16:03:22 +0000 UTC" firstStartedPulling="2026-01-28 16:03:23.967179749 +0000 UTC m=+1096.721543332" lastFinishedPulling="2026-01-28 16:03:37.548565064 +0000 UTC m=+1110.302928647" observedRunningTime="2026-01-28 16:03:38.977843754 +0000 UTC m=+1111.732207337" watchObservedRunningTime="2026-01-28 16:03:39.003159657 +0000 UTC m=+1111.757523240"
firstStartedPulling="2026-01-28 16:03:23.967179749 +0000 UTC m=+1096.721543332" lastFinishedPulling="2026-01-28 16:03:37.548565064 +0000 UTC m=+1110.302928647" observedRunningTime="2026-01-28 16:03:38.977843754 +0000 UTC m=+1111.732207337" watchObservedRunningTime="2026-01-28 16:03:39.003159657 +0000 UTC m=+1111.757523240" Jan 28 16:03:39 crc kubenswrapper[4811]: I0128 16:03:39.006266 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-jxcrp" podStartSLOduration=3.370758016 podStartE2EDuration="17.00625869s" podCreationTimestamp="2026-01-28 16:03:22 +0000 UTC" firstStartedPulling="2026-01-28 16:03:23.93866585 +0000 UTC m=+1096.693029433" lastFinishedPulling="2026-01-28 16:03:37.574166524 +0000 UTC m=+1110.328530107" observedRunningTime="2026-01-28 16:03:38.992258963 +0000 UTC m=+1111.746622536" watchObservedRunningTime="2026-01-28 16:03:39.00625869 +0000 UTC m=+1111.760622273" Jan 28 16:03:39 crc kubenswrapper[4811]: I0128 16:03:39.076443 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-78d58447c5-9x8xg" podStartSLOduration=3.399033917 podStartE2EDuration="17.076409352s" podCreationTimestamp="2026-01-28 16:03:22 +0000 UTC" firstStartedPulling="2026-01-28 16:03:23.896343308 +0000 UTC m=+1096.650706891" lastFinishedPulling="2026-01-28 16:03:37.573718743 +0000 UTC m=+1110.328082326" observedRunningTime="2026-01-28 16:03:39.043349101 +0000 UTC m=+1111.797712684" watchObservedRunningTime="2026-01-28 16:03:39.076409352 +0000 UTC m=+1111.830772935" Jan 28 16:03:39 crc kubenswrapper[4811]: I0128 16:03:39.078045 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7f86f8796f-flj9q" podStartSLOduration=3.924034179 podStartE2EDuration="18.078035265s" podCreationTimestamp="2026-01-28 16:03:21 +0000 UTC" firstStartedPulling="2026-01-28 16:03:23.375279108 +0000 UTC m=+1096.129642691" lastFinishedPulling="2026-01-28 16:03:37.529280194 +0000 UTC m=+1110.283643777" observedRunningTime="2026-01-28 16:03:39.075120987 +0000 UTC m=+1111.829484570" watchObservedRunningTime="2026-01-28 16:03:39.078035265 +0000 UTC m=+1111.832398848" Jan 28 16:03:39 crc kubenswrapper[4811]: I0128 16:03:39.101972 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-q5dc6" podStartSLOduration=4.568890116 podStartE2EDuration="18.10195851s" podCreationTimestamp="2026-01-28 16:03:21 +0000 UTC" firstStartedPulling="2026-01-28 16:03:23.97578334 +0000 UTC m=+1096.730146923" lastFinishedPulling="2026-01-28 16:03:37.508851734 +0000 UTC m=+1110.263215317" observedRunningTime="2026-01-28 16:03:39.100596424 +0000 UTC m=+1111.854960007" watchObservedRunningTime="2026-01-28 16:03:39.10195851 +0000 UTC m=+1111.856322093" Jan 28 16:03:39 crc kubenswrapper[4811]: I0128 16:03:39.133710 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-t4nkf" podStartSLOduration=4.105316938 podStartE2EDuration="18.133695577s" podCreationTimestamp="2026-01-28 16:03:21 +0000 UTC" firstStartedPulling="2026-01-28 16:03:23.501904613 +0000 UTC m=+1096.256268196" lastFinishedPulling="2026-01-28 16:03:37.530283252 +0000 UTC m=+1110.284646835" observedRunningTime="2026-01-28 16:03:39.131679103 +0000 UTC m=+1111.886042676" 
watchObservedRunningTime="2026-01-28 16:03:39.133695577 +0000 UTC m=+1111.888059150" Jan 28 16:03:39 crc kubenswrapper[4811]: I0128 16:03:39.167044 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-7478f7dbf9-zqk5g" podStartSLOduration=4.038419935 podStartE2EDuration="18.167027876s" podCreationTimestamp="2026-01-28 16:03:21 +0000 UTC" firstStartedPulling="2026-01-28 16:03:23.380373106 +0000 UTC m=+1096.134736679" lastFinishedPulling="2026-01-28 16:03:37.508981037 +0000 UTC m=+1110.263344620" observedRunningTime="2026-01-28 16:03:39.163537921 +0000 UTC m=+1111.917901504" watchObservedRunningTime="2026-01-28 16:03:39.167027876 +0000 UTC m=+1111.921391459" Jan 28 16:03:42 crc kubenswrapper[4811]: I0128 16:03:42.402984 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-598f7747c9-v4znb" Jan 28 16:03:52 crc kubenswrapper[4811]: I0128 16:03:52.227801 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7f86f8796f-flj9q" Jan 28 16:03:52 crc kubenswrapper[4811]: I0128 16:03:52.235453 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-7478f7dbf9-zqk5g" Jan 28 16:03:52 crc kubenswrapper[4811]: I0128 16:03:52.258006 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-9k7l8" Jan 28 16:03:52 crc kubenswrapper[4811]: I0128 16:03:52.308996 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-t4nkf" Jan 28 16:03:52 crc kubenswrapper[4811]: I0128 16:03:52.395403 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-q5dc6" Jan 28 16:03:52 crc kubenswrapper[4811]: I0128 16:03:52.473898 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-jxcrp" Jan 28 16:03:52 crc kubenswrapper[4811]: I0128 16:03:52.506638 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-dr9ff" Jan 28 16:03:52 crc kubenswrapper[4811]: I0128 16:03:52.608674 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-trvzh" Jan 28 16:03:52 crc kubenswrapper[4811]: I0128 16:03:52.650253 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-78d58447c5-9x8xg" Jan 28 16:03:52 crc kubenswrapper[4811]: I0128 16:03:52.816911 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-jxjn5" Jan 28 16:03:53 crc kubenswrapper[4811]: I0128 16:03:53.959866 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e9614b09-655d-4426-8b74-772aa777cf4d-cert\") pod \"infra-operator-controller-manager-694cf4f878-92chv\" (UID: \"e9614b09-655d-4426-8b74-772aa777cf4d\") " pod="openstack-operators/infra-operator-controller-manager-694cf4f878-92chv" Jan 28 16:03:53 crc 
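Timing note on the readiness flips above: these containers started around 16:03:37-38 but report status="ready" between 16:03:42 and 16:03:52. With the readiness probes declared in the container dumps earlier (InitialDelaySeconds:5, PeriodSeconds:10, SuccessThreshold:1), the first probe fires roughly 5s after container start and then every 10s, so the observed spread is consistent with probe cadence rather than slow startup:

    first probe  ~ start + 5s        (InitialDelaySeconds)
    second probe ~ start + 15s       (5s + one 10s period)
    ironic:   16:03:42.4 - 16:03:37.9 ~ 4.5s   (first probe passed)
    barbican: 16:03:52.2 - 16:03:38.9 ~ 13.3s  (consistent with the second cycle)

The exact phase depends on when each probe worker starts, so these are approximations, not guarantees.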
Jan 28 16:03:53 crc kubenswrapper[4811]: I0128 16:03:53.968325 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e9614b09-655d-4426-8b74-772aa777cf4d-cert\") pod \"infra-operator-controller-manager-694cf4f878-92chv\" (UID: \"e9614b09-655d-4426-8b74-772aa777cf4d\") " pod="openstack-operators/infra-operator-controller-manager-694cf4f878-92chv"
Jan 28 16:03:54 crc kubenswrapper[4811]: I0128 16:03:54.167512 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-694cf4f878-92chv"
Jan 28 16:03:54 crc kubenswrapper[4811]: I0128 16:03:54.365201 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/59c4af9d-14e6-4d10-aa40-5f300ca20242-cert\") pod \"openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd\" (UID: \"59c4af9d-14e6-4d10-aa40-5f300ca20242\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd"
Jan 28 16:03:54 crc kubenswrapper[4811]: I0128 16:03:54.372935 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/59c4af9d-14e6-4d10-aa40-5f300ca20242-cert\") pod \"openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd\" (UID: \"59c4af9d-14e6-4d10-aa40-5f300ca20242\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd"
Jan 28 16:03:54 crc kubenswrapper[4811]: I0128 16:03:54.520123 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd"
Jan 28 16:03:54 crc kubenswrapper[4811]: E0128 16:03:54.715348 4811 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/watcher-operator@sha256:7869203f6f97de780368d507636031090fed3b658d2f7771acbd4481bdfc870b"
Jan 28 16:03:54 crc kubenswrapper[4811]: E0128 16:03:54.715671 4811 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:7869203f6f97de780368d507636031090fed3b658d2f7771acbd4481bdfc870b,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xp66w,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-564965969-lxmgs_openstack-operators(003629ab-e0b0-49b2-a4ab-f9cf7b67b588): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 28 16:03:54 crc kubenswrapper[4811]: E0128 16:03:54.716909 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/watcher-operator-controller-manager-564965969-lxmgs" podUID="003629ab-e0b0-49b2-a4ab-f9cf7b67b588"
Jan 28 16:03:54 crc kubenswrapper[4811]: I0128 16:03:54.772784 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-webhook-certs\") pod \"openstack-operator-controller-manager-9f67d7-pqwwm\" (UID: \"1218f34b-3146-4128-a086-4848855a5c92\") " pod="openstack-operators/openstack-operator-controller-manager-9f67d7-pqwwm"
Jan 28 16:03:54 crc kubenswrapper[4811]: I0128 16:03:54.777567 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1218f34b-3146-4128-a086-4848855a5c92-webhook-certs\") pod \"openstack-operator-controller-manager-9f67d7-pqwwm\" (UID: \"1218f34b-3146-4128-a086-4848855a5c92\") " pod="openstack-operators/openstack-operator-controller-manager-9f67d7-pqwwm"
Jan 28 16:03:55 crc kubenswrapper[4811]: I0128 16:03:55.002289 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-9f67d7-pqwwm"
Jan 28 16:03:55 crc kubenswrapper[4811]: E0128 16:03:55.565022 4811 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/swift-operator@sha256:445e951df2f21df6d33a466f75917e0f6103052ae751ae11887136e8ab165922"
Jan 28 16:03:55 crc kubenswrapper[4811]: E0128 16:03:55.565535 4811 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:445e951df2f21df6d33a466f75917e0f6103052ae751ae11887136e8ab165922,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-plqxf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-547cbdb99f-r7j2f_openstack-operators(6fe8a24f-c7b1-4b27-908e-996e66803e0a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 28 16:03:55 crc kubenswrapper[4811]: E0128 16:03:55.567281 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-r7j2f" podUID="6fe8a24f-c7b1-4b27-908e-996e66803e0a"
Jan 28 16:03:56 crc kubenswrapper[4811]: E0128 16:03:56.134099 4811 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/telemetry-operator@sha256:e02722d7581bfe1c5fc13e2fa6811d8665102ba86635c77547abf6b933cde127"
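One detail worth noting in the swift-operator dump above: its SecurityContext differs from the other operator containers, Drop:[ALL] with RunAsUser:*1000660000 (the asterisk marks a non-nil pointer in the printed struct) versus Drop:[MKNOD] with RunAsUser:nil elsewhere; a UID in that range is characteristic of an OpenShift SCC-assigned user, though the log itself does not name the SCC. The two shapes as core/v1 literals (illustrative only):

    package main

    import (
        "fmt"

        corev1 "k8s.io/api/core/v1"
    )

    func main() {
        uid := int64(1000660000)
        swift := corev1.SecurityContext{
            Capabilities: &corev1.Capabilities{Drop: []corev1.Capability{"ALL"}},
            RunAsUser:    &uid, // printed as RunAsUser:*1000660000 in the dump
        }
        others := corev1.SecurityContext{
            Capabilities: &corev1.Capabilities{Drop: []corev1.Capability{"MKNOD"}},
            // RunAsUser left nil, as in the other container dumps above
        }
        fmt.Println(swift.Capabilities.Drop, others.Capabilities.Drop)
    }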
Jan 28 16:03:56 crc kubenswrapper[4811]: E0128 16:03:56.134283 4811 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:e02722d7581bfe1c5fc13e2fa6811d8665102ba86635c77547abf6b933cde127,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-42z2f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-85cd9769bb-gkhtq_openstack-operators(def156ed-ad39-4f23-aa51-bd1d36b35543): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 28 16:03:56 crc kubenswrapper[4811]: E0128 16:03:56.135502 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-gkhtq" podUID="def156ed-ad39-4f23-aa51-bd1d36b35543"
Jan 28 16:03:56 crc kubenswrapper[4811]: I0128 16:03:56.583941 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd"]
Jan 28 16:03:56 crc kubenswrapper[4811]: I0128 16:03:56.684321 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-694cf4f878-92chv"]
pods=["openstack-operators/infra-operator-controller-manager-694cf4f878-92chv"] Jan 28 16:03:56 crc kubenswrapper[4811]: W0128 16:03:56.689245 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode9614b09_655d_4426_8b74_772aa777cf4d.slice/crio-0dd42feda8697e1eb113cb54ac67ee56c952707419e3ac9b4bdb2e3361c09e6b WatchSource:0}: Error finding container 0dd42feda8697e1eb113cb54ac67ee56c952707419e3ac9b4bdb2e3361c09e6b: Status 404 returned error can't find the container with id 0dd42feda8697e1eb113cb54ac67ee56c952707419e3ac9b4bdb2e3361c09e6b Jan 28 16:03:56 crc kubenswrapper[4811]: W0128 16:03:56.884918 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1218f34b_3146_4128_a086_4848855a5c92.slice/crio-5226c404488a557ecea84a39dd94b064651f2fdb65aa9aa24568aa9b90b59510 WatchSource:0}: Error finding container 5226c404488a557ecea84a39dd94b064651f2fdb65aa9aa24568aa9b90b59510: Status 404 returned error can't find the container with id 5226c404488a557ecea84a39dd94b064651f2fdb65aa9aa24568aa9b90b59510 Jan 28 16:03:56 crc kubenswrapper[4811]: I0128 16:03:56.885096 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-9f67d7-pqwwm"] Jan 28 16:03:57 crc kubenswrapper[4811]: I0128 16:03:57.079295 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd" event={"ID":"59c4af9d-14e6-4d10-aa40-5f300ca20242","Type":"ContainerStarted","Data":"41ae9c97fa8e4f19f01bf358e9a4af98d59f774a3157fe75ce8cf4c3c4a049d7"} Jan 28 16:03:57 crc kubenswrapper[4811]: I0128 16:03:57.080510 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-694cf4f878-92chv" event={"ID":"e9614b09-655d-4426-8b74-772aa777cf4d","Type":"ContainerStarted","Data":"0dd42feda8697e1eb113cb54ac67ee56c952707419e3ac9b4bdb2e3361c09e6b"} Jan 28 16:03:57 crc kubenswrapper[4811]: I0128 16:03:57.082489 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-5f4cd88d46-5kczk" event={"ID":"12256989-9520-4379-9e55-3c11ab97993c","Type":"ContainerStarted","Data":"d4ac4543da4fe31cc21144127d70d662ebf46615b37f9c3c787eeb206821235a"} Jan 28 16:03:57 crc kubenswrapper[4811]: I0128 16:03:57.082676 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-5f4cd88d46-5kczk" Jan 28 16:03:57 crc kubenswrapper[4811]: I0128 16:03:57.084400 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-9f67d7-pqwwm" event={"ID":"1218f34b-3146-4128-a086-4848855a5c92","Type":"ContainerStarted","Data":"7bbde774674acfe62c9f67da86d3dfe46548ea9522b586ac4bf39d76adab7473"} Jan 28 16:03:57 crc kubenswrapper[4811]: I0128 16:03:57.084464 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-9f67d7-pqwwm" event={"ID":"1218f34b-3146-4128-a086-4848855a5c92","Type":"ContainerStarted","Data":"5226c404488a557ecea84a39dd94b064651f2fdb65aa9aa24568aa9b90b59510"} Jan 28 16:03:57 crc kubenswrapper[4811]: I0128 16:03:57.084639 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-9f67d7-pqwwm" Jan 28 16:03:57 crc 
Jan 28 16:03:57 crc kubenswrapper[4811]: I0128 16:03:57.087459 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-8vvwj" event={"ID":"7808e709-5b75-45bb-bf62-5756261a4279","Type":"ContainerStarted","Data":"569319867c22d3b39e204f07548006826f5c1e58cad1d1e31a93ba2ec27a23c9"}
Jan 28 16:03:57 crc kubenswrapper[4811]: I0128 16:03:57.089223 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-7bdb645866-mv6x7" event={"ID":"7b0b495e-25ab-48c9-9ee7-494295ed7316","Type":"ContainerStarted","Data":"8fa5bf42d5b8d78c2792f68139a978f33851b2e6eca48f04ca370187d0674e12"}
Jan 28 16:03:57 crc kubenswrapper[4811]: I0128 16:03:57.089489 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-7bdb645866-mv6x7"
Jan 28 16:03:57 crc kubenswrapper[4811]: I0128 16:03:57.091068 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-22cf4" event={"ID":"548c9a9e-14a4-44a0-a7d0-f4ccca0bcdd0","Type":"ContainerStarted","Data":"75a1c86e9e5b62d0b614b25f67defa6d967b5e887e52cfe36f6d9a1ae6d660f8"}
Jan 28 16:03:57 crc kubenswrapper[4811]: I0128 16:03:57.091272 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-22cf4"
Jan 28 16:03:57 crc kubenswrapper[4811]: I0128 16:03:57.099375 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-znx6t" event={"ID":"c2dcc00d-c6c6-4546-8aed-b1ccceb9534f","Type":"ContainerStarted","Data":"3553fca44cff57a41452278af8addbeedf337e19b4663f1dded501d41866d4cc"}
Jan 28 16:03:57 crc kubenswrapper[4811]: I0128 16:03:57.100478 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-znx6t"
Jan 28 16:03:57 crc kubenswrapper[4811]: I0128 16:03:57.106263 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-drtqb" event={"ID":"4621fc20-e68f-4c29-a01b-e62b2eda190c","Type":"ContainerStarted","Data":"a057d1f9797868394a5a9f5a2ccad3432ea73ff06dc964dfc63deb0beff54c91"}
Jan 28 16:03:57 crc kubenswrapper[4811]: I0128 16:03:57.106790 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-drtqb"
Jan 28 16:03:57 crc kubenswrapper[4811]: I0128 16:03:57.160424 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-5f4cd88d46-5kczk" podStartSLOduration=2.88108509 podStartE2EDuration="35.160407717s" podCreationTimestamp="2026-01-28 16:03:22 +0000 UTC" firstStartedPulling="2026-01-28 16:03:23.973232571 +0000 UTC m=+1096.727596154" lastFinishedPulling="2026-01-28 16:03:56.252555198 +0000 UTC m=+1129.006918781" observedRunningTime="2026-01-28 16:03:57.129517614 +0000 UTC m=+1129.883881217" watchObservedRunningTime="2026-01-28 16:03:57.160407717 +0000 UTC m=+1129.914771300"
Jan 28 16:03:57 crc kubenswrapper[4811]: I0128 16:03:57.165345 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-8vvwj" podStartSLOduration=2.966257607 podStartE2EDuration="35.165331749s" podCreationTimestamp="2026-01-28 16:03:22 +0000 UTC" firstStartedPulling="2026-01-28 16:03:24.010142647 +0000 UTC m=+1096.764506230" lastFinishedPulling="2026-01-28 16:03:56.209216789 +0000 UTC m=+1128.963580372" observedRunningTime="2026-01-28 16:03:57.158143836 +0000 UTC m=+1129.912507419" watchObservedRunningTime="2026-01-28 16:03:57.165331749 +0000 UTC m=+1129.919695332"
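In the openstack-operator-controller-manager record that follows, firstStartedPulling and lastFinishedPulling are "0001-01-01 00:00:00 +0000 UTC", the zero value of Go's time.Time, meaning no image pull was recorded for that pod (the image was already present), so podStartSLOduration equals podStartE2EDuration at 35.196552142s. A minimal illustration of that zero-value rendering:

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        var t time.Time         // zero value, i.e. no pull recorded
        fmt.Println(t)          // 0001-01-01 00:00:00 +0000 UTC
        fmt.Println(t.IsZero()) // true
    }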
firstStartedPulling="2026-01-28 16:03:24.010142647 +0000 UTC m=+1096.764506230" lastFinishedPulling="2026-01-28 16:03:56.209216789 +0000 UTC m=+1128.963580372" observedRunningTime="2026-01-28 16:03:57.158143836 +0000 UTC m=+1129.912507419" watchObservedRunningTime="2026-01-28 16:03:57.165331749 +0000 UTC m=+1129.919695332" Jan 28 16:03:57 crc kubenswrapper[4811]: I0128 16:03:57.196568 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-9f67d7-pqwwm" podStartSLOduration=35.196552142 podStartE2EDuration="35.196552142s" podCreationTimestamp="2026-01-28 16:03:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:03:57.190782606 +0000 UTC m=+1129.945146189" watchObservedRunningTime="2026-01-28 16:03:57.196552142 +0000 UTC m=+1129.950915725" Jan 28 16:03:57 crc kubenswrapper[4811]: I0128 16:03:57.252815 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-22cf4" podStartSLOduration=3.015213049 podStartE2EDuration="35.252789698s" podCreationTimestamp="2026-01-28 16:03:22 +0000 UTC" firstStartedPulling="2026-01-28 16:03:24.010083386 +0000 UTC m=+1096.764446969" lastFinishedPulling="2026-01-28 16:03:56.247660035 +0000 UTC m=+1129.002023618" observedRunningTime="2026-01-28 16:03:57.228647797 +0000 UTC m=+1129.983011380" watchObservedRunningTime="2026-01-28 16:03:57.252789698 +0000 UTC m=+1130.007153281" Jan 28 16:03:57 crc kubenswrapper[4811]: I0128 16:03:57.253617 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-7bdb645866-mv6x7" podStartSLOduration=2.9645048320000003 podStartE2EDuration="35.253610961s" podCreationTimestamp="2026-01-28 16:03:22 +0000 UTC" firstStartedPulling="2026-01-28 16:03:23.96464469 +0000 UTC m=+1096.719008273" lastFinishedPulling="2026-01-28 16:03:56.253750819 +0000 UTC m=+1129.008114402" observedRunningTime="2026-01-28 16:03:57.251725499 +0000 UTC m=+1130.006089082" watchObservedRunningTime="2026-01-28 16:03:57.253610961 +0000 UTC m=+1130.007974544" Jan 28 16:03:57 crc kubenswrapper[4811]: I0128 16:03:57.276786 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-znx6t" podStartSLOduration=3.028376093 podStartE2EDuration="35.276769795s" podCreationTimestamp="2026-01-28 16:03:22 +0000 UTC" firstStartedPulling="2026-01-28 16:03:23.990481547 +0000 UTC m=+1096.744845130" lastFinishedPulling="2026-01-28 16:03:56.238875249 +0000 UTC m=+1128.993238832" observedRunningTime="2026-01-28 16:03:57.27103913 +0000 UTC m=+1130.025402713" watchObservedRunningTime="2026-01-28 16:03:57.276769795 +0000 UTC m=+1130.031133378" Jan 28 16:03:57 crc kubenswrapper[4811]: I0128 16:03:57.289654 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-drtqb" podStartSLOduration=3.531843513 podStartE2EDuration="36.289633691s" podCreationTimestamp="2026-01-28 16:03:21 +0000 UTC" firstStartedPulling="2026-01-28 16:03:23.488698576 +0000 UTC m=+1096.243062159" lastFinishedPulling="2026-01-28 16:03:56.246488754 +0000 UTC m=+1129.000852337" observedRunningTime="2026-01-28 16:03:57.288000947 +0000 UTC m=+1130.042364530" watchObservedRunningTime="2026-01-28 16:03:57.289633691 +0000 UTC 
m=+1130.043997274" Jan 28 16:04:00 crc kubenswrapper[4811]: I0128 16:04:00.134422 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd" event={"ID":"59c4af9d-14e6-4d10-aa40-5f300ca20242","Type":"ContainerStarted","Data":"bae85c499c3b7d1f1d9accd1614d2208cc7af1f18de734df7145050e22e9cf48"} Jan 28 16:04:00 crc kubenswrapper[4811]: I0128 16:04:00.134975 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd" Jan 28 16:04:00 crc kubenswrapper[4811]: I0128 16:04:00.136124 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-694cf4f878-92chv" event={"ID":"e9614b09-655d-4426-8b74-772aa777cf4d","Type":"ContainerStarted","Data":"ec94caa1924ada52cbbce7afa0b5b331d888c0d961a65efe80d03ca5c94b1432"} Jan 28 16:04:00 crc kubenswrapper[4811]: I0128 16:04:00.136250 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-694cf4f878-92chv" Jan 28 16:04:00 crc kubenswrapper[4811]: I0128 16:04:00.165289 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd" podStartSLOduration=35.139418081 podStartE2EDuration="38.165268731s" podCreationTimestamp="2026-01-28 16:03:22 +0000 UTC" firstStartedPulling="2026-01-28 16:03:56.603653544 +0000 UTC m=+1129.358017127" lastFinishedPulling="2026-01-28 16:03:59.629504194 +0000 UTC m=+1132.383867777" observedRunningTime="2026-01-28 16:04:00.161119069 +0000 UTC m=+1132.915482662" watchObservedRunningTime="2026-01-28 16:04:00.165268731 +0000 UTC m=+1132.919632314" Jan 28 16:04:00 crc kubenswrapper[4811]: I0128 16:04:00.180366 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-694cf4f878-92chv" podStartSLOduration=36.240520917 podStartE2EDuration="39.180348688s" podCreationTimestamp="2026-01-28 16:03:21 +0000 UTC" firstStartedPulling="2026-01-28 16:03:56.691490923 +0000 UTC m=+1129.445854506" lastFinishedPulling="2026-01-28 16:03:59.631318704 +0000 UTC m=+1132.385682277" observedRunningTime="2026-01-28 16:04:00.17375951 +0000 UTC m=+1132.928123103" watchObservedRunningTime="2026-01-28 16:04:00.180348688 +0000 UTC m=+1132.934712271" Jan 28 16:04:02 crc kubenswrapper[4811]: I0128 16:04:02.292377 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-drtqb" Jan 28 16:04:02 crc kubenswrapper[4811]: I0128 16:04:02.687276 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-7bdb645866-mv6x7" Jan 28 16:04:02 crc kubenswrapper[4811]: I0128 16:04:02.707462 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-5f4cd88d46-5kczk" Jan 28 16:04:02 crc kubenswrapper[4811]: I0128 16:04:02.802425 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-znx6t" Jan 28 16:04:03 crc kubenswrapper[4811]: I0128 16:04:03.087750 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon 
Jan 28 16:04:03 crc kubenswrapper[4811]: I0128 16:04:03.087750 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 16:04:03 crc kubenswrapper[4811]: I0128 16:04:03.087815 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 16:04:03 crc kubenswrapper[4811]: I0128 16:04:03.087868 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6"
Jan 28 16:04:03 crc kubenswrapper[4811]: I0128 16:04:03.088636 4811 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ced9aabb8b9dead9319c14576d709e760cf8c817715a608c130685f02a1c32b6"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 28 16:04:03 crc kubenswrapper[4811]: I0128 16:04:03.088702 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://ced9aabb8b9dead9319c14576d709e760cf8c817715a608c130685f02a1c32b6" gracePeriod=600
Jan 28 16:04:03 crc kubenswrapper[4811]: I0128 16:04:03.158413 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-22cf4"
Jan 28 16:04:04 crc kubenswrapper[4811]: I0128 16:04:04.182497 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="ced9aabb8b9dead9319c14576d709e760cf8c817715a608c130685f02a1c32b6" exitCode=0
Jan 28 16:04:04 crc kubenswrapper[4811]: I0128 16:04:04.182572 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"ced9aabb8b9dead9319c14576d709e760cf8c817715a608c130685f02a1c32b6"}
Jan 28 16:04:04 crc kubenswrapper[4811]: I0128 16:04:04.182978 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"b949c0e59097384d76683740d569c1b6a4440ebd12914648594ff426daaad130"}
Jan 28 16:04:04 crc kubenswrapper[4811]: I0128 16:04:04.183015 4811 scope.go:117] "RemoveContainer" containerID="eabdb53100645a7db124d74211393e44eb29247556832b21f6981f388ac52c1c"
Jan 28 16:04:04 crc kubenswrapper[4811]: I0128 16:04:04.189472 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-694cf4f878-92chv"
Jan 28 16:04:04 crc kubenswrapper[4811]: I0128 16:04:04.526296 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd"
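The machine-config-daemon entries above show a complete liveness cycle: an HTTP GET to 127.0.0.1:8798/health is refused, the probe is recorded as failure, the container is killed with gracePeriod=600, and PLEG then reports ContainerDied followed by ContainerStarted for the replacement. A minimal sketch of the HTTP check itself, taking the endpoint from the log; the timeout is an assumption, and this is illustrative, not the kubelet's actual prober code:

```go
package main

import (
	"fmt"
	"net/http"
	"time"
)

// probeOnce performs one HTTP liveness check: any transport error (such as
// "connect: connection refused" above) or a status outside 2xx/3xx fails.
func probeOnce(url string, timeout time.Duration) error {
	client := &http.Client{Timeout: timeout}
	resp, err := client.Get(url)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("unhealthy status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	// Endpoint taken from the machine-config-daemon probe output above.
	if err := probeOnce("http://127.0.0.1:8798/health", time.Second); err != nil {
		// After enough consecutive failures the kubelet kills the
		// container with its grace period, as logged at 16:04:03.
		fmt.Println("Liveness probe status=failure:", err)
	}
}
```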
pod="openstack-operators/openstack-operator-controller-manager-9f67d7-pqwwm" Jan 28 16:04:10 crc kubenswrapper[4811]: E0128 16:04:10.341704 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:445e951df2f21df6d33a466f75917e0f6103052ae751ae11887136e8ab165922\\\"\"" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-r7j2f" podUID="6fe8a24f-c7b1-4b27-908e-996e66803e0a" Jan 28 16:04:10 crc kubenswrapper[4811]: E0128 16:04:10.342279 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:7869203f6f97de780368d507636031090fed3b658d2f7771acbd4481bdfc870b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-564965969-lxmgs" podUID="003629ab-e0b0-49b2-a4ab-f9cf7b67b588" Jan 28 16:04:11 crc kubenswrapper[4811]: E0128 16:04:11.341688 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:e02722d7581bfe1c5fc13e2fa6811d8665102ba86635c77547abf6b933cde127\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-gkhtq" podUID="def156ed-ad39-4f23-aa51-bd1d36b35543" Jan 28 16:04:21 crc kubenswrapper[4811]: I0128 16:04:21.341665 4811 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 28 16:04:23 crc kubenswrapper[4811]: I0128 16:04:23.313145 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-r7j2f" event={"ID":"6fe8a24f-c7b1-4b27-908e-996e66803e0a","Type":"ContainerStarted","Data":"35e9b1615892160c6b5a371bb7e221eac9b5238d05f4befc0d64f990588ffe44"} Jan 28 16:04:23 crc kubenswrapper[4811]: I0128 16:04:23.313639 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-r7j2f" Jan 28 16:04:23 crc kubenswrapper[4811]: I0128 16:04:23.330898 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-r7j2f" podStartSLOduration=2.9440421690000003 podStartE2EDuration="1m1.330882101s" podCreationTimestamp="2026-01-28 16:03:22 +0000 UTC" firstStartedPulling="2026-01-28 16:03:23.98501999 +0000 UTC m=+1096.739383573" lastFinishedPulling="2026-01-28 16:04:22.371859932 +0000 UTC m=+1155.126223505" observedRunningTime="2026-01-28 16:04:23.32489081 +0000 UTC m=+1156.079254403" watchObservedRunningTime="2026-01-28 16:04:23.330882101 +0000 UTC m=+1156.085245684" Jan 28 16:04:26 crc kubenswrapper[4811]: I0128 16:04:26.335310 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-gkhtq" event={"ID":"def156ed-ad39-4f23-aa51-bd1d36b35543","Type":"ContainerStarted","Data":"0332a42bf1938f73f327b271442869c22793203251c22ba6ef4637ff66c86358"} Jan 28 16:04:26 crc kubenswrapper[4811]: I0128 16:04:26.336072 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-gkhtq" Jan 28 16:04:26 crc kubenswrapper[4811]: I0128 16:04:26.349387 4811 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack-operators/watcher-operator-controller-manager-564965969-lxmgs" event={"ID":"003629ab-e0b0-49b2-a4ab-f9cf7b67b588","Type":"ContainerStarted","Data":"d2578f211017e47146d1189cade0c3d375f9f6a4550ff37d830130ced894eda8"} Jan 28 16:04:26 crc kubenswrapper[4811]: I0128 16:04:26.349659 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-564965969-lxmgs" Jan 28 16:04:26 crc kubenswrapper[4811]: I0128 16:04:26.358542 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-gkhtq" podStartSLOduration=2.253993163 podStartE2EDuration="1m4.35851134s" podCreationTimestamp="2026-01-28 16:03:22 +0000 UTC" firstStartedPulling="2026-01-28 16:03:24.010138277 +0000 UTC m=+1096.764501860" lastFinishedPulling="2026-01-28 16:04:26.114656454 +0000 UTC m=+1158.869020037" observedRunningTime="2026-01-28 16:04:26.352687222 +0000 UTC m=+1159.107050805" watchObservedRunningTime="2026-01-28 16:04:26.35851134 +0000 UTC m=+1159.112874923" Jan 28 16:04:26 crc kubenswrapper[4811]: I0128 16:04:26.373079 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-564965969-lxmgs" podStartSLOduration=3.131034292 podStartE2EDuration="1m4.373056112s" podCreationTimestamp="2026-01-28 16:03:22 +0000 UTC" firstStartedPulling="2026-01-28 16:03:23.995983586 +0000 UTC m=+1096.750347169" lastFinishedPulling="2026-01-28 16:04:25.238005396 +0000 UTC m=+1157.992368989" observedRunningTime="2026-01-28 16:04:26.372053935 +0000 UTC m=+1159.126417518" watchObservedRunningTime="2026-01-28 16:04:26.373056112 +0000 UTC m=+1159.127419695" Jan 28 16:04:33 crc kubenswrapper[4811]: I0128 16:04:33.090573 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-r7j2f" Jan 28 16:04:33 crc kubenswrapper[4811]: I0128 16:04:33.121417 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-gkhtq" Jan 28 16:04:33 crc kubenswrapper[4811]: I0128 16:04:33.191817 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-564965969-lxmgs" Jan 28 16:04:49 crc kubenswrapper[4811]: I0128 16:04:49.696792 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-x9hjk"] Jan 28 16:04:49 crc kubenswrapper[4811]: I0128 16:04:49.698528 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84bb9d8bd9-x9hjk" Jan 28 16:04:49 crc kubenswrapper[4811]: I0128 16:04:49.701261 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Jan 28 16:04:49 crc kubenswrapper[4811]: I0128 16:04:49.701463 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Jan 28 16:04:49 crc kubenswrapper[4811]: I0128 16:04:49.701567 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Jan 28 16:04:49 crc kubenswrapper[4811]: I0128 16:04:49.701759 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-k44hh" Jan 28 16:04:49 crc kubenswrapper[4811]: I0128 16:04:49.715904 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-x9hjk"] Jan 28 16:04:49 crc kubenswrapper[4811]: I0128 16:04:49.757977 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tsx99\" (UniqueName: \"kubernetes.io/projected/c37bfb17-f77b-4572-9ff3-2c8b45839abe-kube-api-access-tsx99\") pod \"dnsmasq-dns-84bb9d8bd9-x9hjk\" (UID: \"c37bfb17-f77b-4572-9ff3-2c8b45839abe\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-x9hjk" Jan 28 16:04:49 crc kubenswrapper[4811]: I0128 16:04:49.758098 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c37bfb17-f77b-4572-9ff3-2c8b45839abe-config\") pod \"dnsmasq-dns-84bb9d8bd9-x9hjk\" (UID: \"c37bfb17-f77b-4572-9ff3-2c8b45839abe\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-x9hjk" Jan 28 16:04:49 crc kubenswrapper[4811]: I0128 16:04:49.763107 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-pg82n"] Jan 28 16:04:49 crc kubenswrapper[4811]: I0128 16:04:49.765812 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f854695bc-pg82n" Jan 28 16:04:49 crc kubenswrapper[4811]: I0128 16:04:49.771867 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Jan 28 16:04:49 crc kubenswrapper[4811]: I0128 16:04:49.773112 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-pg82n"] Jan 28 16:04:49 crc kubenswrapper[4811]: I0128 16:04:49.859353 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/14b8d5c0-9e09-48de-8efa-12a3c053e9f8-dns-svc\") pod \"dnsmasq-dns-5f854695bc-pg82n\" (UID: \"14b8d5c0-9e09-48de-8efa-12a3c053e9f8\") " pod="openstack/dnsmasq-dns-5f854695bc-pg82n" Jan 28 16:04:49 crc kubenswrapper[4811]: I0128 16:04:49.859392 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14b8d5c0-9e09-48de-8efa-12a3c053e9f8-config\") pod \"dnsmasq-dns-5f854695bc-pg82n\" (UID: \"14b8d5c0-9e09-48de-8efa-12a3c053e9f8\") " pod="openstack/dnsmasq-dns-5f854695bc-pg82n" Jan 28 16:04:49 crc kubenswrapper[4811]: I0128 16:04:49.859494 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c37bfb17-f77b-4572-9ff3-2c8b45839abe-config\") pod \"dnsmasq-dns-84bb9d8bd9-x9hjk\" (UID: \"c37bfb17-f77b-4572-9ff3-2c8b45839abe\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-x9hjk" Jan 28 16:04:49 crc kubenswrapper[4811]: I0128 16:04:49.859552 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9tlw\" (UniqueName: \"kubernetes.io/projected/14b8d5c0-9e09-48de-8efa-12a3c053e9f8-kube-api-access-m9tlw\") pod \"dnsmasq-dns-5f854695bc-pg82n\" (UID: \"14b8d5c0-9e09-48de-8efa-12a3c053e9f8\") " pod="openstack/dnsmasq-dns-5f854695bc-pg82n" Jan 28 16:04:49 crc kubenswrapper[4811]: I0128 16:04:49.859602 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tsx99\" (UniqueName: \"kubernetes.io/projected/c37bfb17-f77b-4572-9ff3-2c8b45839abe-kube-api-access-tsx99\") pod \"dnsmasq-dns-84bb9d8bd9-x9hjk\" (UID: \"c37bfb17-f77b-4572-9ff3-2c8b45839abe\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-x9hjk" Jan 28 16:04:49 crc kubenswrapper[4811]: I0128 16:04:49.860884 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c37bfb17-f77b-4572-9ff3-2c8b45839abe-config\") pod \"dnsmasq-dns-84bb9d8bd9-x9hjk\" (UID: \"c37bfb17-f77b-4572-9ff3-2c8b45839abe\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-x9hjk" Jan 28 16:04:49 crc kubenswrapper[4811]: I0128 16:04:49.877966 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tsx99\" (UniqueName: \"kubernetes.io/projected/c37bfb17-f77b-4572-9ff3-2c8b45839abe-kube-api-access-tsx99\") pod \"dnsmasq-dns-84bb9d8bd9-x9hjk\" (UID: \"c37bfb17-f77b-4572-9ff3-2c8b45839abe\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-x9hjk" Jan 28 16:04:49 crc kubenswrapper[4811]: I0128 16:04:49.960532 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/14b8d5c0-9e09-48de-8efa-12a3c053e9f8-dns-svc\") pod \"dnsmasq-dns-5f854695bc-pg82n\" (UID: \"14b8d5c0-9e09-48de-8efa-12a3c053e9f8\") " pod="openstack/dnsmasq-dns-5f854695bc-pg82n" Jan 28 16:04:49 crc kubenswrapper[4811]: I0128 
Jan 28 16:04:49 crc kubenswrapper[4811]: I0128 16:04:49.960580 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14b8d5c0-9e09-48de-8efa-12a3c053e9f8-config\") pod \"dnsmasq-dns-5f854695bc-pg82n\" (UID: \"14b8d5c0-9e09-48de-8efa-12a3c053e9f8\") " pod="openstack/dnsmasq-dns-5f854695bc-pg82n"
Jan 28 16:04:49 crc kubenswrapper[4811]: I0128 16:04:49.960633 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9tlw\" (UniqueName: \"kubernetes.io/projected/14b8d5c0-9e09-48de-8efa-12a3c053e9f8-kube-api-access-m9tlw\") pod \"dnsmasq-dns-5f854695bc-pg82n\" (UID: \"14b8d5c0-9e09-48de-8efa-12a3c053e9f8\") " pod="openstack/dnsmasq-dns-5f854695bc-pg82n"
Jan 28 16:04:49 crc kubenswrapper[4811]: I0128 16:04:49.961638 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/14b8d5c0-9e09-48de-8efa-12a3c053e9f8-dns-svc\") pod \"dnsmasq-dns-5f854695bc-pg82n\" (UID: \"14b8d5c0-9e09-48de-8efa-12a3c053e9f8\") " pod="openstack/dnsmasq-dns-5f854695bc-pg82n"
Jan 28 16:04:49 crc kubenswrapper[4811]: I0128 16:04:49.961692 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14b8d5c0-9e09-48de-8efa-12a3c053e9f8-config\") pod \"dnsmasq-dns-5f854695bc-pg82n\" (UID: \"14b8d5c0-9e09-48de-8efa-12a3c053e9f8\") " pod="openstack/dnsmasq-dns-5f854695bc-pg82n"
Jan 28 16:04:49 crc kubenswrapper[4811]: I0128 16:04:49.977260 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9tlw\" (UniqueName: \"kubernetes.io/projected/14b8d5c0-9e09-48de-8efa-12a3c053e9f8-kube-api-access-m9tlw\") pod \"dnsmasq-dns-5f854695bc-pg82n\" (UID: \"14b8d5c0-9e09-48de-8efa-12a3c053e9f8\") " pod="openstack/dnsmasq-dns-5f854695bc-pg82n"
Jan 28 16:04:50 crc kubenswrapper[4811]: I0128 16:04:50.024742 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84bb9d8bd9-x9hjk"
Jan 28 16:04:50 crc kubenswrapper[4811]: I0128 16:04:50.083347 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f854695bc-pg82n" Jan 28 16:04:50 crc kubenswrapper[4811]: I0128 16:04:50.465202 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-x9hjk"] Jan 28 16:04:50 crc kubenswrapper[4811]: I0128 16:04:50.501619 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84bb9d8bd9-x9hjk" event={"ID":"c37bfb17-f77b-4572-9ff3-2c8b45839abe","Type":"ContainerStarted","Data":"0322589fd7a76006967d328b54eeaaa874e89780df5c85fd2ff36732f04429b4"} Jan 28 16:04:50 crc kubenswrapper[4811]: I0128 16:04:50.598792 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-pg82n"] Jan 28 16:04:50 crc kubenswrapper[4811]: W0128 16:04:50.604959 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod14b8d5c0_9e09_48de_8efa_12a3c053e9f8.slice/crio-948a323669d401a2a1c33840c5e68989d2dd791108fd8a6293a4cc8a7fbb73c4 WatchSource:0}: Error finding container 948a323669d401a2a1c33840c5e68989d2dd791108fd8a6293a4cc8a7fbb73c4: Status 404 returned error can't find the container with id 948a323669d401a2a1c33840c5e68989d2dd791108fd8a6293a4cc8a7fbb73c4 Jan 28 16:04:51 crc kubenswrapper[4811]: I0128 16:04:51.513714 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f854695bc-pg82n" event={"ID":"14b8d5c0-9e09-48de-8efa-12a3c053e9f8","Type":"ContainerStarted","Data":"948a323669d401a2a1c33840c5e68989d2dd791108fd8a6293a4cc8a7fbb73c4"} Jan 28 16:04:52 crc kubenswrapper[4811]: I0128 16:04:52.459170 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-pg82n"] Jan 28 16:04:52 crc kubenswrapper[4811]: I0128 16:04:52.481614 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-7w9pg"] Jan 28 16:04:52 crc kubenswrapper[4811]: I0128 16:04:52.483097 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-744ffd65bc-7w9pg" Jan 28 16:04:52 crc kubenswrapper[4811]: I0128 16:04:52.496181 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-7w9pg"] Jan 28 16:04:52 crc kubenswrapper[4811]: I0128 16:04:52.519726 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fb8tr\" (UniqueName: \"kubernetes.io/projected/4b0dc77d-eab2-4a1a-b975-54dc6ec89453-kube-api-access-fb8tr\") pod \"dnsmasq-dns-744ffd65bc-7w9pg\" (UID: \"4b0dc77d-eab2-4a1a-b975-54dc6ec89453\") " pod="openstack/dnsmasq-dns-744ffd65bc-7w9pg" Jan 28 16:04:52 crc kubenswrapper[4811]: I0128 16:04:52.519903 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b0dc77d-eab2-4a1a-b975-54dc6ec89453-config\") pod \"dnsmasq-dns-744ffd65bc-7w9pg\" (UID: \"4b0dc77d-eab2-4a1a-b975-54dc6ec89453\") " pod="openstack/dnsmasq-dns-744ffd65bc-7w9pg" Jan 28 16:04:52 crc kubenswrapper[4811]: I0128 16:04:52.519945 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b0dc77d-eab2-4a1a-b975-54dc6ec89453-dns-svc\") pod \"dnsmasq-dns-744ffd65bc-7w9pg\" (UID: \"4b0dc77d-eab2-4a1a-b975-54dc6ec89453\") " pod="openstack/dnsmasq-dns-744ffd65bc-7w9pg" Jan 28 16:04:52 crc kubenswrapper[4811]: I0128 16:04:52.623559 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b0dc77d-eab2-4a1a-b975-54dc6ec89453-config\") pod \"dnsmasq-dns-744ffd65bc-7w9pg\" (UID: \"4b0dc77d-eab2-4a1a-b975-54dc6ec89453\") " pod="openstack/dnsmasq-dns-744ffd65bc-7w9pg" Jan 28 16:04:52 crc kubenswrapper[4811]: I0128 16:04:52.623608 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b0dc77d-eab2-4a1a-b975-54dc6ec89453-dns-svc\") pod \"dnsmasq-dns-744ffd65bc-7w9pg\" (UID: \"4b0dc77d-eab2-4a1a-b975-54dc6ec89453\") " pod="openstack/dnsmasq-dns-744ffd65bc-7w9pg" Jan 28 16:04:52 crc kubenswrapper[4811]: I0128 16:04:52.623691 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fb8tr\" (UniqueName: \"kubernetes.io/projected/4b0dc77d-eab2-4a1a-b975-54dc6ec89453-kube-api-access-fb8tr\") pod \"dnsmasq-dns-744ffd65bc-7w9pg\" (UID: \"4b0dc77d-eab2-4a1a-b975-54dc6ec89453\") " pod="openstack/dnsmasq-dns-744ffd65bc-7w9pg" Jan 28 16:04:52 crc kubenswrapper[4811]: I0128 16:04:52.624758 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b0dc77d-eab2-4a1a-b975-54dc6ec89453-config\") pod \"dnsmasq-dns-744ffd65bc-7w9pg\" (UID: \"4b0dc77d-eab2-4a1a-b975-54dc6ec89453\") " pod="openstack/dnsmasq-dns-744ffd65bc-7w9pg" Jan 28 16:04:52 crc kubenswrapper[4811]: I0128 16:04:52.627868 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b0dc77d-eab2-4a1a-b975-54dc6ec89453-dns-svc\") pod \"dnsmasq-dns-744ffd65bc-7w9pg\" (UID: \"4b0dc77d-eab2-4a1a-b975-54dc6ec89453\") " pod="openstack/dnsmasq-dns-744ffd65bc-7w9pg" Jan 28 16:04:52 crc kubenswrapper[4811]: I0128 16:04:52.692677 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fb8tr\" (UniqueName: 
\"kubernetes.io/projected/4b0dc77d-eab2-4a1a-b975-54dc6ec89453-kube-api-access-fb8tr\") pod \"dnsmasq-dns-744ffd65bc-7w9pg\" (UID: \"4b0dc77d-eab2-4a1a-b975-54dc6ec89453\") " pod="openstack/dnsmasq-dns-744ffd65bc-7w9pg" Jan 28 16:04:52 crc kubenswrapper[4811]: I0128 16:04:52.750402 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-x9hjk"] Jan 28 16:04:52 crc kubenswrapper[4811]: I0128 16:04:52.781259 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-6ztk9"] Jan 28 16:04:52 crc kubenswrapper[4811]: I0128 16:04:52.782364 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95f5f6995-6ztk9" Jan 28 16:04:52 crc kubenswrapper[4811]: I0128 16:04:52.793216 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-6ztk9"] Jan 28 16:04:52 crc kubenswrapper[4811]: I0128 16:04:52.811383 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-744ffd65bc-7w9pg" Jan 28 16:04:52 crc kubenswrapper[4811]: I0128 16:04:52.827526 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6313906-9e80-41ce-b4c1-2f673d09425b-dns-svc\") pod \"dnsmasq-dns-95f5f6995-6ztk9\" (UID: \"a6313906-9e80-41ce-b4c1-2f673d09425b\") " pod="openstack/dnsmasq-dns-95f5f6995-6ztk9" Jan 28 16:04:52 crc kubenswrapper[4811]: I0128 16:04:52.827572 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sf5dv\" (UniqueName: \"kubernetes.io/projected/a6313906-9e80-41ce-b4c1-2f673d09425b-kube-api-access-sf5dv\") pod \"dnsmasq-dns-95f5f6995-6ztk9\" (UID: \"a6313906-9e80-41ce-b4c1-2f673d09425b\") " pod="openstack/dnsmasq-dns-95f5f6995-6ztk9" Jan 28 16:04:52 crc kubenswrapper[4811]: I0128 16:04:52.827626 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6313906-9e80-41ce-b4c1-2f673d09425b-config\") pod \"dnsmasq-dns-95f5f6995-6ztk9\" (UID: \"a6313906-9e80-41ce-b4c1-2f673d09425b\") " pod="openstack/dnsmasq-dns-95f5f6995-6ztk9" Jan 28 16:04:52 crc kubenswrapper[4811]: I0128 16:04:52.928907 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6313906-9e80-41ce-b4c1-2f673d09425b-config\") pod \"dnsmasq-dns-95f5f6995-6ztk9\" (UID: \"a6313906-9e80-41ce-b4c1-2f673d09425b\") " pod="openstack/dnsmasq-dns-95f5f6995-6ztk9" Jan 28 16:04:52 crc kubenswrapper[4811]: I0128 16:04:52.929202 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6313906-9e80-41ce-b4c1-2f673d09425b-dns-svc\") pod \"dnsmasq-dns-95f5f6995-6ztk9\" (UID: \"a6313906-9e80-41ce-b4c1-2f673d09425b\") " pod="openstack/dnsmasq-dns-95f5f6995-6ztk9" Jan 28 16:04:52 crc kubenswrapper[4811]: I0128 16:04:52.929227 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sf5dv\" (UniqueName: \"kubernetes.io/projected/a6313906-9e80-41ce-b4c1-2f673d09425b-kube-api-access-sf5dv\") pod \"dnsmasq-dns-95f5f6995-6ztk9\" (UID: \"a6313906-9e80-41ce-b4c1-2f673d09425b\") " pod="openstack/dnsmasq-dns-95f5f6995-6ztk9" Jan 28 16:04:52 crc kubenswrapper[4811]: I0128 16:04:52.929824 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6313906-9e80-41ce-b4c1-2f673d09425b-dns-svc\") pod \"dnsmasq-dns-95f5f6995-6ztk9\" (UID: \"a6313906-9e80-41ce-b4c1-2f673d09425b\") " pod="openstack/dnsmasq-dns-95f5f6995-6ztk9" Jan 28 16:04:52 crc kubenswrapper[4811]: I0128 16:04:52.929867 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6313906-9e80-41ce-b4c1-2f673d09425b-config\") pod \"dnsmasq-dns-95f5f6995-6ztk9\" (UID: \"a6313906-9e80-41ce-b4c1-2f673d09425b\") " pod="openstack/dnsmasq-dns-95f5f6995-6ztk9" Jan 28 16:04:52 crc kubenswrapper[4811]: I0128 16:04:52.960415 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sf5dv\" (UniqueName: \"kubernetes.io/projected/a6313906-9e80-41ce-b4c1-2f673d09425b-kube-api-access-sf5dv\") pod \"dnsmasq-dns-95f5f6995-6ztk9\" (UID: \"a6313906-9e80-41ce-b4c1-2f673d09425b\") " pod="openstack/dnsmasq-dns-95f5f6995-6ztk9" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.097310 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95f5f6995-6ztk9" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.324299 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-7w9pg"] Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.560157 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744ffd65bc-7w9pg" event={"ID":"4b0dc77d-eab2-4a1a-b975-54dc6ec89453","Type":"ContainerStarted","Data":"320033b6768db17ac9ec617c4e4152991afee509ba371258272034ab096f3a9b"} Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.593834 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-6ztk9"] Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.629854 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.632801 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.635878 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-p8pvw" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.635914 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.636020 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.636794 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.638476 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.641527 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.641698 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.642562 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.739257 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.739310 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/23095127-8b86-445a-8c32-1e6bc14bf05e-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.739341 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/23095127-8b86-445a-8c32-1e6bc14bf05e-config-data\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.739366 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/23095127-8b86-445a-8c32-1e6bc14bf05e-pod-info\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.739386 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/23095127-8b86-445a-8c32-1e6bc14bf05e-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.739461 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/23095127-8b86-445a-8c32-1e6bc14bf05e-server-conf\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.739476 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/23095127-8b86-445a-8c32-1e6bc14bf05e-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.739511 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/23095127-8b86-445a-8c32-1e6bc14bf05e-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.739597 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/23095127-8b86-445a-8c32-1e6bc14bf05e-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.739625 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/23095127-8b86-445a-8c32-1e6bc14bf05e-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.739646 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7p8s\" (UniqueName: \"kubernetes.io/projected/23095127-8b86-445a-8c32-1e6bc14bf05e-kube-api-access-r7p8s\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.840606 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/23095127-8b86-445a-8c32-1e6bc14bf05e-config-data\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.841040 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/23095127-8b86-445a-8c32-1e6bc14bf05e-pod-info\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.841064 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/23095127-8b86-445a-8c32-1e6bc14bf05e-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.841085 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/23095127-8b86-445a-8c32-1e6bc14bf05e-server-conf\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " 
pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.841102 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/23095127-8b86-445a-8c32-1e6bc14bf05e-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.841136 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/23095127-8b86-445a-8c32-1e6bc14bf05e-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.841173 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/23095127-8b86-445a-8c32-1e6bc14bf05e-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.841189 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/23095127-8b86-445a-8c32-1e6bc14bf05e-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.841209 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7p8s\" (UniqueName: \"kubernetes.io/projected/23095127-8b86-445a-8c32-1e6bc14bf05e-kube-api-access-r7p8s\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.841280 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.841301 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/23095127-8b86-445a-8c32-1e6bc14bf05e-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.841729 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/23095127-8b86-445a-8c32-1e6bc14bf05e-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.842056 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/23095127-8b86-445a-8c32-1e6bc14bf05e-config-data\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.842635 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/23095127-8b86-445a-8c32-1e6bc14bf05e-server-conf\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.842845 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/23095127-8b86-445a-8c32-1e6bc14bf05e-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.843162 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/23095127-8b86-445a-8c32-1e6bc14bf05e-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.843372 4811 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.846505 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/23095127-8b86-445a-8c32-1e6bc14bf05e-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.847152 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/23095127-8b86-445a-8c32-1e6bc14bf05e-pod-info\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.847259 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/23095127-8b86-445a-8c32-1e6bc14bf05e-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.847363 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/23095127-8b86-445a-8c32-1e6bc14bf05e-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.869395 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7p8s\" (UniqueName: \"kubernetes.io/projected/23095127-8b86-445a-8c32-1e6bc14bf05e-kube-api-access-r7p8s\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.883566 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " pod="openstack/rabbitmq-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.911557 4811 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/rabbitmq-cell1-server-0"] Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.915264 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.917658 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.917839 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.917926 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.918045 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-swmf5" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.918059 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.917932 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.918235 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.928741 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 28 16:04:53 crc kubenswrapper[4811]: I0128 16:04:53.961883 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.046787 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64ts8\" (UniqueName: \"kubernetes.io/projected/3b7599aa-7114-46c9-bf38-b6d735b75326-kube-api-access-64ts8\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.046841 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3b7599aa-7114-46c9-bf38-b6d735b75326-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.046880 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3b7599aa-7114-46c9-bf38-b6d735b75326-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.046907 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3b7599aa-7114-46c9-bf38-b6d735b75326-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.046975 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"server-conf\" (UniqueName: \"kubernetes.io/configmap/3b7599aa-7114-46c9-bf38-b6d735b75326-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.047118 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3b7599aa-7114-46c9-bf38-b6d735b75326-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.047150 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3b7599aa-7114-46c9-bf38-b6d735b75326-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.047185 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3b7599aa-7114-46c9-bf38-b6d735b75326-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.047218 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3b7599aa-7114-46c9-bf38-b6d735b75326-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.047251 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.047277 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3b7599aa-7114-46c9-bf38-b6d735b75326-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.148817 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3b7599aa-7114-46c9-bf38-b6d735b75326-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.148864 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3b7599aa-7114-46c9-bf38-b6d735b75326-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.148881 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/3b7599aa-7114-46c9-bf38-b6d735b75326-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.148924 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3b7599aa-7114-46c9-bf38-b6d735b75326-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.148945 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3b7599aa-7114-46c9-bf38-b6d735b75326-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.148968 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3b7599aa-7114-46c9-bf38-b6d735b75326-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.148992 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3b7599aa-7114-46c9-bf38-b6d735b75326-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.149015 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.149030 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3b7599aa-7114-46c9-bf38-b6d735b75326-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.149069 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64ts8\" (UniqueName: \"kubernetes.io/projected/3b7599aa-7114-46c9-bf38-b6d735b75326-kube-api-access-64ts8\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.149089 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3b7599aa-7114-46c9-bf38-b6d735b75326-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.149982 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3b7599aa-7114-46c9-bf38-b6d735b75326-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.150404 4811 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.151833 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3b7599aa-7114-46c9-bf38-b6d735b75326-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.153622 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3b7599aa-7114-46c9-bf38-b6d735b75326-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.153863 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3b7599aa-7114-46c9-bf38-b6d735b75326-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.154569 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3b7599aa-7114-46c9-bf38-b6d735b75326-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.158024 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3b7599aa-7114-46c9-bf38-b6d735b75326-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.159718 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3b7599aa-7114-46c9-bf38-b6d735b75326-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.176442 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64ts8\" (UniqueName: \"kubernetes.io/projected/3b7599aa-7114-46c9-bf38-b6d735b75326-kube-api-access-64ts8\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.182458 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3b7599aa-7114-46c9-bf38-b6d735b75326-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.182806 4811 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3b7599aa-7114-46c9-bf38-b6d735b75326-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.186356 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:54 crc kubenswrapper[4811]: I0128 16:04:54.263624 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.103648 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.104884 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.107627 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.107949 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.108200 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.108208 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-m9p4m" Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.109994 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.132183 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.272219 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/156027c6-3cec-4317-8267-eb234c90af40-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"156027c6-3cec-4317-8267-eb234c90af40\") " pod="openstack/openstack-galera-0" Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.272270 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-galera-0\" (UID: \"156027c6-3cec-4317-8267-eb234c90af40\") " pod="openstack/openstack-galera-0" Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.272337 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/156027c6-3cec-4317-8267-eb234c90af40-config-data-default\") pod \"openstack-galera-0\" (UID: \"156027c6-3cec-4317-8267-eb234c90af40\") " pod="openstack/openstack-galera-0" Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.272387 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: 
\"kubernetes.io/empty-dir/156027c6-3cec-4317-8267-eb234c90af40-config-data-generated\") pod \"openstack-galera-0\" (UID: \"156027c6-3cec-4317-8267-eb234c90af40\") " pod="openstack/openstack-galera-0" Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.272416 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/156027c6-3cec-4317-8267-eb234c90af40-kolla-config\") pod \"openstack-galera-0\" (UID: \"156027c6-3cec-4317-8267-eb234c90af40\") " pod="openstack/openstack-galera-0" Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.272469 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/156027c6-3cec-4317-8267-eb234c90af40-operator-scripts\") pod \"openstack-galera-0\" (UID: \"156027c6-3cec-4317-8267-eb234c90af40\") " pod="openstack/openstack-galera-0" Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.272504 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6mkz\" (UniqueName: \"kubernetes.io/projected/156027c6-3cec-4317-8267-eb234c90af40-kube-api-access-p6mkz\") pod \"openstack-galera-0\" (UID: \"156027c6-3cec-4317-8267-eb234c90af40\") " pod="openstack/openstack-galera-0" Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.272538 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/156027c6-3cec-4317-8267-eb234c90af40-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"156027c6-3cec-4317-8267-eb234c90af40\") " pod="openstack/openstack-galera-0" Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.377278 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/156027c6-3cec-4317-8267-eb234c90af40-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"156027c6-3cec-4317-8267-eb234c90af40\") " pod="openstack/openstack-galera-0" Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.377647 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/156027c6-3cec-4317-8267-eb234c90af40-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"156027c6-3cec-4317-8267-eb234c90af40\") " pod="openstack/openstack-galera-0" Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.377675 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-galera-0\" (UID: \"156027c6-3cec-4317-8267-eb234c90af40\") " pod="openstack/openstack-galera-0" Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.377724 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/156027c6-3cec-4317-8267-eb234c90af40-config-data-default\") pod \"openstack-galera-0\" (UID: \"156027c6-3cec-4317-8267-eb234c90af40\") " pod="openstack/openstack-galera-0" Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.377765 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/156027c6-3cec-4317-8267-eb234c90af40-config-data-generated\") pod \"openstack-galera-0\" (UID: 
\"156027c6-3cec-4317-8267-eb234c90af40\") " pod="openstack/openstack-galera-0" Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.377802 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/156027c6-3cec-4317-8267-eb234c90af40-kolla-config\") pod \"openstack-galera-0\" (UID: \"156027c6-3cec-4317-8267-eb234c90af40\") " pod="openstack/openstack-galera-0" Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.377843 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/156027c6-3cec-4317-8267-eb234c90af40-operator-scripts\") pod \"openstack-galera-0\" (UID: \"156027c6-3cec-4317-8267-eb234c90af40\") " pod="openstack/openstack-galera-0" Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.377891 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6mkz\" (UniqueName: \"kubernetes.io/projected/156027c6-3cec-4317-8267-eb234c90af40-kube-api-access-p6mkz\") pod \"openstack-galera-0\" (UID: \"156027c6-3cec-4317-8267-eb234c90af40\") " pod="openstack/openstack-galera-0" Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.379840 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/156027c6-3cec-4317-8267-eb234c90af40-config-data-default\") pod \"openstack-galera-0\" (UID: \"156027c6-3cec-4317-8267-eb234c90af40\") " pod="openstack/openstack-galera-0" Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.380096 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/156027c6-3cec-4317-8267-eb234c90af40-kolla-config\") pod \"openstack-galera-0\" (UID: \"156027c6-3cec-4317-8267-eb234c90af40\") " pod="openstack/openstack-galera-0" Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.380593 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/156027c6-3cec-4317-8267-eb234c90af40-config-data-generated\") pod \"openstack-galera-0\" (UID: \"156027c6-3cec-4317-8267-eb234c90af40\") " pod="openstack/openstack-galera-0" Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.380912 4811 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-galera-0\" (UID: \"156027c6-3cec-4317-8267-eb234c90af40\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/openstack-galera-0" Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.381884 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/156027c6-3cec-4317-8267-eb234c90af40-operator-scripts\") pod \"openstack-galera-0\" (UID: \"156027c6-3cec-4317-8267-eb234c90af40\") " pod="openstack/openstack-galera-0" Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.382218 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/156027c6-3cec-4317-8267-eb234c90af40-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"156027c6-3cec-4317-8267-eb234c90af40\") " pod="openstack/openstack-galera-0" Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.387862 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/156027c6-3cec-4317-8267-eb234c90af40-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"156027c6-3cec-4317-8267-eb234c90af40\") " pod="openstack/openstack-galera-0" Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.402747 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6mkz\" (UniqueName: \"kubernetes.io/projected/156027c6-3cec-4317-8267-eb234c90af40-kube-api-access-p6mkz\") pod \"openstack-galera-0\" (UID: \"156027c6-3cec-4317-8267-eb234c90af40\") " pod="openstack/openstack-galera-0" Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.426890 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-galera-0\" (UID: \"156027c6-3cec-4317-8267-eb234c90af40\") " pod="openstack/openstack-galera-0" Jan 28 16:04:55 crc kubenswrapper[4811]: I0128 16:04:55.436479 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.509607 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.510842 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.514193 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-7dwr5" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.514662 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.515143 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.515426 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.524633 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.600984 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") " pod="openstack/openstack-cell1-galera-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.601048 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") " pod="openstack/openstack-cell1-galera-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.601079 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") " pod="openstack/openstack-cell1-galera-0" Jan 28 16:04:56 
crc kubenswrapper[4811]: I0128 16:04:56.601109 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") " pod="openstack/openstack-cell1-galera-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.601152 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-cell1-galera-0\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") " pod="openstack/openstack-cell1-galera-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.601210 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kh5jt\" (UniqueName: \"kubernetes.io/projected/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-kube-api-access-kh5jt\") pod \"openstack-cell1-galera-0\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") " pod="openstack/openstack-cell1-galera-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.601240 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") " pod="openstack/openstack-cell1-galera-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.601275 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") " pod="openstack/openstack-cell1-galera-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.702904 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kh5jt\" (UniqueName: \"kubernetes.io/projected/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-kube-api-access-kh5jt\") pod \"openstack-cell1-galera-0\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") " pod="openstack/openstack-cell1-galera-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.702965 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") " pod="openstack/openstack-cell1-galera-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.702990 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") " pod="openstack/openstack-cell1-galera-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.703083 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") " pod="openstack/openstack-cell1-galera-0" Jan 28 16:04:56 crc 
kubenswrapper[4811]: I0128 16:04:56.703113 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") " pod="openstack/openstack-cell1-galera-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.703139 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") " pod="openstack/openstack-cell1-galera-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.703167 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") " pod="openstack/openstack-cell1-galera-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.703208 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-cell1-galera-0\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") " pod="openstack/openstack-cell1-galera-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.703398 4811 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-cell1-galera-0\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/openstack-cell1-galera-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.704538 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") " pod="openstack/openstack-cell1-galera-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.704535 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") " pod="openstack/openstack-cell1-galera-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.704792 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") " pod="openstack/openstack-cell1-galera-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.705555 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") " pod="openstack/openstack-cell1-galera-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.713261 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") " pod="openstack/openstack-cell1-galera-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.713332 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") " pod="openstack/openstack-cell1-galera-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.729185 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.730367 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.736354 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kh5jt\" (UniqueName: \"kubernetes.io/projected/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-kube-api-access-kh5jt\") pod \"openstack-cell1-galera-0\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") " pod="openstack/openstack-cell1-galera-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.736609 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-44lm9" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.736744 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.736865 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.738963 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-cell1-galera-0\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") " pod="openstack/openstack-cell1-galera-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.746832 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.805100 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/84b57c00-a800-4b82-98c7-8ebcc04c2ab6-memcached-tls-certs\") pod \"memcached-0\" (UID: \"84b57c00-a800-4b82-98c7-8ebcc04c2ab6\") " pod="openstack/memcached-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.805164 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/84b57c00-a800-4b82-98c7-8ebcc04c2ab6-config-data\") pod \"memcached-0\" (UID: \"84b57c00-a800-4b82-98c7-8ebcc04c2ab6\") " pod="openstack/memcached-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.805193 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jg9mz\" (UniqueName: \"kubernetes.io/projected/84b57c00-a800-4b82-98c7-8ebcc04c2ab6-kube-api-access-jg9mz\") pod \"memcached-0\" (UID: \"84b57c00-a800-4b82-98c7-8ebcc04c2ab6\") " pod="openstack/memcached-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.805288 4811 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84b57c00-a800-4b82-98c7-8ebcc04c2ab6-combined-ca-bundle\") pod \"memcached-0\" (UID: \"84b57c00-a800-4b82-98c7-8ebcc04c2ab6\") " pod="openstack/memcached-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.805334 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/84b57c00-a800-4b82-98c7-8ebcc04c2ab6-kolla-config\") pod \"memcached-0\" (UID: \"84b57c00-a800-4b82-98c7-8ebcc04c2ab6\") " pod="openstack/memcached-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.827620 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.906551 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84b57c00-a800-4b82-98c7-8ebcc04c2ab6-combined-ca-bundle\") pod \"memcached-0\" (UID: \"84b57c00-a800-4b82-98c7-8ebcc04c2ab6\") " pod="openstack/memcached-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.906615 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/84b57c00-a800-4b82-98c7-8ebcc04c2ab6-kolla-config\") pod \"memcached-0\" (UID: \"84b57c00-a800-4b82-98c7-8ebcc04c2ab6\") " pod="openstack/memcached-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.906663 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/84b57c00-a800-4b82-98c7-8ebcc04c2ab6-memcached-tls-certs\") pod \"memcached-0\" (UID: \"84b57c00-a800-4b82-98c7-8ebcc04c2ab6\") " pod="openstack/memcached-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.906692 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/84b57c00-a800-4b82-98c7-8ebcc04c2ab6-config-data\") pod \"memcached-0\" (UID: \"84b57c00-a800-4b82-98c7-8ebcc04c2ab6\") " pod="openstack/memcached-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.906720 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jg9mz\" (UniqueName: \"kubernetes.io/projected/84b57c00-a800-4b82-98c7-8ebcc04c2ab6-kube-api-access-jg9mz\") pod \"memcached-0\" (UID: \"84b57c00-a800-4b82-98c7-8ebcc04c2ab6\") " pod="openstack/memcached-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.907706 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/84b57c00-a800-4b82-98c7-8ebcc04c2ab6-config-data\") pod \"memcached-0\" (UID: \"84b57c00-a800-4b82-98c7-8ebcc04c2ab6\") " pod="openstack/memcached-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.908074 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/84b57c00-a800-4b82-98c7-8ebcc04c2ab6-kolla-config\") pod \"memcached-0\" (UID: \"84b57c00-a800-4b82-98c7-8ebcc04c2ab6\") " pod="openstack/memcached-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.912658 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/84b57c00-a800-4b82-98c7-8ebcc04c2ab6-combined-ca-bundle\") pod \"memcached-0\" (UID: \"84b57c00-a800-4b82-98c7-8ebcc04c2ab6\") " pod="openstack/memcached-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.914936 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/84b57c00-a800-4b82-98c7-8ebcc04c2ab6-memcached-tls-certs\") pod \"memcached-0\" (UID: \"84b57c00-a800-4b82-98c7-8ebcc04c2ab6\") " pod="openstack/memcached-0" Jan 28 16:04:56 crc kubenswrapper[4811]: I0128 16:04:56.940893 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jg9mz\" (UniqueName: \"kubernetes.io/projected/84b57c00-a800-4b82-98c7-8ebcc04c2ab6-kube-api-access-jg9mz\") pod \"memcached-0\" (UID: \"84b57c00-a800-4b82-98c7-8ebcc04c2ab6\") " pod="openstack/memcached-0" Jan 28 16:04:57 crc kubenswrapper[4811]: I0128 16:04:57.095206 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Jan 28 16:04:58 crc kubenswrapper[4811]: I0128 16:04:58.662857 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Jan 28 16:04:58 crc kubenswrapper[4811]: I0128 16:04:58.664194 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 28 16:04:58 crc kubenswrapper[4811]: I0128 16:04:58.670288 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-d8s5p" Jan 28 16:04:58 crc kubenswrapper[4811]: I0128 16:04:58.681379 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 28 16:04:58 crc kubenswrapper[4811]: I0128 16:04:58.738957 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jjmf\" (UniqueName: \"kubernetes.io/projected/f1fdd831-6083-4581-9377-64690fda7f53-kube-api-access-4jjmf\") pod \"kube-state-metrics-0\" (UID: \"f1fdd831-6083-4581-9377-64690fda7f53\") " pod="openstack/kube-state-metrics-0" Jan 28 16:04:58 crc kubenswrapper[4811]: I0128 16:04:58.840038 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jjmf\" (UniqueName: \"kubernetes.io/projected/f1fdd831-6083-4581-9377-64690fda7f53-kube-api-access-4jjmf\") pod \"kube-state-metrics-0\" (UID: \"f1fdd831-6083-4581-9377-64690fda7f53\") " pod="openstack/kube-state-metrics-0" Jan 28 16:04:58 crc kubenswrapper[4811]: I0128 16:04:58.857638 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jjmf\" (UniqueName: \"kubernetes.io/projected/f1fdd831-6083-4581-9377-64690fda7f53-kube-api-access-4jjmf\") pod \"kube-state-metrics-0\" (UID: \"f1fdd831-6083-4581-9377-64690fda7f53\") " pod="openstack/kube-state-metrics-0" Jan 28 16:04:58 crc kubenswrapper[4811]: I0128 16:04:58.979813 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.661446 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95f5f6995-6ztk9" event={"ID":"a6313906-9e80-41ce-b4c1-2f673d09425b","Type":"ContainerStarted","Data":"92e6abdb6443ea186120b3948813d56e66dc31894971b27cd0a8e56c332d9f87"} Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.820549 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-wzxwb"] Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.823740 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-wzxwb" Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.831746 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.832050 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-z7vl2" Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.846331 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-wzxwb"] Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.883063 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-fxxlf"] Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.887296 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-fxxlf" Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.888220 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/8bfbbf41-033c-479e-b625-396378f8afa2-var-log\") pod \"ovn-controller-ovs-wzxwb\" (UID: \"8bfbbf41-033c-479e-b625-396378f8afa2\") " pod="openstack/ovn-controller-ovs-wzxwb" Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.888386 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5dpb4\" (UniqueName: \"kubernetes.io/projected/8bfbbf41-033c-479e-b625-396378f8afa2-kube-api-access-5dpb4\") pod \"ovn-controller-ovs-wzxwb\" (UID: \"8bfbbf41-033c-479e-b625-396378f8afa2\") " pod="openstack/ovn-controller-ovs-wzxwb" Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.888448 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/8bfbbf41-033c-479e-b625-396378f8afa2-etc-ovs\") pod \"ovn-controller-ovs-wzxwb\" (UID: \"8bfbbf41-033c-479e-b625-396378f8afa2\") " pod="openstack/ovn-controller-ovs-wzxwb" Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.888476 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/8bfbbf41-033c-479e-b625-396378f8afa2-var-lib\") pod \"ovn-controller-ovs-wzxwb\" (UID: \"8bfbbf41-033c-479e-b625-396378f8afa2\") " pod="openstack/ovn-controller-ovs-wzxwb" Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.888511 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8bfbbf41-033c-479e-b625-396378f8afa2-var-run\") pod \"ovn-controller-ovs-wzxwb\" (UID: \"8bfbbf41-033c-479e-b625-396378f8afa2\") " pod="openstack/ovn-controller-ovs-wzxwb" Jan 28 16:05:01 crc 
kubenswrapper[4811]: I0128 16:05:01.888548 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8bfbbf41-033c-479e-b625-396378f8afa2-scripts\") pod \"ovn-controller-ovs-wzxwb\" (UID: \"8bfbbf41-033c-479e-b625-396378f8afa2\") " pod="openstack/ovn-controller-ovs-wzxwb" Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.890401 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.931934 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-fxxlf"] Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.990381 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-var-run\") pod \"ovn-controller-fxxlf\" (UID: \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\") " pod="openstack/ovn-controller-fxxlf" Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.990477 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/8bfbbf41-033c-479e-b625-396378f8afa2-var-log\") pod \"ovn-controller-ovs-wzxwb\" (UID: \"8bfbbf41-033c-479e-b625-396378f8afa2\") " pod="openstack/ovn-controller-ovs-wzxwb" Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.990501 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwpj2\" (UniqueName: \"kubernetes.io/projected/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-kube-api-access-cwpj2\") pod \"ovn-controller-fxxlf\" (UID: \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\") " pod="openstack/ovn-controller-fxxlf" Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.990532 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-var-log-ovn\") pod \"ovn-controller-fxxlf\" (UID: \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\") " pod="openstack/ovn-controller-fxxlf" Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.990550 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-ovn-controller-tls-certs\") pod \"ovn-controller-fxxlf\" (UID: \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\") " pod="openstack/ovn-controller-fxxlf" Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.990590 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/8bfbbf41-033c-479e-b625-396378f8afa2-etc-ovs\") pod \"ovn-controller-ovs-wzxwb\" (UID: \"8bfbbf41-033c-479e-b625-396378f8afa2\") " pod="openstack/ovn-controller-ovs-wzxwb" Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.990669 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5dpb4\" (UniqueName: \"kubernetes.io/projected/8bfbbf41-033c-479e-b625-396378f8afa2-kube-api-access-5dpb4\") pod \"ovn-controller-ovs-wzxwb\" (UID: \"8bfbbf41-033c-479e-b625-396378f8afa2\") " pod="openstack/ovn-controller-ovs-wzxwb" Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.990719 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"var-lib\" (UniqueName: \"kubernetes.io/host-path/8bfbbf41-033c-479e-b625-396378f8afa2-var-lib\") pod \"ovn-controller-ovs-wzxwb\" (UID: \"8bfbbf41-033c-479e-b625-396378f8afa2\") " pod="openstack/ovn-controller-ovs-wzxwb" Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.990755 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-var-run-ovn\") pod \"ovn-controller-fxxlf\" (UID: \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\") " pod="openstack/ovn-controller-fxxlf" Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.990793 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8bfbbf41-033c-479e-b625-396378f8afa2-var-run\") pod \"ovn-controller-ovs-wzxwb\" (UID: \"8bfbbf41-033c-479e-b625-396378f8afa2\") " pod="openstack/ovn-controller-ovs-wzxwb" Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.990816 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-scripts\") pod \"ovn-controller-fxxlf\" (UID: \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\") " pod="openstack/ovn-controller-fxxlf" Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.990863 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8bfbbf41-033c-479e-b625-396378f8afa2-scripts\") pod \"ovn-controller-ovs-wzxwb\" (UID: \"8bfbbf41-033c-479e-b625-396378f8afa2\") " pod="openstack/ovn-controller-ovs-wzxwb" Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.991136 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/8bfbbf41-033c-479e-b625-396378f8afa2-var-log\") pod \"ovn-controller-ovs-wzxwb\" (UID: \"8bfbbf41-033c-479e-b625-396378f8afa2\") " pod="openstack/ovn-controller-ovs-wzxwb" Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.991267 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8bfbbf41-033c-479e-b625-396378f8afa2-var-run\") pod \"ovn-controller-ovs-wzxwb\" (UID: \"8bfbbf41-033c-479e-b625-396378f8afa2\") " pod="openstack/ovn-controller-ovs-wzxwb" Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.991315 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/8bfbbf41-033c-479e-b625-396378f8afa2-var-lib\") pod \"ovn-controller-ovs-wzxwb\" (UID: \"8bfbbf41-033c-479e-b625-396378f8afa2\") " pod="openstack/ovn-controller-ovs-wzxwb" Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.991424 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-combined-ca-bundle\") pod \"ovn-controller-fxxlf\" (UID: \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\") " pod="openstack/ovn-controller-fxxlf" Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.991424 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/8bfbbf41-033c-479e-b625-396378f8afa2-etc-ovs\") pod \"ovn-controller-ovs-wzxwb\" (UID: \"8bfbbf41-033c-479e-b625-396378f8afa2\") " pod="openstack/ovn-controller-ovs-wzxwb" 
Jan 28 16:05:01 crc kubenswrapper[4811]: I0128 16:05:01.993786 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8bfbbf41-033c-479e-b625-396378f8afa2-scripts\") pod \"ovn-controller-ovs-wzxwb\" (UID: \"8bfbbf41-033c-479e-b625-396378f8afa2\") " pod="openstack/ovn-controller-ovs-wzxwb" Jan 28 16:05:02 crc kubenswrapper[4811]: I0128 16:05:02.009544 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5dpb4\" (UniqueName: \"kubernetes.io/projected/8bfbbf41-033c-479e-b625-396378f8afa2-kube-api-access-5dpb4\") pod \"ovn-controller-ovs-wzxwb\" (UID: \"8bfbbf41-033c-479e-b625-396378f8afa2\") " pod="openstack/ovn-controller-ovs-wzxwb" Jan 28 16:05:02 crc kubenswrapper[4811]: I0128 16:05:02.093320 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-var-run-ovn\") pod \"ovn-controller-fxxlf\" (UID: \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\") " pod="openstack/ovn-controller-fxxlf" Jan 28 16:05:02 crc kubenswrapper[4811]: I0128 16:05:02.093378 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-scripts\") pod \"ovn-controller-fxxlf\" (UID: \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\") " pod="openstack/ovn-controller-fxxlf" Jan 28 16:05:02 crc kubenswrapper[4811]: I0128 16:05:02.093460 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-combined-ca-bundle\") pod \"ovn-controller-fxxlf\" (UID: \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\") " pod="openstack/ovn-controller-fxxlf" Jan 28 16:05:02 crc kubenswrapper[4811]: I0128 16:05:02.093491 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-var-run\") pod \"ovn-controller-fxxlf\" (UID: \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\") " pod="openstack/ovn-controller-fxxlf" Jan 28 16:05:02 crc kubenswrapper[4811]: I0128 16:05:02.093542 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwpj2\" (UniqueName: \"kubernetes.io/projected/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-kube-api-access-cwpj2\") pod \"ovn-controller-fxxlf\" (UID: \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\") " pod="openstack/ovn-controller-fxxlf" Jan 28 16:05:02 crc kubenswrapper[4811]: I0128 16:05:02.093578 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-var-log-ovn\") pod \"ovn-controller-fxxlf\" (UID: \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\") " pod="openstack/ovn-controller-fxxlf" Jan 28 16:05:02 crc kubenswrapper[4811]: I0128 16:05:02.093597 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-ovn-controller-tls-certs\") pod \"ovn-controller-fxxlf\" (UID: \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\") " pod="openstack/ovn-controller-fxxlf" Jan 28 16:05:02 crc kubenswrapper[4811]: I0128 16:05:02.095649 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: 
\"kubernetes.io/host-path/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-var-run\") pod \"ovn-controller-fxxlf\" (UID: \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\") " pod="openstack/ovn-controller-fxxlf" Jan 28 16:05:02 crc kubenswrapper[4811]: I0128 16:05:02.096998 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-var-log-ovn\") pod \"ovn-controller-fxxlf\" (UID: \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\") " pod="openstack/ovn-controller-fxxlf" Jan 28 16:05:02 crc kubenswrapper[4811]: I0128 16:05:02.097035 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-var-run-ovn\") pod \"ovn-controller-fxxlf\" (UID: \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\") " pod="openstack/ovn-controller-fxxlf" Jan 28 16:05:02 crc kubenswrapper[4811]: I0128 16:05:02.101375 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-scripts\") pod \"ovn-controller-fxxlf\" (UID: \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\") " pod="openstack/ovn-controller-fxxlf" Jan 28 16:05:02 crc kubenswrapper[4811]: I0128 16:05:02.114864 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-combined-ca-bundle\") pod \"ovn-controller-fxxlf\" (UID: \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\") " pod="openstack/ovn-controller-fxxlf" Jan 28 16:05:02 crc kubenswrapper[4811]: I0128 16:05:02.117013 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-ovn-controller-tls-certs\") pod \"ovn-controller-fxxlf\" (UID: \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\") " pod="openstack/ovn-controller-fxxlf" Jan 28 16:05:02 crc kubenswrapper[4811]: I0128 16:05:02.124318 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cwpj2\" (UniqueName: \"kubernetes.io/projected/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-kube-api-access-cwpj2\") pod \"ovn-controller-fxxlf\" (UID: \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\") " pod="openstack/ovn-controller-fxxlf" Jan 28 16:05:02 crc kubenswrapper[4811]: I0128 16:05:02.172363 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-wzxwb" Jan 28 16:05:02 crc kubenswrapper[4811]: I0128 16:05:02.212623 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-fxxlf" Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.145329 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.149936 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.154258 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.154531 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-wqn9m" Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.154558 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.154227 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.155998 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.169041 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.213926 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") " pod="openstack/ovsdbserver-nb-0" Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.214206 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bc54361e-5f34-4253-b91f-b1683e944191-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") " pod="openstack/ovsdbserver-nb-0" Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.214250 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bc54361e-5f34-4253-b91f-b1683e944191-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") " pod="openstack/ovsdbserver-nb-0" Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.214288 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc54361e-5f34-4253-b91f-b1683e944191-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") " pod="openstack/ovsdbserver-nb-0" Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.214359 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cn88r\" (UniqueName: \"kubernetes.io/projected/bc54361e-5f34-4253-b91f-b1683e944191-kube-api-access-cn88r\") pod \"ovsdbserver-nb-0\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") " pod="openstack/ovsdbserver-nb-0" Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.214404 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc54361e-5f34-4253-b91f-b1683e944191-config\") pod \"ovsdbserver-nb-0\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") " pod="openstack/ovsdbserver-nb-0" Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.214463 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/bc54361e-5f34-4253-b91f-b1683e944191-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") " pod="openstack/ovsdbserver-nb-0" Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.214540 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bc54361e-5f34-4253-b91f-b1683e944191-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") " pod="openstack/ovsdbserver-nb-0" Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.316723 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cn88r\" (UniqueName: \"kubernetes.io/projected/bc54361e-5f34-4253-b91f-b1683e944191-kube-api-access-cn88r\") pod \"ovsdbserver-nb-0\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") " pod="openstack/ovsdbserver-nb-0" Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.316804 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc54361e-5f34-4253-b91f-b1683e944191-config\") pod \"ovsdbserver-nb-0\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") " pod="openstack/ovsdbserver-nb-0" Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.316839 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bc54361e-5f34-4253-b91f-b1683e944191-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") " pod="openstack/ovsdbserver-nb-0" Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.316875 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bc54361e-5f34-4253-b91f-b1683e944191-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") " pod="openstack/ovsdbserver-nb-0" Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.316924 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") " pod="openstack/ovsdbserver-nb-0" Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.316945 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bc54361e-5f34-4253-b91f-b1683e944191-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") " pod="openstack/ovsdbserver-nb-0" Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.316978 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bc54361e-5f34-4253-b91f-b1683e944191-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") " pod="openstack/ovsdbserver-nb-0" Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.317019 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc54361e-5f34-4253-b91f-b1683e944191-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") " pod="openstack/ovsdbserver-nb-0" Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 
16:05:03.323093 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc54361e-5f34-4253-b91f-b1683e944191-config\") pod \"ovsdbserver-nb-0\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") " pod="openstack/ovsdbserver-nb-0" Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.324406 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bc54361e-5f34-4253-b91f-b1683e944191-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") " pod="openstack/ovsdbserver-nb-0" Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.324885 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bc54361e-5f34-4253-b91f-b1683e944191-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") " pod="openstack/ovsdbserver-nb-0" Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.325230 4811 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/ovsdbserver-nb-0" Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.326184 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc54361e-5f34-4253-b91f-b1683e944191-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") " pod="openstack/ovsdbserver-nb-0" Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.337038 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bc54361e-5f34-4253-b91f-b1683e944191-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") " pod="openstack/ovsdbserver-nb-0" Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.340106 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bc54361e-5f34-4253-b91f-b1683e944191-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") " pod="openstack/ovsdbserver-nb-0" Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.341800 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cn88r\" (UniqueName: \"kubernetes.io/projected/bc54361e-5f34-4253-b91f-b1683e944191-kube-api-access-cn88r\") pod \"ovsdbserver-nb-0\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") " pod="openstack/ovsdbserver-nb-0" Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.349945 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") " pod="openstack/ovsdbserver-nb-0" Jan 28 16:05:03 crc kubenswrapper[4811]: I0128 16:05:03.479557 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 28 16:05:05 crc kubenswrapper[4811]: I0128 16:05:05.619761 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 28 16:05:06 crc kubenswrapper[4811]: I0128 16:05:06.550676 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 28 16:05:06 crc kubenswrapper[4811]: I0128 16:05:06.552531 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jan 28 16:05:06 crc kubenswrapper[4811]: I0128 16:05:06.555034 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Jan 28 16:05:06 crc kubenswrapper[4811]: I0128 16:05:06.566942 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-ggtxm" Jan 28 16:05:06 crc kubenswrapper[4811]: I0128 16:05:06.566981 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Jan 28 16:05:06 crc kubenswrapper[4811]: I0128 16:05:06.566940 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Jan 28 16:05:06 crc kubenswrapper[4811]: I0128 16:05:06.577592 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 28 16:05:06 crc kubenswrapper[4811]: I0128 16:05:06.707810 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ad905f29-3683-466d-8acd-192cc5ac0960-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") " pod="openstack/ovsdbserver-sb-0" Jan 28 16:05:06 crc kubenswrapper[4811]: I0128 16:05:06.708047 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad905f29-3683-466d-8acd-192cc5ac0960-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") " pod="openstack/ovsdbserver-sb-0" Jan 28 16:05:06 crc kubenswrapper[4811]: I0128 16:05:06.708227 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") " pod="openstack/ovsdbserver-sb-0" Jan 28 16:05:06 crc kubenswrapper[4811]: I0128 16:05:06.708274 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ad905f29-3683-466d-8acd-192cc5ac0960-config\") pod \"ovsdbserver-sb-0\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") " pod="openstack/ovsdbserver-sb-0" Jan 28 16:05:06 crc kubenswrapper[4811]: I0128 16:05:06.708361 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rp5zq\" (UniqueName: \"kubernetes.io/projected/ad905f29-3683-466d-8acd-192cc5ac0960-kube-api-access-rp5zq\") pod \"ovsdbserver-sb-0\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") " pod="openstack/ovsdbserver-sb-0" Jan 28 16:05:06 crc kubenswrapper[4811]: I0128 16:05:06.708411 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/ad905f29-3683-466d-8acd-192cc5ac0960-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") " pod="openstack/ovsdbserver-sb-0" Jan 28 16:05:06 crc kubenswrapper[4811]: I0128 16:05:06.708544 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ad905f29-3683-466d-8acd-192cc5ac0960-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") " pod="openstack/ovsdbserver-sb-0" Jan 28 16:05:06 crc kubenswrapper[4811]: I0128 16:05:06.708643 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad905f29-3683-466d-8acd-192cc5ac0960-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") " pod="openstack/ovsdbserver-sb-0" Jan 28 16:05:06 crc kubenswrapper[4811]: I0128 16:05:06.810139 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad905f29-3683-466d-8acd-192cc5ac0960-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") " pod="openstack/ovsdbserver-sb-0" Jan 28 16:05:06 crc kubenswrapper[4811]: I0128 16:05:06.810195 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ad905f29-3683-466d-8acd-192cc5ac0960-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") " pod="openstack/ovsdbserver-sb-0" Jan 28 16:05:06 crc kubenswrapper[4811]: I0128 16:05:06.810266 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad905f29-3683-466d-8acd-192cc5ac0960-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") " pod="openstack/ovsdbserver-sb-0" Jan 28 16:05:06 crc kubenswrapper[4811]: I0128 16:05:06.810320 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") " pod="openstack/ovsdbserver-sb-0" Jan 28 16:05:06 crc kubenswrapper[4811]: I0128 16:05:06.810354 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ad905f29-3683-466d-8acd-192cc5ac0960-config\") pod \"ovsdbserver-sb-0\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") " pod="openstack/ovsdbserver-sb-0" Jan 28 16:05:06 crc kubenswrapper[4811]: I0128 16:05:06.810396 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rp5zq\" (UniqueName: \"kubernetes.io/projected/ad905f29-3683-466d-8acd-192cc5ac0960-kube-api-access-rp5zq\") pod \"ovsdbserver-sb-0\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") " pod="openstack/ovsdbserver-sb-0" Jan 28 16:05:06 crc kubenswrapper[4811]: I0128 16:05:06.810413 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad905f29-3683-466d-8acd-192cc5ac0960-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") " pod="openstack/ovsdbserver-sb-0" Jan 28 16:05:06 crc 
kubenswrapper[4811]: I0128 16:05:06.810456 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ad905f29-3683-466d-8acd-192cc5ac0960-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") " pod="openstack/ovsdbserver-sb-0" Jan 28 16:05:06 crc kubenswrapper[4811]: I0128 16:05:06.810965 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ad905f29-3683-466d-8acd-192cc5ac0960-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") " pod="openstack/ovsdbserver-sb-0" Jan 28 16:05:06 crc kubenswrapper[4811]: I0128 16:05:06.812324 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ad905f29-3683-466d-8acd-192cc5ac0960-config\") pod \"ovsdbserver-sb-0\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") " pod="openstack/ovsdbserver-sb-0" Jan 28 16:05:06 crc kubenswrapper[4811]: I0128 16:05:06.818316 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ad905f29-3683-466d-8acd-192cc5ac0960-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") " pod="openstack/ovsdbserver-sb-0" Jan 28 16:05:06 crc kubenswrapper[4811]: I0128 16:05:06.818508 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad905f29-3683-466d-8acd-192cc5ac0960-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") " pod="openstack/ovsdbserver-sb-0" Jan 28 16:05:06 crc kubenswrapper[4811]: I0128 16:05:06.818707 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad905f29-3683-466d-8acd-192cc5ac0960-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") " pod="openstack/ovsdbserver-sb-0" Jan 28 16:05:06 crc kubenswrapper[4811]: I0128 16:05:06.819535 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad905f29-3683-466d-8acd-192cc5ac0960-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") " pod="openstack/ovsdbserver-sb-0" Jan 28 16:05:06 crc kubenswrapper[4811]: I0128 16:05:06.828236 4811 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/ovsdbserver-sb-0" Jan 28 16:05:06 crc kubenswrapper[4811]: I0128 16:05:06.838821 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rp5zq\" (UniqueName: \"kubernetes.io/projected/ad905f29-3683-466d-8acd-192cc5ac0960-kube-api-access-rp5zq\") pod \"ovsdbserver-sb-0\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") " pod="openstack/ovsdbserver-sb-0" Jan 28 16:05:06 crc kubenswrapper[4811]: I0128 16:05:06.849483 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") " 
pod="openstack/ovsdbserver-sb-0" Jan 28 16:05:06 crc kubenswrapper[4811]: I0128 16:05:06.884957 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jan 28 16:05:10 crc kubenswrapper[4811]: W0128 16:05:10.117712 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf1fdd831_6083_4581_9377_64690fda7f53.slice/crio-21d371442278dc823f8e71e20ece900098a5704be7485feafc3b158ef73002d8 WatchSource:0}: Error finding container 21d371442278dc823f8e71e20ece900098a5704be7485feafc3b158ef73002d8: Status 404 returned error can't find the container with id 21d371442278dc823f8e71e20ece900098a5704be7485feafc3b158ef73002d8 Jan 28 16:05:10 crc kubenswrapper[4811]: I0128 16:05:10.750525 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f1fdd831-6083-4581-9377-64690fda7f53","Type":"ContainerStarted","Data":"21d371442278dc823f8e71e20ece900098a5704be7485feafc3b158ef73002d8"} Jan 28 16:05:11 crc kubenswrapper[4811]: E0128 16:05:11.084321 4811 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33" Jan 28 16:05:11 crc kubenswrapper[4811]: E0128 16:05:11.084530 4811 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tsx99,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
dnsmasq-dns-84bb9d8bd9-x9hjk_openstack(c37bfb17-f77b-4572-9ff3-2c8b45839abe): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 28 16:05:11 crc kubenswrapper[4811]: E0128 16:05:11.085718 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-84bb9d8bd9-x9hjk" podUID="c37bfb17-f77b-4572-9ff3-2c8b45839abe" Jan 28 16:05:11 crc kubenswrapper[4811]: E0128 16:05:11.176054 4811 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33" Jan 28 16:05:11 crc kubenswrapper[4811]: E0128 16:05:11.176213 4811 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-m9tlw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-5f854695bc-pg82n_openstack(14b8d5c0-9e09-48de-8efa-12a3c053e9f8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 28 16:05:11 crc kubenswrapper[4811]: E0128 16:05:11.177374 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc 
= copying config: context canceled\"" pod="openstack/dnsmasq-dns-5f854695bc-pg82n" podUID="14b8d5c0-9e09-48de-8efa-12a3c053e9f8" Jan 28 16:05:11 crc kubenswrapper[4811]: I0128 16:05:11.611692 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 28 16:05:11 crc kubenswrapper[4811]: I0128 16:05:11.776979 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b","Type":"ContainerStarted","Data":"395f6a1cf5287731097f623dc9836e9c87bf16746900eaef633ccbd3245152cd"} Jan 28 16:05:11 crc kubenswrapper[4811]: I0128 16:05:11.795178 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744ffd65bc-7w9pg" event={"ID":"4b0dc77d-eab2-4a1a-b975-54dc6ec89453","Type":"ContainerStarted","Data":"62e314ca91cc581bb4d8cb13ea9ec980df2f0717cce63abe2c195e2b884825f1"} Jan 28 16:05:11 crc kubenswrapper[4811]: I0128 16:05:11.803290 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95f5f6995-6ztk9" event={"ID":"a6313906-9e80-41ce-b4c1-2f673d09425b","Type":"ContainerStarted","Data":"6c88cb44506b2bebe3348f7665aa8d4cbbc47bdb5fbce7f8a76755269c9e1823"} Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.017372 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.030217 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.040964 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-fxxlf"] Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.051844 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.060896 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.186821 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-wzxwb"] Jan 28 16:05:12 crc kubenswrapper[4811]: W0128 16:05:12.303668 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ad4ddb3_372a_4f21_9370_5ef2d74c4d9e.slice/crio-86e197af024f90b7f8e4f0b756032a9a09c5fd06241f6ee983b0164d265ca464 WatchSource:0}: Error finding container 86e197af024f90b7f8e4f0b756032a9a09c5fd06241f6ee983b0164d265ca464: Status 404 returned error can't find the container with id 86e197af024f90b7f8e4f0b756032a9a09c5fd06241f6ee983b0164d265ca464 Jan 28 16:05:12 crc kubenswrapper[4811]: W0128 16:05:12.305979 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod84b57c00_a800_4b82_98c7_8ebcc04c2ab6.slice/crio-18bbb3034bb69ed9e6b1b0f7994101be22ad34122776ec8404e6e32b129e023a WatchSource:0}: Error finding container 18bbb3034bb69ed9e6b1b0f7994101be22ad34122776ec8404e6e32b129e023a: Status 404 returned error can't find the container with id 18bbb3034bb69ed9e6b1b0f7994101be22ad34122776ec8404e6e32b129e023a Jan 28 16:05:12 crc kubenswrapper[4811]: W0128 16:05:12.307570 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod156027c6_3cec_4317_8267_eb234c90af40.slice/crio-bfd28952dd023fd20d841a82907630237832481c7f70abf5c7a71ef6816f119b 
WatchSource:0}: Error finding container bfd28952dd023fd20d841a82907630237832481c7f70abf5c7a71ef6816f119b: Status 404 returned error can't find the container with id bfd28952dd023fd20d841a82907630237832481c7f70abf5c7a71ef6816f119b Jan 28 16:05:12 crc kubenswrapper[4811]: W0128 16:05:12.308872 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b7599aa_7114_46c9_bf38_b6d735b75326.slice/crio-45b948098b16be556ad661750cce4e4c59d4eb55bf4a2b34bf0ebe3d475671d9 WatchSource:0}: Error finding container 45b948098b16be556ad661750cce4e4c59d4eb55bf4a2b34bf0ebe3d475671d9: Status 404 returned error can't find the container with id 45b948098b16be556ad661750cce4e4c59d4eb55bf4a2b34bf0ebe3d475671d9 Jan 28 16:05:12 crc kubenswrapper[4811]: W0128 16:05:12.313421 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod23095127_8b86_445a_8c32_1e6bc14bf05e.slice/crio-5d1af227a546798f00c079ebf2fcb81520a200698bcd1d0959a021cd15eea35d WatchSource:0}: Error finding container 5d1af227a546798f00c079ebf2fcb81520a200698bcd1d0959a021cd15eea35d: Status 404 returned error can't find the container with id 5d1af227a546798f00c079ebf2fcb81520a200698bcd1d0959a021cd15eea35d Jan 28 16:05:12 crc kubenswrapper[4811]: W0128 16:05:12.324880 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8bfbbf41_033c_479e_b625_396378f8afa2.slice/crio-6d7f40bd9c1402eada25f27c0958d15150774e9af5936dda2dff719e3daeedb6 WatchSource:0}: Error finding container 6d7f40bd9c1402eada25f27c0958d15150774e9af5936dda2dff719e3daeedb6: Status 404 returned error can't find the container with id 6d7f40bd9c1402eada25f27c0958d15150774e9af5936dda2dff719e3daeedb6 Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.392580 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.407203 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84bb9d8bd9-x9hjk" Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.414093 4811 util.go:48] "No ready sandbox for pod can be found. 
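The E-level burst a few entries above is one failure surfacing at three layers: the CRI image pull fails ("PullImage from image service failed" with rpc code = Canceled), kuberuntime_manager then dumps the full init-container spec as an "Unhandled Error", and pod_workers records "Error syncing pod, skipping", leaving the pod to retry after back-off. The W-level manager.go entries are a separate and usually benign race: cAdvisor notices a new crio-* cgroup before the runtime can resolve the container ID, hence the 404. A minimal sketch, with illustrative names, that tallies pull failures per image and the pods they block:

```python
import re
from collections import Counter

# Illustrative extraction for the E-level klog lines above.
PULL_FAIL = re.compile(r'"PullImage from image service failed" err="([^"]+)" image="([^"]+)"')
SYNC_FAIL = re.compile(r'"Error syncing pod, skipping".*?pod="([^"]+)"')

def pull_failures(lines):
    """Count pull failures per image and sync-skip errors per pod."""
    images, pods = Counter(), Counter()
    for line in lines:
        if m := PULL_FAIL.search(line):
            images[m.group(2)] += 1
        if m := SYNC_FAIL.search(line):
            pods[m.group(1)] += 1
    return images, pods
```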
Need to start a new one" pod="openstack/dnsmasq-dns-5f854695bc-pg82n" Jan 28 16:05:12 crc kubenswrapper[4811]: W0128 16:05:12.436400 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbc54361e_5f34_4253_b91f_b1683e944191.slice/crio-e6713f981b12ee8623fc24a3159310d213021263913657306c24da8a342ccd1a WatchSource:0}: Error finding container e6713f981b12ee8623fc24a3159310d213021263913657306c24da8a342ccd1a: Status 404 returned error can't find the container with id e6713f981b12ee8623fc24a3159310d213021263913657306c24da8a342ccd1a Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.548879 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14b8d5c0-9e09-48de-8efa-12a3c053e9f8-config\") pod \"14b8d5c0-9e09-48de-8efa-12a3c053e9f8\" (UID: \"14b8d5c0-9e09-48de-8efa-12a3c053e9f8\") " Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.548982 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c37bfb17-f77b-4572-9ff3-2c8b45839abe-config\") pod \"c37bfb17-f77b-4572-9ff3-2c8b45839abe\" (UID: \"c37bfb17-f77b-4572-9ff3-2c8b45839abe\") " Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.549092 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tsx99\" (UniqueName: \"kubernetes.io/projected/c37bfb17-f77b-4572-9ff3-2c8b45839abe-kube-api-access-tsx99\") pod \"c37bfb17-f77b-4572-9ff3-2c8b45839abe\" (UID: \"c37bfb17-f77b-4572-9ff3-2c8b45839abe\") " Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.549168 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m9tlw\" (UniqueName: \"kubernetes.io/projected/14b8d5c0-9e09-48de-8efa-12a3c053e9f8-kube-api-access-m9tlw\") pod \"14b8d5c0-9e09-48de-8efa-12a3c053e9f8\" (UID: \"14b8d5c0-9e09-48de-8efa-12a3c053e9f8\") " Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.549202 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/14b8d5c0-9e09-48de-8efa-12a3c053e9f8-dns-svc\") pod \"14b8d5c0-9e09-48de-8efa-12a3c053e9f8\" (UID: \"14b8d5c0-9e09-48de-8efa-12a3c053e9f8\") " Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.549473 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14b8d5c0-9e09-48de-8efa-12a3c053e9f8-config" (OuterVolumeSpecName: "config") pod "14b8d5c0-9e09-48de-8efa-12a3c053e9f8" (UID: "14b8d5c0-9e09-48de-8efa-12a3c053e9f8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.549898 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14b8d5c0-9e09-48de-8efa-12a3c053e9f8-config\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.549915 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14b8d5c0-9e09-48de-8efa-12a3c053e9f8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "14b8d5c0-9e09-48de-8efa-12a3c053e9f8" (UID: "14b8d5c0-9e09-48de-8efa-12a3c053e9f8"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.551134 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c37bfb17-f77b-4572-9ff3-2c8b45839abe-config" (OuterVolumeSpecName: "config") pod "c37bfb17-f77b-4572-9ff3-2c8b45839abe" (UID: "c37bfb17-f77b-4572-9ff3-2c8b45839abe"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.555391 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c37bfb17-f77b-4572-9ff3-2c8b45839abe-kube-api-access-tsx99" (OuterVolumeSpecName: "kube-api-access-tsx99") pod "c37bfb17-f77b-4572-9ff3-2c8b45839abe" (UID: "c37bfb17-f77b-4572-9ff3-2c8b45839abe"). InnerVolumeSpecName "kube-api-access-tsx99". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.555506 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14b8d5c0-9e09-48de-8efa-12a3c053e9f8-kube-api-access-m9tlw" (OuterVolumeSpecName: "kube-api-access-m9tlw") pod "14b8d5c0-9e09-48de-8efa-12a3c053e9f8" (UID: "14b8d5c0-9e09-48de-8efa-12a3c053e9f8"). InnerVolumeSpecName "kube-api-access-m9tlw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.651983 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c37bfb17-f77b-4572-9ff3-2c8b45839abe-config\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.652039 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tsx99\" (UniqueName: \"kubernetes.io/projected/c37bfb17-f77b-4572-9ff3-2c8b45839abe-kube-api-access-tsx99\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.652058 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m9tlw\" (UniqueName: \"kubernetes.io/projected/14b8d5c0-9e09-48de-8efa-12a3c053e9f8-kube-api-access-m9tlw\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.652074 4811 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/14b8d5c0-9e09-48de-8efa-12a3c053e9f8-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.814015 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"84b57c00-a800-4b82-98c7-8ebcc04c2ab6","Type":"ContainerStarted","Data":"18bbb3034bb69ed9e6b1b0f7994101be22ad34122776ec8404e6e32b129e023a"} Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.815934 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"156027c6-3cec-4317-8267-eb234c90af40","Type":"ContainerStarted","Data":"bfd28952dd023fd20d841a82907630237832481c7f70abf5c7a71ef6816f119b"} Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.817704 4811 generic.go:334] "Generic (PLEG): container finished" podID="4b0dc77d-eab2-4a1a-b975-54dc6ec89453" containerID="62e314ca91cc581bb4d8cb13ea9ec980df2f0717cce63abe2c195e2b884825f1" exitCode=0 Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.817801 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744ffd65bc-7w9pg" 
event={"ID":"4b0dc77d-eab2-4a1a-b975-54dc6ec89453","Type":"ContainerDied","Data":"62e314ca91cc581bb4d8cb13ea9ec980df2f0717cce63abe2c195e2b884825f1"} Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.819002 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f854695bc-pg82n" event={"ID":"14b8d5c0-9e09-48de-8efa-12a3c053e9f8","Type":"ContainerDied","Data":"948a323669d401a2a1c33840c5e68989d2dd791108fd8a6293a4cc8a7fbb73c4"} Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.819063 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f854695bc-pg82n" Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.821593 4811 generic.go:334] "Generic (PLEG): container finished" podID="a6313906-9e80-41ce-b4c1-2f673d09425b" containerID="6c88cb44506b2bebe3348f7665aa8d4cbbc47bdb5fbce7f8a76755269c9e1823" exitCode=0 Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.821632 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95f5f6995-6ztk9" event={"ID":"a6313906-9e80-41ce-b4c1-2f673d09425b","Type":"ContainerDied","Data":"6c88cb44506b2bebe3348f7665aa8d4cbbc47bdb5fbce7f8a76755269c9e1823"} Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.823261 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"bc54361e-5f34-4253-b91f-b1683e944191","Type":"ContainerStarted","Data":"e6713f981b12ee8623fc24a3159310d213021263913657306c24da8a342ccd1a"} Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.824790 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-fxxlf" event={"ID":"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e","Type":"ContainerStarted","Data":"86e197af024f90b7f8e4f0b756032a9a09c5fd06241f6ee983b0164d265ca464"} Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.826028 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"23095127-8b86-445a-8c32-1e6bc14bf05e","Type":"ContainerStarted","Data":"5d1af227a546798f00c079ebf2fcb81520a200698bcd1d0959a021cd15eea35d"} Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.827358 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"3b7599aa-7114-46c9-bf38-b6d735b75326","Type":"ContainerStarted","Data":"45b948098b16be556ad661750cce4e4c59d4eb55bf4a2b34bf0ebe3d475671d9"} Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.828651 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84bb9d8bd9-x9hjk" event={"ID":"c37bfb17-f77b-4572-9ff3-2c8b45839abe","Type":"ContainerDied","Data":"0322589fd7a76006967d328b54eeaaa874e89780df5c85fd2ff36732f04429b4"} Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.828676 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84bb9d8bd9-x9hjk" Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.829721 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-wzxwb" event={"ID":"8bfbbf41-033c-479e-b625-396378f8afa2","Type":"ContainerStarted","Data":"6d7f40bd9c1402eada25f27c0958d15150774e9af5936dda2dff719e3daeedb6"} Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.916920 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-pg82n"] Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.922970 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-pg82n"] Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.937465 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-x9hjk"] Jan 28 16:05:12 crc kubenswrapper[4811]: I0128 16:05:12.952284 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-x9hjk"] Jan 28 16:05:13 crc kubenswrapper[4811]: I0128 16:05:13.440848 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 28 16:05:13 crc kubenswrapper[4811]: W0128 16:05:13.667171 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podad905f29_3683_466d_8acd_192cc5ac0960.slice/crio-3f95d4f8426c76a308eeb892a389d11c6c75e84393c6b08b5c16880d9aed5118 WatchSource:0}: Error finding container 3f95d4f8426c76a308eeb892a389d11c6c75e84393c6b08b5c16880d9aed5118: Status 404 returned error can't find the container with id 3f95d4f8426c76a308eeb892a389d11c6c75e84393c6b08b5c16880d9aed5118 Jan 28 16:05:13 crc kubenswrapper[4811]: I0128 16:05:13.837990 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"ad905f29-3683-466d-8acd-192cc5ac0960","Type":"ContainerStarted","Data":"3f95d4f8426c76a308eeb892a389d11c6c75e84393c6b08b5c16880d9aed5118"} Jan 28 16:05:14 crc kubenswrapper[4811]: I0128 16:05:14.352569 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14b8d5c0-9e09-48de-8efa-12a3c053e9f8" path="/var/lib/kubelet/pods/14b8d5c0-9e09-48de-8efa-12a3c053e9f8/volumes" Jan 28 16:05:14 crc kubenswrapper[4811]: I0128 16:05:14.352956 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c37bfb17-f77b-4572-9ff3-2c8b45839abe" path="/var/lib/kubelet/pods/c37bfb17-f77b-4572-9ff3-2c8b45839abe/volumes" Jan 28 16:05:20 crc kubenswrapper[4811]: I0128 16:05:20.899031 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"156027c6-3cec-4317-8267-eb234c90af40","Type":"ContainerStarted","Data":"2737e6ad87982b7277c55b34f07400b82de02f83c09921c823cc3c4906fd86e9"} Jan 28 16:05:20 crc kubenswrapper[4811]: I0128 16:05:20.900799 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"bc54361e-5f34-4253-b91f-b1683e944191","Type":"ContainerStarted","Data":"dbed8cd5c1ccabc67fa742e55af4b04f80e26bca8d2979d43e905bb4d63ec6a5"} Jan 28 16:05:20 crc kubenswrapper[4811]: I0128 16:05:20.903808 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-fxxlf" event={"ID":"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e","Type":"ContainerStarted","Data":"ba856df2f3f7c7a7f9e7775dc89b697ebd4dd48e8a805217e5ee26242f8b0270"} Jan 28 16:05:20 crc kubenswrapper[4811]: I0128 16:05:20.903905 4811 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack/ovn-controller-fxxlf" Jan 28 16:05:20 crc kubenswrapper[4811]: I0128 16:05:20.905697 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"84b57c00-a800-4b82-98c7-8ebcc04c2ab6","Type":"ContainerStarted","Data":"9c930ce4914f5a68852c3724ae30e8283ace4f330ad36c534f8e4f690608fe20"} Jan 28 16:05:20 crc kubenswrapper[4811]: I0128 16:05:20.905803 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Jan 28 16:05:20 crc kubenswrapper[4811]: I0128 16:05:20.907535 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f1fdd831-6083-4581-9377-64690fda7f53","Type":"ContainerStarted","Data":"7c020406f2a1e5d49e8f348d463ba2141c15287a626315f1b18551e0680999d5"} Jan 28 16:05:20 crc kubenswrapper[4811]: I0128 16:05:20.907642 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Jan 28 16:05:20 crc kubenswrapper[4811]: I0128 16:05:20.909414 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744ffd65bc-7w9pg" event={"ID":"4b0dc77d-eab2-4a1a-b975-54dc6ec89453","Type":"ContainerStarted","Data":"080a798fc01f705d4fc8235fcd9c984afcfe8355695528a1ad8c298d999ea946"} Jan 28 16:05:20 crc kubenswrapper[4811]: I0128 16:05:20.910522 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-744ffd65bc-7w9pg" Jan 28 16:05:20 crc kubenswrapper[4811]: I0128 16:05:20.912817 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-wzxwb" event={"ID":"8bfbbf41-033c-479e-b625-396378f8afa2","Type":"ContainerStarted","Data":"5b49862714893762fa981a0ba02a64086097e365120730fcea92cbe6bae9fa05"} Jan 28 16:05:20 crc kubenswrapper[4811]: I0128 16:05:20.915056 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95f5f6995-6ztk9" event={"ID":"a6313906-9e80-41ce-b4c1-2f673d09425b","Type":"ContainerStarted","Data":"f4818e02d5dde7e04b51695018c6c3077d70d55037f32f37689cd0aa7713cdd7"} Jan 28 16:05:20 crc kubenswrapper[4811]: I0128 16:05:20.916050 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-95f5f6995-6ztk9" Jan 28 16:05:20 crc kubenswrapper[4811]: I0128 16:05:20.917442 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b","Type":"ContainerStarted","Data":"518785f09823d3db909d33a7b324f729682e76e6299d31a97670582c7114b971"} Jan 28 16:05:20 crc kubenswrapper[4811]: I0128 16:05:20.947083 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-fxxlf" podStartSLOduration=12.224066612 podStartE2EDuration="19.947064017s" podCreationTimestamp="2026-01-28 16:05:01 +0000 UTC" firstStartedPulling="2026-01-28 16:05:12.306493506 +0000 UTC m=+1205.060857089" lastFinishedPulling="2026-01-28 16:05:20.029490891 +0000 UTC m=+1212.783854494" observedRunningTime="2026-01-28 16:05:20.940978523 +0000 UTC m=+1213.695342116" watchObservedRunningTime="2026-01-28 16:05:20.947064017 +0000 UTC m=+1213.701427600" Jan 28 16:05:20 crc kubenswrapper[4811]: I0128 16:05:20.985933 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-95f5f6995-6ztk9" podStartSLOduration=18.489876025 podStartE2EDuration="28.985908676s" podCreationTimestamp="2026-01-28 16:04:52 +0000 UTC" 
firstStartedPulling="2026-01-28 16:05:01.079848022 +0000 UTC m=+1193.834211625" lastFinishedPulling="2026-01-28 16:05:11.575880693 +0000 UTC m=+1204.330244276" observedRunningTime="2026-01-28 16:05:20.978736402 +0000 UTC m=+1213.733099985" watchObservedRunningTime="2026-01-28 16:05:20.985908676 +0000 UTC m=+1213.740272259" Jan 28 16:05:21 crc kubenswrapper[4811]: I0128 16:05:21.036957 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=17.783363323 podStartE2EDuration="25.036934123s" podCreationTimestamp="2026-01-28 16:04:56 +0000 UTC" firstStartedPulling="2026-01-28 16:05:12.310962916 +0000 UTC m=+1205.065326499" lastFinishedPulling="2026-01-28 16:05:19.564533726 +0000 UTC m=+1212.318897299" observedRunningTime="2026-01-28 16:05:21.021125406 +0000 UTC m=+1213.775488999" watchObservedRunningTime="2026-01-28 16:05:21.036934123 +0000 UTC m=+1213.791297706" Jan 28 16:05:21 crc kubenswrapper[4811]: I0128 16:05:21.054228 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=13.406283778 podStartE2EDuration="23.054211059s" podCreationTimestamp="2026-01-28 16:04:58 +0000 UTC" firstStartedPulling="2026-01-28 16:05:10.120935138 +0000 UTC m=+1202.875298721" lastFinishedPulling="2026-01-28 16:05:19.768862429 +0000 UTC m=+1212.523226002" observedRunningTime="2026-01-28 16:05:21.051096055 +0000 UTC m=+1213.805459658" watchObservedRunningTime="2026-01-28 16:05:21.054211059 +0000 UTC m=+1213.808574632" Jan 28 16:05:21 crc kubenswrapper[4811]: I0128 16:05:21.229353 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-744ffd65bc-7w9pg" podStartSLOduration=11.351731421 podStartE2EDuration="29.229330464s" podCreationTimestamp="2026-01-28 16:04:52 +0000 UTC" firstStartedPulling="2026-01-28 16:04:53.357040014 +0000 UTC m=+1186.111403597" lastFinishedPulling="2026-01-28 16:05:11.234639057 +0000 UTC m=+1203.989002640" observedRunningTime="2026-01-28 16:05:21.226173069 +0000 UTC m=+1213.980536672" watchObservedRunningTime="2026-01-28 16:05:21.229330464 +0000 UTC m=+1213.983694047" Jan 28 16:05:21 crc kubenswrapper[4811]: I0128 16:05:21.928247 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"ad905f29-3683-466d-8acd-192cc5ac0960","Type":"ContainerStarted","Data":"a7b75f68bd55325c1892a8a68699e72299412f605399d3727dc7a8c78169fd1c"} Jan 28 16:05:21 crc kubenswrapper[4811]: I0128 16:05:21.930310 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"3b7599aa-7114-46c9-bf38-b6d735b75326","Type":"ContainerStarted","Data":"750be0d18f938176efd1aca389b486dcb19e5d7b9966a3f4fb7d1aa4cce062d6"} Jan 28 16:05:21 crc kubenswrapper[4811]: I0128 16:05:21.933006 4811 generic.go:334] "Generic (PLEG): container finished" podID="8bfbbf41-033c-479e-b625-396378f8afa2" containerID="5b49862714893762fa981a0ba02a64086097e365120730fcea92cbe6bae9fa05" exitCode=0 Jan 28 16:05:21 crc kubenswrapper[4811]: I0128 16:05:21.933109 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-wzxwb" event={"ID":"8bfbbf41-033c-479e-b625-396378f8afa2","Type":"ContainerDied","Data":"5b49862714893762fa981a0ba02a64086097e365120730fcea92cbe6bae9fa05"} Jan 28 16:05:21 crc kubenswrapper[4811]: I0128 16:05:21.933149 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-wzxwb" Jan 28 16:05:21 crc 
kubenswrapper[4811]: I0128 16:05:21.933161 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-wzxwb" event={"ID":"8bfbbf41-033c-479e-b625-396378f8afa2","Type":"ContainerStarted","Data":"0ff340a32e4bc187020c5b051ade227cf56575c72a89db98472087024a25e2bf"} Jan 28 16:05:21 crc kubenswrapper[4811]: I0128 16:05:21.933170 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-wzxwb" event={"ID":"8bfbbf41-033c-479e-b625-396378f8afa2","Type":"ContainerStarted","Data":"d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21"} Jan 28 16:05:21 crc kubenswrapper[4811]: I0128 16:05:21.933220 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-wzxwb" Jan 28 16:05:21 crc kubenswrapper[4811]: I0128 16:05:21.935022 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"23095127-8b86-445a-8c32-1e6bc14bf05e","Type":"ContainerStarted","Data":"28aa321bf0e3ef329793682970b67cddf5dcdb81c24e0b66b553354710937d33"} Jan 28 16:05:21 crc kubenswrapper[4811]: I0128 16:05:21.991174 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-wzxwb" podStartSLOduration=13.289233993 podStartE2EDuration="20.991151129s" podCreationTimestamp="2026-01-28 16:05:01 +0000 UTC" firstStartedPulling="2026-01-28 16:05:12.327547274 +0000 UTC m=+1205.081910857" lastFinishedPulling="2026-01-28 16:05:20.02946439 +0000 UTC m=+1212.783827993" observedRunningTime="2026-01-28 16:05:21.983888723 +0000 UTC m=+1214.738252306" watchObservedRunningTime="2026-01-28 16:05:21.991151129 +0000 UTC m=+1214.745514712" Jan 28 16:05:25 crc kubenswrapper[4811]: I0128 16:05:25.973332 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"bc54361e-5f34-4253-b91f-b1683e944191","Type":"ContainerStarted","Data":"9dbb3f5632b8a35166eed8631e1ad8ce2fea3e6580b10ef5e0e18c13b29de1d4"} Jan 28 16:05:25 crc kubenswrapper[4811]: I0128 16:05:25.976960 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"ad905f29-3683-466d-8acd-192cc5ac0960","Type":"ContainerStarted","Data":"fa6828f22b3be25a45cd83e03b0c456790ff1d215506294c5c624f55eae5a0fc"} Jan 28 16:05:25 crc kubenswrapper[4811]: I0128 16:05:25.997693 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=11.420054821 podStartE2EDuration="23.997667619s" podCreationTimestamp="2026-01-28 16:05:02 +0000 UTC" firstStartedPulling="2026-01-28 16:05:12.438512988 +0000 UTC m=+1205.192876571" lastFinishedPulling="2026-01-28 16:05:25.016125786 +0000 UTC m=+1217.770489369" observedRunningTime="2026-01-28 16:05:25.99439991 +0000 UTC m=+1218.748763513" watchObservedRunningTime="2026-01-28 16:05:25.997667619 +0000 UTC m=+1218.752031222" Jan 28 16:05:26 crc kubenswrapper[4811]: I0128 16:05:26.019748 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=9.661481675 podStartE2EDuration="21.019729213s" podCreationTimestamp="2026-01-28 16:05:05 +0000 UTC" firstStartedPulling="2026-01-28 16:05:13.680149268 +0000 UTC m=+1206.434512851" lastFinishedPulling="2026-01-28 16:05:25.038396796 +0000 UTC m=+1217.792760389" observedRunningTime="2026-01-28 16:05:26.016143037 +0000 UTC m=+1218.770506630" watchObservedRunningTime="2026-01-28 16:05:26.019729213 +0000 UTC m=+1218.774092796" Jan 28 16:05:26 
crc kubenswrapper[4811]: I0128 16:05:26.885851 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Jan 28 16:05:26 crc kubenswrapper[4811]: I0128 16:05:26.988867 4811 generic.go:334] "Generic (PLEG): container finished" podID="156027c6-3cec-4317-8267-eb234c90af40" containerID="2737e6ad87982b7277c55b34f07400b82de02f83c09921c823cc3c4906fd86e9" exitCode=0 Jan 28 16:05:26 crc kubenswrapper[4811]: I0128 16:05:26.988961 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"156027c6-3cec-4317-8267-eb234c90af40","Type":"ContainerDied","Data":"2737e6ad87982b7277c55b34f07400b82de02f83c09921c823cc3c4906fd86e9"} Jan 28 16:05:26 crc kubenswrapper[4811]: I0128 16:05:26.993292 4811 generic.go:334] "Generic (PLEG): container finished" podID="9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b" containerID="518785f09823d3db909d33a7b324f729682e76e6299d31a97670582c7114b971" exitCode=0 Jan 28 16:05:26 crc kubenswrapper[4811]: I0128 16:05:26.993523 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b","Type":"ContainerDied","Data":"518785f09823d3db909d33a7b324f729682e76e6299d31a97670582c7114b971"} Jan 28 16:05:27 crc kubenswrapper[4811]: I0128 16:05:27.099728 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Jan 28 16:05:27 crc kubenswrapper[4811]: I0128 16:05:27.480255 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Jan 28 16:05:27 crc kubenswrapper[4811]: I0128 16:05:27.525123 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Jan 28 16:05:27 crc kubenswrapper[4811]: I0128 16:05:27.813647 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-744ffd65bc-7w9pg" Jan 28 16:05:27 crc kubenswrapper[4811]: I0128 16:05:27.885644 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Jan 28 16:05:27 crc kubenswrapper[4811]: I0128 16:05:27.935587 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.008199 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b","Type":"ContainerStarted","Data":"b0391cee65f30aa023e4b8a6992aae9b85400f523a386a5570a75825a412741e"} Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.036657 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"156027c6-3cec-4317-8267-eb234c90af40","Type":"ContainerStarted","Data":"6cc40c82aa6779ede3cdbda3f05c0f2657e35a4da3ee3cc5d95911b9941c4dcd"} Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.037523 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.065359 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=24.652524399 podStartE2EDuration="33.065340375s" podCreationTimestamp="2026-01-28 16:04:55 +0000 UTC" firstStartedPulling="2026-01-28 16:05:11.625490452 +0000 UTC m=+1204.379854035" lastFinishedPulling="2026-01-28 16:05:20.038306428 +0000 UTC m=+1212.792670011" 
observedRunningTime="2026-01-28 16:05:28.06514656 +0000 UTC m=+1220.819510143" watchObservedRunningTime="2026-01-28 16:05:28.065340375 +0000 UTC m=+1220.819703958" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.089355 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.090620 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=26.360771958 podStartE2EDuration="34.090585927s" podCreationTimestamp="2026-01-28 16:04:54 +0000 UTC" firstStartedPulling="2026-01-28 16:05:12.310905325 +0000 UTC m=+1205.065268898" lastFinishedPulling="2026-01-28 16:05:20.040719274 +0000 UTC m=+1212.795082867" observedRunningTime="2026-01-28 16:05:28.085549801 +0000 UTC m=+1220.839913404" watchObservedRunningTime="2026-01-28 16:05:28.090585927 +0000 UTC m=+1220.844949510" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.095308 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.099582 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-95f5f6995-6ztk9" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.186380 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-7w9pg"] Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.186591 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-744ffd65bc-7w9pg" podUID="4b0dc77d-eab2-4a1a-b975-54dc6ec89453" containerName="dnsmasq-dns" containerID="cri-o://080a798fc01f705d4fc8235fcd9c984afcfe8355695528a1ad8c298d999ea946" gracePeriod=10 Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.446282 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7878659675-9spmf"] Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.448472 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7878659675-9spmf" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.458268 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.496273 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7878659675-9spmf"] Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.573910 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-d62hg"] Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.575714 4811 util.go:30] "No sandbox for pod can be found. 
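The "SyncLoop (probe)" entries trace each probe through its states: the ovsdbserver pods briefly report startup/unhealthy before flipping to started, after which readiness moves from "" (not yet probed) to ready, the transition that admits them to service endpoints. A minimal sketch, pattern illustrative, that collects the transition history per pod:

```python
import re

# Illustrative pattern for the 'SyncLoop (probe)' lines above.
PROBE = re.compile(r'"SyncLoop \(probe\)" probe="(\w+)" status="(\w*)" pod="([^"]+)"')

def probe_history(lines):
    """Map pod -> ordered list of (probe, status) transitions."""
    history = {}
    for line in lines:
        if m := PROBE.search(line):
            probe, status, pod = m.groups()
            history.setdefault(pod, []).append((probe, status or "unknown"))
    return history
```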
Need to start a new one" pod="openstack/ovn-controller-metrics-d62hg" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.581930 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.582163 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-d62hg"] Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.624071 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40de3f43-8a7c-48a2-a326-5686170e633f-config\") pod \"dnsmasq-dns-7878659675-9spmf\" (UID: \"40de3f43-8a7c-48a2-a326-5686170e633f\") " pod="openstack/dnsmasq-dns-7878659675-9spmf" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.624215 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/40de3f43-8a7c-48a2-a326-5686170e633f-ovsdbserver-nb\") pod \"dnsmasq-dns-7878659675-9spmf\" (UID: \"40de3f43-8a7c-48a2-a326-5686170e633f\") " pod="openstack/dnsmasq-dns-7878659675-9spmf" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.624275 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e79a2a6-54aa-4533-95ee-9deb5c46811b-combined-ca-bundle\") pod \"ovn-controller-metrics-d62hg\" (UID: \"3e79a2a6-54aa-4533-95ee-9deb5c46811b\") " pod="openstack/ovn-controller-metrics-d62hg" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.624404 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtr46\" (UniqueName: \"kubernetes.io/projected/3e79a2a6-54aa-4533-95ee-9deb5c46811b-kube-api-access-mtr46\") pod \"ovn-controller-metrics-d62hg\" (UID: \"3e79a2a6-54aa-4533-95ee-9deb5c46811b\") " pod="openstack/ovn-controller-metrics-d62hg" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.624425 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e79a2a6-54aa-4533-95ee-9deb5c46811b-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-d62hg\" (UID: \"3e79a2a6-54aa-4533-95ee-9deb5c46811b\") " pod="openstack/ovn-controller-metrics-d62hg" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.624533 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/3e79a2a6-54aa-4533-95ee-9deb5c46811b-ovs-rundir\") pod \"ovn-controller-metrics-d62hg\" (UID: \"3e79a2a6-54aa-4533-95ee-9deb5c46811b\") " pod="openstack/ovn-controller-metrics-d62hg" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.624561 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e79a2a6-54aa-4533-95ee-9deb5c46811b-config\") pod \"ovn-controller-metrics-d62hg\" (UID: \"3e79a2a6-54aa-4533-95ee-9deb5c46811b\") " pod="openstack/ovn-controller-metrics-d62hg" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.624612 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/3e79a2a6-54aa-4533-95ee-9deb5c46811b-ovn-rundir\") pod 
\"ovn-controller-metrics-d62hg\" (UID: \"3e79a2a6-54aa-4533-95ee-9deb5c46811b\") " pod="openstack/ovn-controller-metrics-d62hg" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.624640 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/40de3f43-8a7c-48a2-a326-5686170e633f-dns-svc\") pod \"dnsmasq-dns-7878659675-9spmf\" (UID: \"40de3f43-8a7c-48a2-a326-5686170e633f\") " pod="openstack/dnsmasq-dns-7878659675-9spmf" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.624764 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zn78g\" (UniqueName: \"kubernetes.io/projected/40de3f43-8a7c-48a2-a326-5686170e633f-kube-api-access-zn78g\") pod \"dnsmasq-dns-7878659675-9spmf\" (UID: \"40de3f43-8a7c-48a2-a326-5686170e633f\") " pod="openstack/dnsmasq-dns-7878659675-9spmf" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.726803 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtr46\" (UniqueName: \"kubernetes.io/projected/3e79a2a6-54aa-4533-95ee-9deb5c46811b-kube-api-access-mtr46\") pod \"ovn-controller-metrics-d62hg\" (UID: \"3e79a2a6-54aa-4533-95ee-9deb5c46811b\") " pod="openstack/ovn-controller-metrics-d62hg" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.726850 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e79a2a6-54aa-4533-95ee-9deb5c46811b-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-d62hg\" (UID: \"3e79a2a6-54aa-4533-95ee-9deb5c46811b\") " pod="openstack/ovn-controller-metrics-d62hg" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.726868 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/3e79a2a6-54aa-4533-95ee-9deb5c46811b-ovs-rundir\") pod \"ovn-controller-metrics-d62hg\" (UID: \"3e79a2a6-54aa-4533-95ee-9deb5c46811b\") " pod="openstack/ovn-controller-metrics-d62hg" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.726886 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e79a2a6-54aa-4533-95ee-9deb5c46811b-config\") pod \"ovn-controller-metrics-d62hg\" (UID: \"3e79a2a6-54aa-4533-95ee-9deb5c46811b\") " pod="openstack/ovn-controller-metrics-d62hg" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.726908 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/3e79a2a6-54aa-4533-95ee-9deb5c46811b-ovn-rundir\") pod \"ovn-controller-metrics-d62hg\" (UID: \"3e79a2a6-54aa-4533-95ee-9deb5c46811b\") " pod="openstack/ovn-controller-metrics-d62hg" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.726925 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/40de3f43-8a7c-48a2-a326-5686170e633f-dns-svc\") pod \"dnsmasq-dns-7878659675-9spmf\" (UID: \"40de3f43-8a7c-48a2-a326-5686170e633f\") " pod="openstack/dnsmasq-dns-7878659675-9spmf" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.726955 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zn78g\" (UniqueName: \"kubernetes.io/projected/40de3f43-8a7c-48a2-a326-5686170e633f-kube-api-access-zn78g\") pod 
\"dnsmasq-dns-7878659675-9spmf\" (UID: \"40de3f43-8a7c-48a2-a326-5686170e633f\") " pod="openstack/dnsmasq-dns-7878659675-9spmf" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.727004 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40de3f43-8a7c-48a2-a326-5686170e633f-config\") pod \"dnsmasq-dns-7878659675-9spmf\" (UID: \"40de3f43-8a7c-48a2-a326-5686170e633f\") " pod="openstack/dnsmasq-dns-7878659675-9spmf" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.727034 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/40de3f43-8a7c-48a2-a326-5686170e633f-ovsdbserver-nb\") pod \"dnsmasq-dns-7878659675-9spmf\" (UID: \"40de3f43-8a7c-48a2-a326-5686170e633f\") " pod="openstack/dnsmasq-dns-7878659675-9spmf" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.727056 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e79a2a6-54aa-4533-95ee-9deb5c46811b-combined-ca-bundle\") pod \"ovn-controller-metrics-d62hg\" (UID: \"3e79a2a6-54aa-4533-95ee-9deb5c46811b\") " pod="openstack/ovn-controller-metrics-d62hg" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.732991 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/3e79a2a6-54aa-4533-95ee-9deb5c46811b-ovn-rundir\") pod \"ovn-controller-metrics-d62hg\" (UID: \"3e79a2a6-54aa-4533-95ee-9deb5c46811b\") " pod="openstack/ovn-controller-metrics-d62hg" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.735009 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/3e79a2a6-54aa-4533-95ee-9deb5c46811b-ovs-rundir\") pod \"ovn-controller-metrics-d62hg\" (UID: \"3e79a2a6-54aa-4533-95ee-9deb5c46811b\") " pod="openstack/ovn-controller-metrics-d62hg" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.736298 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e79a2a6-54aa-4533-95ee-9deb5c46811b-config\") pod \"ovn-controller-metrics-d62hg\" (UID: \"3e79a2a6-54aa-4533-95ee-9deb5c46811b\") " pod="openstack/ovn-controller-metrics-d62hg" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.736869 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/40de3f43-8a7c-48a2-a326-5686170e633f-dns-svc\") pod \"dnsmasq-dns-7878659675-9spmf\" (UID: \"40de3f43-8a7c-48a2-a326-5686170e633f\") " pod="openstack/dnsmasq-dns-7878659675-9spmf" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.737128 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40de3f43-8a7c-48a2-a326-5686170e633f-config\") pod \"dnsmasq-dns-7878659675-9spmf\" (UID: \"40de3f43-8a7c-48a2-a326-5686170e633f\") " pod="openstack/dnsmasq-dns-7878659675-9spmf" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.737642 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/40de3f43-8a7c-48a2-a326-5686170e633f-ovsdbserver-nb\") pod \"dnsmasq-dns-7878659675-9spmf\" (UID: \"40de3f43-8a7c-48a2-a326-5686170e633f\") " pod="openstack/dnsmasq-dns-7878659675-9spmf" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 
16:05:28.768259 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e79a2a6-54aa-4533-95ee-9deb5c46811b-combined-ca-bundle\") pod \"ovn-controller-metrics-d62hg\" (UID: \"3e79a2a6-54aa-4533-95ee-9deb5c46811b\") " pod="openstack/ovn-controller-metrics-d62hg" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.778891 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtr46\" (UniqueName: \"kubernetes.io/projected/3e79a2a6-54aa-4533-95ee-9deb5c46811b-kube-api-access-mtr46\") pod \"ovn-controller-metrics-d62hg\" (UID: \"3e79a2a6-54aa-4533-95ee-9deb5c46811b\") " pod="openstack/ovn-controller-metrics-d62hg" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.787396 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e79a2a6-54aa-4533-95ee-9deb5c46811b-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-d62hg\" (UID: \"3e79a2a6-54aa-4533-95ee-9deb5c46811b\") " pod="openstack/ovn-controller-metrics-d62hg" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.789505 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.791231 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.805354 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.805700 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.806633 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-8bnjk" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.806848 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.810018 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zn78g\" (UniqueName: \"kubernetes.io/projected/40de3f43-8a7c-48a2-a326-5686170e633f-kube-api-access-zn78g\") pod \"dnsmasq-dns-7878659675-9spmf\" (UID: \"40de3f43-8a7c-48a2-a326-5686170e633f\") " pod="openstack/dnsmasq-dns-7878659675-9spmf" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.816568 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7878659675-9spmf"] Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.817750 4811 util.go:30] "No sandbox for pod can be found. 
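
The volume lines above all walk the same reconciler sequence per volume: operationExecutor.VerifyControllerAttachedVolume started, then operationExecutor.MountVolume started, then MountVolume.SetUp succeeded. A minimal sketch that groups those phases by (pod, volume) and flags anything that never reaches SetUp; it assumes the log has been extracted to a local kubelet.log (hypothetical path) with one entry per line:

    import re
    from collections import defaultdict

    # The structured-log quoting leaves the volume name as \"name\" in the raw
    # line, hence the escaped quotes in the pattern.
    pat = re.compile(
        r'(VerifyControllerAttachedVolume started|MountVolume started|'
        r'MountVolume\.SetUp succeeded) for volume \\"([^\\"]+)\\".*pod="([^"]+)"')

    state = defaultdict(set)
    with open("kubelet.log") as f:          # hypothetical path
        for line in f:
            m = pat.search(line)
            if m:
                phase, volume, pod = m.groups()
                state[(pod, volume)].add(phase)

    for (pod, volume), phases in sorted(state.items()):
        if "MountVolume.SetUp succeeded" not in phases:
            print(f"{pod} / {volume}: stalled after {sorted(phases)}")
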
Need to start a new one" pod="openstack/dnsmasq-dns-7878659675-9spmf" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.836165 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dd44d16-5b30-493c-9dd2-1ba856a4393a-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\") " pod="openstack/ovn-northd-0" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.836250 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6dd44d16-5b30-493c-9dd2-1ba856a4393a-scripts\") pod \"ovn-northd-0\" (UID: \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\") " pod="openstack/ovn-northd-0" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.836318 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rb8t8\" (UniqueName: \"kubernetes.io/projected/6dd44d16-5b30-493c-9dd2-1ba856a4393a-kube-api-access-rb8t8\") pod \"ovn-northd-0\" (UID: \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\") " pod="openstack/ovn-northd-0" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.836367 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6dd44d16-5b30-493c-9dd2-1ba856a4393a-config\") pod \"ovn-northd-0\" (UID: \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\") " pod="openstack/ovn-northd-0" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.836393 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6dd44d16-5b30-493c-9dd2-1ba856a4393a-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\") " pod="openstack/ovn-northd-0" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.836451 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dd44d16-5b30-493c-9dd2-1ba856a4393a-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\") " pod="openstack/ovn-northd-0" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.836486 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dd44d16-5b30-493c-9dd2-1ba856a4393a-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\") " pod="openstack/ovn-northd-0" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.866950 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.932566 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-lbxpq"] Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.938443 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87ad91b9-ba19-424b-bc0c-85ff9d1b2de2-config\") pod \"dnsmasq-dns-586b989cdc-lbxpq\" (UID: \"87ad91b9-ba19-424b-bc0c-85ff9d1b2de2\") " pod="openstack/dnsmasq-dns-586b989cdc-lbxpq" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.938506 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-rb8t8\" (UniqueName: \"kubernetes.io/projected/6dd44d16-5b30-493c-9dd2-1ba856a4393a-kube-api-access-rb8t8\") pod \"ovn-northd-0\" (UID: \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\") " pod="openstack/ovn-northd-0" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.938534 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87ad91b9-ba19-424b-bc0c-85ff9d1b2de2-dns-svc\") pod \"dnsmasq-dns-586b989cdc-lbxpq\" (UID: \"87ad91b9-ba19-424b-bc0c-85ff9d1b2de2\") " pod="openstack/dnsmasq-dns-586b989cdc-lbxpq" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.938561 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87ad91b9-ba19-424b-bc0c-85ff9d1b2de2-ovsdbserver-nb\") pod \"dnsmasq-dns-586b989cdc-lbxpq\" (UID: \"87ad91b9-ba19-424b-bc0c-85ff9d1b2de2\") " pod="openstack/dnsmasq-dns-586b989cdc-lbxpq" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.938584 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6dd44d16-5b30-493c-9dd2-1ba856a4393a-config\") pod \"ovn-northd-0\" (UID: \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\") " pod="openstack/ovn-northd-0" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.938603 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6dd44d16-5b30-493c-9dd2-1ba856a4393a-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\") " pod="openstack/ovn-northd-0" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.938629 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dd44d16-5b30-493c-9dd2-1ba856a4393a-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\") " pod="openstack/ovn-northd-0" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.938654 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dd44d16-5b30-493c-9dd2-1ba856a4393a-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\") " pod="openstack/ovn-northd-0" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.938675 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87ad91b9-ba19-424b-bc0c-85ff9d1b2de2-ovsdbserver-sb\") pod \"dnsmasq-dns-586b989cdc-lbxpq\" (UID: \"87ad91b9-ba19-424b-bc0c-85ff9d1b2de2\") " pod="openstack/dnsmasq-dns-586b989cdc-lbxpq" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.938694 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9jd7\" (UniqueName: \"kubernetes.io/projected/87ad91b9-ba19-424b-bc0c-85ff9d1b2de2-kube-api-access-l9jd7\") pod \"dnsmasq-dns-586b989cdc-lbxpq\" (UID: \"87ad91b9-ba19-424b-bc0c-85ff9d1b2de2\") " pod="openstack/dnsmasq-dns-586b989cdc-lbxpq" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.938725 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dd44d16-5b30-493c-9dd2-1ba856a4393a-metrics-certs-tls-certs\") pod 
\"ovn-northd-0\" (UID: \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\") " pod="openstack/ovn-northd-0" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.938760 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6dd44d16-5b30-493c-9dd2-1ba856a4393a-scripts\") pod \"ovn-northd-0\" (UID: \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\") " pod="openstack/ovn-northd-0" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.939909 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6dd44d16-5b30-493c-9dd2-1ba856a4393a-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\") " pod="openstack/ovn-northd-0" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.943514 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-586b989cdc-lbxpq" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.945217 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dd44d16-5b30-493c-9dd2-1ba856a4393a-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\") " pod="openstack/ovn-northd-0" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.947315 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.955120 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6dd44d16-5b30-493c-9dd2-1ba856a4393a-scripts\") pod \"ovn-northd-0\" (UID: \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\") " pod="openstack/ovn-northd-0" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.955239 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6dd44d16-5b30-493c-9dd2-1ba856a4393a-config\") pod \"ovn-northd-0\" (UID: \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\") " pod="openstack/ovn-northd-0" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.960702 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-d62hg" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.961709 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dd44d16-5b30-493c-9dd2-1ba856a4393a-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\") " pod="openstack/ovn-northd-0" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.961899 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dd44d16-5b30-493c-9dd2-1ba856a4393a-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\") " pod="openstack/ovn-northd-0" Jan 28 16:05:28 crc kubenswrapper[4811]: I0128 16:05:28.967902 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-lbxpq"] Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.013334 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.022565 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rb8t8\" (UniqueName: \"kubernetes.io/projected/6dd44d16-5b30-493c-9dd2-1ba856a4393a-kube-api-access-rb8t8\") pod \"ovn-northd-0\" (UID: \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\") " pod="openstack/ovn-northd-0" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.040691 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.042324 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87ad91b9-ba19-424b-bc0c-85ff9d1b2de2-ovsdbserver-sb\") pod \"dnsmasq-dns-586b989cdc-lbxpq\" (UID: \"87ad91b9-ba19-424b-bc0c-85ff9d1b2de2\") " pod="openstack/dnsmasq-dns-586b989cdc-lbxpq" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.042360 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9jd7\" (UniqueName: \"kubernetes.io/projected/87ad91b9-ba19-424b-bc0c-85ff9d1b2de2-kube-api-access-l9jd7\") pod \"dnsmasq-dns-586b989cdc-lbxpq\" (UID: \"87ad91b9-ba19-424b-bc0c-85ff9d1b2de2\") " pod="openstack/dnsmasq-dns-586b989cdc-lbxpq" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.042553 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87ad91b9-ba19-424b-bc0c-85ff9d1b2de2-config\") pod \"dnsmasq-dns-586b989cdc-lbxpq\" (UID: \"87ad91b9-ba19-424b-bc0c-85ff9d1b2de2\") " pod="openstack/dnsmasq-dns-586b989cdc-lbxpq" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.042600 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87ad91b9-ba19-424b-bc0c-85ff9d1b2de2-dns-svc\") pod \"dnsmasq-dns-586b989cdc-lbxpq\" (UID: \"87ad91b9-ba19-424b-bc0c-85ff9d1b2de2\") " pod="openstack/dnsmasq-dns-586b989cdc-lbxpq" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.042649 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87ad91b9-ba19-424b-bc0c-85ff9d1b2de2-ovsdbserver-nb\") pod \"dnsmasq-dns-586b989cdc-lbxpq\" (UID: \"87ad91b9-ba19-424b-bc0c-85ff9d1b2de2\") " 
pod="openstack/dnsmasq-dns-586b989cdc-lbxpq" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.044019 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87ad91b9-ba19-424b-bc0c-85ff9d1b2de2-ovsdbserver-sb\") pod \"dnsmasq-dns-586b989cdc-lbxpq\" (UID: \"87ad91b9-ba19-424b-bc0c-85ff9d1b2de2\") " pod="openstack/dnsmasq-dns-586b989cdc-lbxpq" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.046975 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87ad91b9-ba19-424b-bc0c-85ff9d1b2de2-dns-svc\") pod \"dnsmasq-dns-586b989cdc-lbxpq\" (UID: \"87ad91b9-ba19-424b-bc0c-85ff9d1b2de2\") " pod="openstack/dnsmasq-dns-586b989cdc-lbxpq" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.047935 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87ad91b9-ba19-424b-bc0c-85ff9d1b2de2-ovsdbserver-nb\") pod \"dnsmasq-dns-586b989cdc-lbxpq\" (UID: \"87ad91b9-ba19-424b-bc0c-85ff9d1b2de2\") " pod="openstack/dnsmasq-dns-586b989cdc-lbxpq" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.057070 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87ad91b9-ba19-424b-bc0c-85ff9d1b2de2-config\") pod \"dnsmasq-dns-586b989cdc-lbxpq\" (UID: \"87ad91b9-ba19-424b-bc0c-85ff9d1b2de2\") " pod="openstack/dnsmasq-dns-586b989cdc-lbxpq" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.100100 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9jd7\" (UniqueName: \"kubernetes.io/projected/87ad91b9-ba19-424b-bc0c-85ff9d1b2de2-kube-api-access-l9jd7\") pod \"dnsmasq-dns-586b989cdc-lbxpq\" (UID: \"87ad91b9-ba19-424b-bc0c-85ff9d1b2de2\") " pod="openstack/dnsmasq-dns-586b989cdc-lbxpq" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.122074 4811 generic.go:334] "Generic (PLEG): container finished" podID="4b0dc77d-eab2-4a1a-b975-54dc6ec89453" containerID="080a798fc01f705d4fc8235fcd9c984afcfe8355695528a1ad8c298d999ea946" exitCode=0 Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.126381 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-744ffd65bc-7w9pg" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.126662 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744ffd65bc-7w9pg" event={"ID":"4b0dc77d-eab2-4a1a-b975-54dc6ec89453","Type":"ContainerDied","Data":"080a798fc01f705d4fc8235fcd9c984afcfe8355695528a1ad8c298d999ea946"} Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.126687 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744ffd65bc-7w9pg" event={"ID":"4b0dc77d-eab2-4a1a-b975-54dc6ec89453","Type":"ContainerDied","Data":"320033b6768db17ac9ec617c4e4152991afee509ba371258272034ab096f3a9b"} Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.126705 4811 scope.go:117] "RemoveContainer" containerID="080a798fc01f705d4fc8235fcd9c984afcfe8355695528a1ad8c298d999ea946" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.147920 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b0dc77d-eab2-4a1a-b975-54dc6ec89453-config\") pod \"4b0dc77d-eab2-4a1a-b975-54dc6ec89453\" (UID: \"4b0dc77d-eab2-4a1a-b975-54dc6ec89453\") " Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.255074 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-lbxpq"] Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.255763 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-586b989cdc-lbxpq" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.257041 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b0dc77d-eab2-4a1a-b975-54dc6ec89453-dns-svc\") pod \"4b0dc77d-eab2-4a1a-b975-54dc6ec89453\" (UID: \"4b0dc77d-eab2-4a1a-b975-54dc6ec89453\") " Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.257180 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fb8tr\" (UniqueName: \"kubernetes.io/projected/4b0dc77d-eab2-4a1a-b975-54dc6ec89453-kube-api-access-fb8tr\") pod \"4b0dc77d-eab2-4a1a-b975-54dc6ec89453\" (UID: \"4b0dc77d-eab2-4a1a-b975-54dc6ec89453\") " Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.273490 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b0dc77d-eab2-4a1a-b975-54dc6ec89453-kube-api-access-fb8tr" (OuterVolumeSpecName: "kube-api-access-fb8tr") pod "4b0dc77d-eab2-4a1a-b975-54dc6ec89453" (UID: "4b0dc77d-eab2-4a1a-b975-54dc6ec89453"). InnerVolumeSpecName "kube-api-access-fb8tr". 
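
The PLEG pair above is the usual shape of a teardown: one ContainerDied for the dnsmasq-dns container (080a79...) and one for its sandbox (320033...), after which RemoveContainer and volume teardown begin. The event={...} fragments are regular enough to pull a pod's lifecycle out directly; a sketch, assuming the same hypothetical kubelet.log:

    import re

    # PLEG events are logged as:
    #   pod="ns/name" event={"ID":"<pod UID>","Type":"ContainerStarted|ContainerDied","Data":"<id>"}
    ev = re.compile(r'pod="([^"]+)" event={"ID":"[0-9a-f-]+","Type":"(\w+)","Data":"([0-9a-f]+)"}')

    with open("kubelet.log") as f:          # hypothetical path
        for line in f:
            m = ev.search(line)
            if m:
                pod, etype, obj = m.groups()
                print(f"{pod:55s} {etype:17s} {obj[:12]}")
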
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.297879 4811 scope.go:117] "RemoveContainer" containerID="62e314ca91cc581bb4d8cb13ea9ec980df2f0717cce63abe2c195e2b884825f1" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.350739 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67fdf7998c-kds67"] Jan 28 16:05:29 crc kubenswrapper[4811]: E0128 16:05:29.353272 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b0dc77d-eab2-4a1a-b975-54dc6ec89453" containerName="dnsmasq-dns" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.353320 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b0dc77d-eab2-4a1a-b975-54dc6ec89453" containerName="dnsmasq-dns" Jan 28 16:05:29 crc kubenswrapper[4811]: E0128 16:05:29.353343 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b0dc77d-eab2-4a1a-b975-54dc6ec89453" containerName="init" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.353350 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b0dc77d-eab2-4a1a-b975-54dc6ec89453" containerName="init" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.353578 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b0dc77d-eab2-4a1a-b975-54dc6ec89453" containerName="dnsmasq-dns" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.356249 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67fdf7998c-kds67" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.381995 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fb8tr\" (UniqueName: \"kubernetes.io/projected/4b0dc77d-eab2-4a1a-b975-54dc6ec89453-kube-api-access-fb8tr\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.405232 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b0dc77d-eab2-4a1a-b975-54dc6ec89453-config" (OuterVolumeSpecName: "config") pod "4b0dc77d-eab2-4a1a-b975-54dc6ec89453" (UID: "4b0dc77d-eab2-4a1a-b975-54dc6ec89453"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.499585 4811 scope.go:117] "RemoveContainer" containerID="080a798fc01f705d4fc8235fcd9c984afcfe8355695528a1ad8c298d999ea946" Jan 28 16:05:29 crc kubenswrapper[4811]: E0128 16:05:29.505851 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"080a798fc01f705d4fc8235fcd9c984afcfe8355695528a1ad8c298d999ea946\": container with ID starting with 080a798fc01f705d4fc8235fcd9c984afcfe8355695528a1ad8c298d999ea946 not found: ID does not exist" containerID="080a798fc01f705d4fc8235fcd9c984afcfe8355695528a1ad8c298d999ea946" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.506028 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"080a798fc01f705d4fc8235fcd9c984afcfe8355695528a1ad8c298d999ea946"} err="failed to get container status \"080a798fc01f705d4fc8235fcd9c984afcfe8355695528a1ad8c298d999ea946\": rpc error: code = NotFound desc = could not find container \"080a798fc01f705d4fc8235fcd9c984afcfe8355695528a1ad8c298d999ea946\": container with ID starting with 080a798fc01f705d4fc8235fcd9c984afcfe8355695528a1ad8c298d999ea946 not found: ID does not exist" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.506111 4811 scope.go:117] "RemoveContainer" containerID="62e314ca91cc581bb4d8cb13ea9ec980df2f0717cce63abe2c195e2b884825f1" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.508291 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b0dc77d-eab2-4a1a-b975-54dc6ec89453-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4b0dc77d-eab2-4a1a-b975-54dc6ec89453" (UID: "4b0dc77d-eab2-4a1a-b975-54dc6ec89453"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:05:29 crc kubenswrapper[4811]: E0128 16:05:29.517632 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62e314ca91cc581bb4d8cb13ea9ec980df2f0717cce63abe2c195e2b884825f1\": container with ID starting with 62e314ca91cc581bb4d8cb13ea9ec980df2f0717cce63abe2c195e2b884825f1 not found: ID does not exist" containerID="62e314ca91cc581bb4d8cb13ea9ec980df2f0717cce63abe2c195e2b884825f1" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.521994 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62e314ca91cc581bb4d8cb13ea9ec980df2f0717cce63abe2c195e2b884825f1"} err="failed to get container status \"62e314ca91cc581bb4d8cb13ea9ec980df2f0717cce63abe2c195e2b884825f1\": rpc error: code = NotFound desc = could not find container \"62e314ca91cc581bb4d8cb13ea9ec980df2f0717cce63abe2c195e2b884825f1\": container with ID starting with 62e314ca91cc581bb4d8cb13ea9ec980df2f0717cce63abe2c195e2b884825f1 not found: ID does not exist" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.555347 4811 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b0dc77d-eab2-4a1a-b975-54dc6ec89453-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.555937 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b0dc77d-eab2-4a1a-b975-54dc6ec89453-config\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.563405 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67fdf7998c-kds67"] Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.657873 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a3730224-7295-4fea-bd2e-67cad0392474-ovsdbserver-nb\") pod \"dnsmasq-dns-67fdf7998c-kds67\" (UID: \"a3730224-7295-4fea-bd2e-67cad0392474\") " pod="openstack/dnsmasq-dns-67fdf7998c-kds67" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.658117 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thgqs\" (UniqueName: \"kubernetes.io/projected/a3730224-7295-4fea-bd2e-67cad0392474-kube-api-access-thgqs\") pod \"dnsmasq-dns-67fdf7998c-kds67\" (UID: \"a3730224-7295-4fea-bd2e-67cad0392474\") " pod="openstack/dnsmasq-dns-67fdf7998c-kds67" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.658142 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a3730224-7295-4fea-bd2e-67cad0392474-ovsdbserver-sb\") pod \"dnsmasq-dns-67fdf7998c-kds67\" (UID: \"a3730224-7295-4fea-bd2e-67cad0392474\") " pod="openstack/dnsmasq-dns-67fdf7998c-kds67" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.658413 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3730224-7295-4fea-bd2e-67cad0392474-dns-svc\") pod \"dnsmasq-dns-67fdf7998c-kds67\" (UID: \"a3730224-7295-4fea-bd2e-67cad0392474\") " pod="openstack/dnsmasq-dns-67fdf7998c-kds67" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.658803 4811 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3730224-7295-4fea-bd2e-67cad0392474-config\") pod \"dnsmasq-dns-67fdf7998c-kds67\" (UID: \"a3730224-7295-4fea-bd2e-67cad0392474\") " pod="openstack/dnsmasq-dns-67fdf7998c-kds67" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.760257 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3730224-7295-4fea-bd2e-67cad0392474-config\") pod \"dnsmasq-dns-67fdf7998c-kds67\" (UID: \"a3730224-7295-4fea-bd2e-67cad0392474\") " pod="openstack/dnsmasq-dns-67fdf7998c-kds67" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.760718 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a3730224-7295-4fea-bd2e-67cad0392474-ovsdbserver-nb\") pod \"dnsmasq-dns-67fdf7998c-kds67\" (UID: \"a3730224-7295-4fea-bd2e-67cad0392474\") " pod="openstack/dnsmasq-dns-67fdf7998c-kds67" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.760795 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thgqs\" (UniqueName: \"kubernetes.io/projected/a3730224-7295-4fea-bd2e-67cad0392474-kube-api-access-thgqs\") pod \"dnsmasq-dns-67fdf7998c-kds67\" (UID: \"a3730224-7295-4fea-bd2e-67cad0392474\") " pod="openstack/dnsmasq-dns-67fdf7998c-kds67" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.760826 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a3730224-7295-4fea-bd2e-67cad0392474-ovsdbserver-sb\") pod \"dnsmasq-dns-67fdf7998c-kds67\" (UID: \"a3730224-7295-4fea-bd2e-67cad0392474\") " pod="openstack/dnsmasq-dns-67fdf7998c-kds67" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.760865 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3730224-7295-4fea-bd2e-67cad0392474-dns-svc\") pod \"dnsmasq-dns-67fdf7998c-kds67\" (UID: \"a3730224-7295-4fea-bd2e-67cad0392474\") " pod="openstack/dnsmasq-dns-67fdf7998c-kds67" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.761286 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a3730224-7295-4fea-bd2e-67cad0392474-ovsdbserver-nb\") pod \"dnsmasq-dns-67fdf7998c-kds67\" (UID: \"a3730224-7295-4fea-bd2e-67cad0392474\") " pod="openstack/dnsmasq-dns-67fdf7998c-kds67" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.761684 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a3730224-7295-4fea-bd2e-67cad0392474-ovsdbserver-sb\") pod \"dnsmasq-dns-67fdf7998c-kds67\" (UID: \"a3730224-7295-4fea-bd2e-67cad0392474\") " pod="openstack/dnsmasq-dns-67fdf7998c-kds67" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.761823 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3730224-7295-4fea-bd2e-67cad0392474-dns-svc\") pod \"dnsmasq-dns-67fdf7998c-kds67\" (UID: \"a3730224-7295-4fea-bd2e-67cad0392474\") " pod="openstack/dnsmasq-dns-67fdf7998c-kds67" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.762093 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/a3730224-7295-4fea-bd2e-67cad0392474-config\") pod \"dnsmasq-dns-67fdf7998c-kds67\" (UID: \"a3730224-7295-4fea-bd2e-67cad0392474\") " pod="openstack/dnsmasq-dns-67fdf7998c-kds67" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.785616 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thgqs\" (UniqueName: \"kubernetes.io/projected/a3730224-7295-4fea-bd2e-67cad0392474-kube-api-access-thgqs\") pod \"dnsmasq-dns-67fdf7998c-kds67\" (UID: \"a3730224-7295-4fea-bd2e-67cad0392474\") " pod="openstack/dnsmasq-dns-67fdf7998c-kds67" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.872619 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67fdf7998c-kds67" Jan 28 16:05:29 crc kubenswrapper[4811]: I0128 16:05:29.903653 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7878659675-9spmf"] Jan 28 16:05:29 crc kubenswrapper[4811]: W0128 16:05:29.914510 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod40de3f43_8a7c_48a2_a326_5686170e633f.slice/crio-68bb6b51431d2138d138cde8b5b8586391acec9afca34d2c64f698f40f5a374c WatchSource:0}: Error finding container 68bb6b51431d2138d138cde8b5b8586391acec9afca34d2c64f698f40f5a374c: Status 404 returned error can't find the container with id 68bb6b51431d2138d138cde8b5b8586391acec9afca34d2c64f698f40f5a374c Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.022685 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-d62hg"] Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.033172 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-lbxpq"] Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.053491 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Jan 28 16:05:30 crc kubenswrapper[4811]: W0128 16:05:30.058841 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod87ad91b9_ba19_424b_bc0c_85ff9d1b2de2.slice/crio-95ae29b88af6c8f1548ea07e7c334dd279d00394edc6d34b62f8412d387937a0 WatchSource:0}: Error finding container 95ae29b88af6c8f1548ea07e7c334dd279d00394edc6d34b62f8412d387937a0: Status 404 returned error can't find the container with id 95ae29b88af6c8f1548ea07e7c334dd279d00394edc6d34b62f8412d387937a0 Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.137081 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586b989cdc-lbxpq" event={"ID":"87ad91b9-ba19-424b-bc0c-85ff9d1b2de2","Type":"ContainerStarted","Data":"95ae29b88af6c8f1548ea07e7c334dd279d00394edc6d34b62f8412d387937a0"} Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.138472 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7878659675-9spmf" event={"ID":"40de3f43-8a7c-48a2-a326-5686170e633f","Type":"ContainerStarted","Data":"68bb6b51431d2138d138cde8b5b8586391acec9afca34d2c64f698f40f5a374c"} Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.139276 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-744ffd65bc-7w9pg" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.141179 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-d62hg" event={"ID":"3e79a2a6-54aa-4533-95ee-9deb5c46811b","Type":"ContainerStarted","Data":"47666d69e88477ec11bb6fd81a9729829eb8a83752e3d8e0d01393bcf0093d9f"} Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.143565 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"6dd44d16-5b30-493c-9dd2-1ba856a4393a","Type":"ContainerStarted","Data":"016870e708b72d125c622e49d11ef1c30dd8bb7e0682bc45de42857aa38e3b58"} Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.187539 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-7w9pg"] Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.195840 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-7w9pg"] Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.350349 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b0dc77d-eab2-4a1a-b975-54dc6ec89453" path="/var/lib/kubelet/pods/4b0dc77d-eab2-4a1a-b975-54dc6ec89453/volumes" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.371052 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67fdf7998c-kds67"] Jan 28 16:05:30 crc kubenswrapper[4811]: W0128 16:05:30.393937 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3730224_7295_4fea_bd2e_67cad0392474.slice/crio-1feff88647b5793f4c6f4d8fdec21ae74f6824239500c5af16031bfd1b40a24c WatchSource:0}: Error finding container 1feff88647b5793f4c6f4d8fdec21ae74f6824239500c5af16031bfd1b40a24c: Status 404 returned error can't find the container with id 1feff88647b5793f4c6f4d8fdec21ae74f6824239500c5af16031bfd1b40a24c Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.422855 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.434299 4811 util.go:30] "No sandbox for pod can be found. 
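
The manager.go:1169 watch-event warnings are cAdvisor racing sandbox startup: a new crio-<id> cgroup appears before CRI-O can answer for it, so the lookup returns 404. Two of the three IDs flagged in this window (68bb6b... and 95ae29...) show up moments later in ContainerStarted PLEG events, which is the usual sign the warning was transient. A cross-check sketch, same assumptions as above:

    import re

    flagged, started = set(), set()
    with open("kubelet.log") as f:          # hypothetical path
        for line in f:
            m = re.search(r"can't find the container with id ([0-9a-f]+)", line)
            if m:
                flagged.add(m.group(1))
            m = re.search(r'"Type":"ContainerStarted","Data":"([0-9a-f]+)"', line)
            if m:
                started.add(m.group(1))

    for cid in sorted(flagged):
        print(cid[:12], "recovered" if cid in started else "never started, investigate")
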
Need to start a new one" pod="openstack/swift-storage-0" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.443159 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-zfhdf" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.443467 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.443604 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.443728 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.460536 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.574866 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-lock\") pod \"swift-storage-0\" (UID: \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\") " pod="openstack/swift-storage-0" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.574912 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\") " pod="openstack/swift-storage-0" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.574961 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-etc-swift\") pod \"swift-storage-0\" (UID: \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\") " pod="openstack/swift-storage-0" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.575021 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9nzrl\" (UniqueName: \"kubernetes.io/projected/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-kube-api-access-9nzrl\") pod \"swift-storage-0\" (UID: \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\") " pod="openstack/swift-storage-0" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.575066 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\") " pod="openstack/swift-storage-0" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.575097 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-cache\") pod \"swift-storage-0\" (UID: \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\") " pod="openstack/swift-storage-0" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.677290 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-etc-swift\") pod \"swift-storage-0\" (UID: \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\") " pod="openstack/swift-storage-0" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.677502 4811 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9nzrl\" (UniqueName: \"kubernetes.io/projected/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-kube-api-access-9nzrl\") pod \"swift-storage-0\" (UID: \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\") " pod="openstack/swift-storage-0" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.677557 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\") " pod="openstack/swift-storage-0" Jan 28 16:05:30 crc kubenswrapper[4811]: E0128 16:05:30.677565 4811 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 28 16:05:30 crc kubenswrapper[4811]: E0128 16:05:30.677593 4811 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 28 16:05:30 crc kubenswrapper[4811]: E0128 16:05:30.677690 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-etc-swift podName:84ae0602-24d6-43f7-84d5-c9aa7ed04d7b nodeName:}" failed. No retries permitted until 2026-01-28 16:05:31.177659408 +0000 UTC m=+1223.932022991 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-etc-swift") pod "swift-storage-0" (UID: "84ae0602-24d6-43f7-84d5-c9aa7ed04d7b") : configmap "swift-ring-files" not found Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.677593 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-cache\") pod \"swift-storage-0\" (UID: \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\") " pod="openstack/swift-storage-0" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.677852 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-lock\") pod \"swift-storage-0\" (UID: \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\") " pod="openstack/swift-storage-0" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.677891 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\") " pod="openstack/swift-storage-0" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.678090 4811 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/swift-storage-0" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.678175 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-cache\") pod \"swift-storage-0\" (UID: \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\") " pod="openstack/swift-storage-0" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.678384 4811 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-lock\") pod \"swift-storage-0\" (UID: \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\") " pod="openstack/swift-storage-0" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.704233 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\") " pod="openstack/swift-storage-0" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.710380 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-z7x67"] Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.711705 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9nzrl\" (UniqueName: \"kubernetes.io/projected/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-kube-api-access-9nzrl\") pod \"swift-storage-0\" (UID: \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\") " pod="openstack/swift-storage-0" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.712016 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-z7x67" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.713486 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\") " pod="openstack/swift-storage-0" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.714784 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.715288 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.716884 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.727843 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-z7x67"] Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.779871 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e06e289b-fb43-4e7d-999a-38d8a4fc2178-scripts\") pod \"swift-ring-rebalance-z7x67\" (UID: \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\") " pod="openstack/swift-ring-rebalance-z7x67" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.779959 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/e06e289b-fb43-4e7d-999a-38d8a4fc2178-etc-swift\") pod \"swift-ring-rebalance-z7x67\" (UID: \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\") " pod="openstack/swift-ring-rebalance-z7x67" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.780012 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/e06e289b-fb43-4e7d-999a-38d8a4fc2178-ring-data-devices\") pod \"swift-ring-rebalance-z7x67\" (UID: \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\") " pod="openstack/swift-ring-rebalance-z7x67" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.780181 4811 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/e06e289b-fb43-4e7d-999a-38d8a4fc2178-dispersionconf\") pod \"swift-ring-rebalance-z7x67\" (UID: \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\") " pod="openstack/swift-ring-rebalance-z7x67" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.780289 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e06e289b-fb43-4e7d-999a-38d8a4fc2178-combined-ca-bundle\") pod \"swift-ring-rebalance-z7x67\" (UID: \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\") " pod="openstack/swift-ring-rebalance-z7x67" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.780351 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4b7p\" (UniqueName: \"kubernetes.io/projected/e06e289b-fb43-4e7d-999a-38d8a4fc2178-kube-api-access-f4b7p\") pod \"swift-ring-rebalance-z7x67\" (UID: \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\") " pod="openstack/swift-ring-rebalance-z7x67" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.780392 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/e06e289b-fb43-4e7d-999a-38d8a4fc2178-swiftconf\") pod \"swift-ring-rebalance-z7x67\" (UID: \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\") " pod="openstack/swift-ring-rebalance-z7x67" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.882266 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4b7p\" (UniqueName: \"kubernetes.io/projected/e06e289b-fb43-4e7d-999a-38d8a4fc2178-kube-api-access-f4b7p\") pod \"swift-ring-rebalance-z7x67\" (UID: \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\") " pod="openstack/swift-ring-rebalance-z7x67" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.882881 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/e06e289b-fb43-4e7d-999a-38d8a4fc2178-swiftconf\") pod \"swift-ring-rebalance-z7x67\" (UID: \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\") " pod="openstack/swift-ring-rebalance-z7x67" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.882948 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e06e289b-fb43-4e7d-999a-38d8a4fc2178-scripts\") pod \"swift-ring-rebalance-z7x67\" (UID: \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\") " pod="openstack/swift-ring-rebalance-z7x67" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.882978 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/e06e289b-fb43-4e7d-999a-38d8a4fc2178-etc-swift\") pod \"swift-ring-rebalance-z7x67\" (UID: \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\") " pod="openstack/swift-ring-rebalance-z7x67" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.883030 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/e06e289b-fb43-4e7d-999a-38d8a4fc2178-ring-data-devices\") pod \"swift-ring-rebalance-z7x67\" (UID: \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\") " pod="openstack/swift-ring-rebalance-z7x67" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.883127 4811 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/e06e289b-fb43-4e7d-999a-38d8a4fc2178-dispersionconf\") pod \"swift-ring-rebalance-z7x67\" (UID: \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\") " pod="openstack/swift-ring-rebalance-z7x67" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.883209 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e06e289b-fb43-4e7d-999a-38d8a4fc2178-combined-ca-bundle\") pod \"swift-ring-rebalance-z7x67\" (UID: \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\") " pod="openstack/swift-ring-rebalance-z7x67" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.884217 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e06e289b-fb43-4e7d-999a-38d8a4fc2178-scripts\") pod \"swift-ring-rebalance-z7x67\" (UID: \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\") " pod="openstack/swift-ring-rebalance-z7x67" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.884363 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/e06e289b-fb43-4e7d-999a-38d8a4fc2178-ring-data-devices\") pod \"swift-ring-rebalance-z7x67\" (UID: \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\") " pod="openstack/swift-ring-rebalance-z7x67" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.884613 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/e06e289b-fb43-4e7d-999a-38d8a4fc2178-etc-swift\") pod \"swift-ring-rebalance-z7x67\" (UID: \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\") " pod="openstack/swift-ring-rebalance-z7x67" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.887245 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/e06e289b-fb43-4e7d-999a-38d8a4fc2178-swiftconf\") pod \"swift-ring-rebalance-z7x67\" (UID: \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\") " pod="openstack/swift-ring-rebalance-z7x67" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.893735 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/e06e289b-fb43-4e7d-999a-38d8a4fc2178-dispersionconf\") pod \"swift-ring-rebalance-z7x67\" (UID: \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\") " pod="openstack/swift-ring-rebalance-z7x67" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.896758 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e06e289b-fb43-4e7d-999a-38d8a4fc2178-combined-ca-bundle\") pod \"swift-ring-rebalance-z7x67\" (UID: \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\") " pod="openstack/swift-ring-rebalance-z7x67" Jan 28 16:05:30 crc kubenswrapper[4811]: I0128 16:05:30.907517 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4b7p\" (UniqueName: \"kubernetes.io/projected/e06e289b-fb43-4e7d-999a-38d8a4fc2178-kube-api-access-f4b7p\") pod \"swift-ring-rebalance-z7x67\" (UID: \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\") " pod="openstack/swift-ring-rebalance-z7x67" Jan 28 16:05:31 crc kubenswrapper[4811]: I0128 16:05:31.052090 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-z7x67" Jan 28 16:05:31 crc kubenswrapper[4811]: I0128 16:05:31.158023 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67fdf7998c-kds67" event={"ID":"a3730224-7295-4fea-bd2e-67cad0392474","Type":"ContainerStarted","Data":"1feff88647b5793f4c6f4d8fdec21ae74f6824239500c5af16031bfd1b40a24c"} Jan 28 16:05:31 crc kubenswrapper[4811]: I0128 16:05:31.159344 4811 generic.go:334] "Generic (PLEG): container finished" podID="40de3f43-8a7c-48a2-a326-5686170e633f" containerID="c3576f63a918750700ccaef5bc90bb88b19f839705ec21b6de85b52c0697776f" exitCode=0 Jan 28 16:05:31 crc kubenswrapper[4811]: I0128 16:05:31.159375 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7878659675-9spmf" event={"ID":"40de3f43-8a7c-48a2-a326-5686170e633f","Type":"ContainerDied","Data":"c3576f63a918750700ccaef5bc90bb88b19f839705ec21b6de85b52c0697776f"} Jan 28 16:05:31 crc kubenswrapper[4811]: I0128 16:05:31.192354 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-etc-swift\") pod \"swift-storage-0\" (UID: \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\") " pod="openstack/swift-storage-0" Jan 28 16:05:31 crc kubenswrapper[4811]: E0128 16:05:31.192623 4811 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 28 16:05:31 crc kubenswrapper[4811]: E0128 16:05:31.192639 4811 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 28 16:05:31 crc kubenswrapper[4811]: E0128 16:05:31.192680 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-etc-swift podName:84ae0602-24d6-43f7-84d5-c9aa7ed04d7b nodeName:}" failed. No retries permitted until 2026-01-28 16:05:32.192665984 +0000 UTC m=+1224.947029557 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-etc-swift") pod "swift-storage-0" (UID: "84ae0602-24d6-43f7-84d5-c9aa7ed04d7b") : configmap "swift-ring-files" not found Jan 28 16:05:31 crc kubenswrapper[4811]: I0128 16:05:31.717561 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7878659675-9spmf" Jan 28 16:05:31 crc kubenswrapper[4811]: I0128 16:05:31.818204 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40de3f43-8a7c-48a2-a326-5686170e633f-config\") pod \"40de3f43-8a7c-48a2-a326-5686170e633f\" (UID: \"40de3f43-8a7c-48a2-a326-5686170e633f\") " Jan 28 16:05:31 crc kubenswrapper[4811]: I0128 16:05:31.818296 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/40de3f43-8a7c-48a2-a326-5686170e633f-ovsdbserver-nb\") pod \"40de3f43-8a7c-48a2-a326-5686170e633f\" (UID: \"40de3f43-8a7c-48a2-a326-5686170e633f\") " Jan 28 16:05:31 crc kubenswrapper[4811]: I0128 16:05:31.818358 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zn78g\" (UniqueName: \"kubernetes.io/projected/40de3f43-8a7c-48a2-a326-5686170e633f-kube-api-access-zn78g\") pod \"40de3f43-8a7c-48a2-a326-5686170e633f\" (UID: \"40de3f43-8a7c-48a2-a326-5686170e633f\") " Jan 28 16:05:31 crc kubenswrapper[4811]: I0128 16:05:31.818735 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/40de3f43-8a7c-48a2-a326-5686170e633f-dns-svc\") pod \"40de3f43-8a7c-48a2-a326-5686170e633f\" (UID: \"40de3f43-8a7c-48a2-a326-5686170e633f\") " Jan 28 16:05:31 crc kubenswrapper[4811]: I0128 16:05:31.823930 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40de3f43-8a7c-48a2-a326-5686170e633f-kube-api-access-zn78g" (OuterVolumeSpecName: "kube-api-access-zn78g") pod "40de3f43-8a7c-48a2-a326-5686170e633f" (UID: "40de3f43-8a7c-48a2-a326-5686170e633f"). InnerVolumeSpecName "kube-api-access-zn78g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:05:31 crc kubenswrapper[4811]: I0128 16:05:31.845103 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40de3f43-8a7c-48a2-a326-5686170e633f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "40de3f43-8a7c-48a2-a326-5686170e633f" (UID: "40de3f43-8a7c-48a2-a326-5686170e633f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:05:31 crc kubenswrapper[4811]: I0128 16:05:31.845256 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40de3f43-8a7c-48a2-a326-5686170e633f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "40de3f43-8a7c-48a2-a326-5686170e633f" (UID: "40de3f43-8a7c-48a2-a326-5686170e633f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:05:31 crc kubenswrapper[4811]: I0128 16:05:31.847699 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-z7x67"] Jan 28 16:05:31 crc kubenswrapper[4811]: I0128 16:05:31.854033 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40de3f43-8a7c-48a2-a326-5686170e633f-config" (OuterVolumeSpecName: "config") pod "40de3f43-8a7c-48a2-a326-5686170e633f" (UID: "40de3f43-8a7c-48a2-a326-5686170e633f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:05:31 crc kubenswrapper[4811]: W0128 16:05:31.854141 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode06e289b_fb43_4e7d_999a_38d8a4fc2178.slice/crio-b7f2b5583565e8e4c292d26bbd515d6d5c515d6f51ae0de065c99f6d1b3fe63e WatchSource:0}: Error finding container b7f2b5583565e8e4c292d26bbd515d6d5c515d6f51ae0de065c99f6d1b3fe63e: Status 404 returned error can't find the container with id b7f2b5583565e8e4c292d26bbd515d6d5c515d6f51ae0de065c99f6d1b3fe63e Jan 28 16:05:31 crc kubenswrapper[4811]: I0128 16:05:31.921066 4811 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/40de3f43-8a7c-48a2-a326-5686170e633f-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:31 crc kubenswrapper[4811]: I0128 16:05:31.921092 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40de3f43-8a7c-48a2-a326-5686170e633f-config\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:31 crc kubenswrapper[4811]: I0128 16:05:31.921102 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/40de3f43-8a7c-48a2-a326-5686170e633f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:31 crc kubenswrapper[4811]: I0128 16:05:31.921115 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zn78g\" (UniqueName: \"kubernetes.io/projected/40de3f43-8a7c-48a2-a326-5686170e633f-kube-api-access-zn78g\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:32 crc kubenswrapper[4811]: I0128 16:05:32.167140 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7878659675-9spmf" Jan 28 16:05:32 crc kubenswrapper[4811]: I0128 16:05:32.167115 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7878659675-9spmf" event={"ID":"40de3f43-8a7c-48a2-a326-5686170e633f","Type":"ContainerDied","Data":"68bb6b51431d2138d138cde8b5b8586391acec9afca34d2c64f698f40f5a374c"} Jan 28 16:05:32 crc kubenswrapper[4811]: I0128 16:05:32.167600 4811 scope.go:117] "RemoveContainer" containerID="c3576f63a918750700ccaef5bc90bb88b19f839705ec21b6de85b52c0697776f" Jan 28 16:05:32 crc kubenswrapper[4811]: I0128 16:05:32.168124 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-z7x67" event={"ID":"e06e289b-fb43-4e7d-999a-38d8a4fc2178","Type":"ContainerStarted","Data":"b7f2b5583565e8e4c292d26bbd515d6d5c515d6f51ae0de065c99f6d1b3fe63e"} Jan 28 16:05:32 crc kubenswrapper[4811]: I0128 16:05:32.219496 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7878659675-9spmf"] Jan 28 16:05:32 crc kubenswrapper[4811]: I0128 16:05:32.226851 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-etc-swift\") pod \"swift-storage-0\" (UID: \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\") " pod="openstack/swift-storage-0" Jan 28 16:05:32 crc kubenswrapper[4811]: E0128 16:05:32.227062 4811 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 28 16:05:32 crc kubenswrapper[4811]: E0128 16:05:32.227079 4811 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found 
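The recurring "etc-swift" entries in this window are one failure being rescheduled, not distinct problems: the pod's projected volume references the openstack/swift-ring-files ConfigMap, which does not exist yet (presumably it is published by the swift-ring-rebalance-z7x67 job starting in the same window), so every MountVolume.SetUp attempt fails and nestedpendingoperations doubles the delay each time — durationBeforeRetry 500ms and 1s above, then 2s, 4s, 8s, and 16s below. A minimal Go sketch of that schedule, with the 500ms seed and factor of 2 read off this log and the cap an assumption for illustration rather than kubelet's actual constant:

```go
package main

import (
	"fmt"
	"time"
)

// durationBeforeRetry reproduces the doubling schedule visible in the
// nestedpendingoperations entries: 500ms, 1s, 2s, 4s, 8s, 16s, ...
// The 500ms seed and factor of 2 are inferred from this log; the 2m cap
// is an assumption for the sketch, not a value taken from kubelet source.
func durationBeforeRetry(failures int) time.Duration {
	d := 500 * time.Millisecond
	for i := 0; i < failures; i++ {
		d *= 2
	}
	if d > 2*time.Minute {
		d = 2 * time.Minute
	}
	return d
}

func main() {
	for f := 0; f < 6; f++ {
		fmt.Printf("failure %d -> retry in %v\n", f+1, durationBeforeRetry(f))
	}
}
```

The backoff stops mattering once the ConfigMap exists: further down, swift-ring-rebalance-z7x67's container exits 0 at 16:05:54, so the retry scheduled for 16:06:02 should find swift-ring-files and let swift-storage-0 finish mounting.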
Jan 28 16:05:32 crc kubenswrapper[4811]: E0128 16:05:32.227130 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-etc-swift podName:84ae0602-24d6-43f7-84d5-c9aa7ed04d7b nodeName:}" failed. No retries permitted until 2026-01-28 16:05:34.227113194 +0000 UTC m=+1226.981476777 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-etc-swift") pod "swift-storage-0" (UID: "84ae0602-24d6-43f7-84d5-c9aa7ed04d7b") : configmap "swift-ring-files" not found Jan 28 16:05:32 crc kubenswrapper[4811]: I0128 16:05:32.232560 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7878659675-9spmf"] Jan 28 16:05:32 crc kubenswrapper[4811]: I0128 16:05:32.350237 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40de3f43-8a7c-48a2-a326-5686170e633f" path="/var/lib/kubelet/pods/40de3f43-8a7c-48a2-a326-5686170e633f/volumes" Jan 28 16:05:34 crc kubenswrapper[4811]: I0128 16:05:34.260835 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-etc-swift\") pod \"swift-storage-0\" (UID: \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\") " pod="openstack/swift-storage-0" Jan 28 16:05:34 crc kubenswrapper[4811]: E0128 16:05:34.261160 4811 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 28 16:05:34 crc kubenswrapper[4811]: E0128 16:05:34.261356 4811 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 28 16:05:34 crc kubenswrapper[4811]: E0128 16:05:34.261444 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-etc-swift podName:84ae0602-24d6-43f7-84d5-c9aa7ed04d7b nodeName:}" failed. No retries permitted until 2026-01-28 16:05:38.261405771 +0000 UTC m=+1231.015769374 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-etc-swift") pod "swift-storage-0" (UID: "84ae0602-24d6-43f7-84d5-c9aa7ed04d7b") : configmap "swift-ring-files" not found Jan 28 16:05:35 crc kubenswrapper[4811]: I0128 16:05:35.455467 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Jan 28 16:05:35 crc kubenswrapper[4811]: I0128 16:05:35.455848 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Jan 28 16:05:36 crc kubenswrapper[4811]: I0128 16:05:36.827934 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Jan 28 16:05:36 crc kubenswrapper[4811]: I0128 16:05:36.828004 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Jan 28 16:05:38 crc kubenswrapper[4811]: I0128 16:05:38.333768 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-etc-swift\") pod \"swift-storage-0\" (UID: \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\") " pod="openstack/swift-storage-0" Jan 28 16:05:38 crc kubenswrapper[4811]: E0128 16:05:38.333962 4811 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 28 16:05:38 crc kubenswrapper[4811]: E0128 16:05:38.334249 4811 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 28 16:05:38 crc kubenswrapper[4811]: E0128 16:05:38.334320 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-etc-swift podName:84ae0602-24d6-43f7-84d5-c9aa7ed04d7b nodeName:}" failed. No retries permitted until 2026-01-28 16:05:46.334298632 +0000 UTC m=+1239.088662215 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-etc-swift") pod "swift-storage-0" (UID: "84ae0602-24d6-43f7-84d5-c9aa7ed04d7b") : configmap "swift-ring-files" not found Jan 28 16:05:43 crc kubenswrapper[4811]: I0128 16:05:43.260830 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-d62hg" event={"ID":"3e79a2a6-54aa-4533-95ee-9deb5c46811b","Type":"ContainerStarted","Data":"2e79233cf89f05e85e48d2caa1daae0928b9b4d3e75da1ee62c6549871edbf80"} Jan 28 16:05:43 crc kubenswrapper[4811]: E0128 16:05:43.602713 4811 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = writing blob: storing blob to file \"/var/tmp/container_images_storage1968931775/1\": happened during read: context canceled" image="quay.io/podified-antelope-centos9/openstack-ovn-northd@sha256:425ebddc9d6851ee9c730e67eaf43039943dc7937fb11332a41335a9114b2d44" Jan 28 16:05:43 crc kubenswrapper[4811]: E0128 16:05:43.602917 4811 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ovn-northd,Image:quay.io/podified-antelope-centos9/openstack-ovn-northd@sha256:425ebddc9d6851ee9c730e67eaf43039943dc7937fb11332a41335a9114b2d44,Command:[/usr/bin/ovn-northd],Args:[-vfile:off -vconsole:info --n-threads=1 --ovnnb-db=ssl:ovsdbserver-nb-0.openstack.svc.cluster.local:6641 --ovnsb-db=ssl:ovsdbserver-sb-0.openstack.svc.cluster.local:6642 --certificate=/etc/pki/tls/certs/ovndb.crt --private-key=/etc/pki/tls/private/ovndb.key --ca-cert=/etc/pki/tls/certs/ovndbca.crt],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n67chf5h5d6h5d8h68ch599h645hb7hch557h5fbh645hc9hc8hfdh548h85h5cch6dh558h7h54ch548h59dhcfh5d7h55chb7hb7hd4h5c5h58cq,ValueFrom:nil,},EnvVar{Name:OVN_RUNDIR,Value:/tmp,ValueFrom:nil,},EnvVar{Name:certs,Value:n88h549h95h697h544h65ch5d4h567h575h5b9h77h5b8h5cdh5fdh594h647h544h56h7dh7ch54ch6fh5d7h5d7h585hc6hcdh5c9h585h577h677hdcq,ValueFrom:nil,},EnvVar{Name:certs_metrics,Value:n5b4h5bfh65fh677h7bh5bdh55h66dh669hf7hffhb7h4h78h57h589h644h55h66bh669h68ch9bh54bh685h557h569h644h688h5f7h5f7hdch5b6q,ValueFrom:nil,},EnvVar{Name:ovnnorthd-config,Value:n5c8h7ch56bh8dh8hc4h5dch9dh68h6bhb7h598h549h5dbh66fh6bh5b4h5cch5d6h55ch57fhfch588h89h5ddh5d6h65bh65bh8dhc4h67dh569q,ValueFrom:nil,},EnvVar{Name:ovnnorthd-scripts,Value:n664hd8h66ch58dh64hc9h66bhd4h558h697h67bh557hdch664h567h669h555h696h556h556h5fh5bh569hbh665h9dh4h9bh564hc8h5b7h5c4q,ValueFrom:nil,},EnvVar{Name:tls-ca-bundle.pem,Value:n57bh556h67dh57dh86h5d8h5b5h59dh59h654h667h686h569h7fhb8hf4h79h587h59ch695h59fh5f4h564h5fch5f4hb6h645h5cfh9ch6dhc7hdbq,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-rundir,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-northd-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndb.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-northd-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/private/ovndb
.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-northd-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndbca.crt,SubPath:ca.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rb8t8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/status_check.sh],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:15,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/status_check.sh],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:15,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-northd-0_openstack(6dd44d16-5b30-493c-9dd2-1ba856a4393a): ErrImagePull: rpc error: code = Canceled desc = writing blob: storing blob to file \"/var/tmp/container_images_storage1968931775/1\": happened during read: context canceled" logger="UnhandledError" Jan 28 16:05:44 crc kubenswrapper[4811]: I0128 16:05:44.271661 4811 generic.go:334] "Generic (PLEG): container finished" podID="a3730224-7295-4fea-bd2e-67cad0392474" containerID="a99e2ad139d7f0c192083433e961eb9268a4901c34a54ff538306acea713ce1b" exitCode=0 Jan 28 16:05:44 crc kubenswrapper[4811]: I0128 16:05:44.273523 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67fdf7998c-kds67" event={"ID":"a3730224-7295-4fea-bd2e-67cad0392474","Type":"ContainerDied","Data":"a99e2ad139d7f0c192083433e961eb9268a4901c34a54ff538306acea713ce1b"} Jan 28 16:05:44 crc kubenswrapper[4811]: I0128 16:05:44.278086 4811 generic.go:334] "Generic (PLEG): container finished" podID="87ad91b9-ba19-424b-bc0c-85ff9d1b2de2" containerID="fdec78061ee811927e467696e9db6f6db0aa38b8f7903fcb56c3201c0713ac43" exitCode=0 Jan 28 16:05:44 crc kubenswrapper[4811]: I0128 16:05:44.278584 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586b989cdc-lbxpq" event={"ID":"87ad91b9-ba19-424b-bc0c-85ff9d1b2de2","Type":"ContainerDied","Data":"fdec78061ee811927e467696e9db6f6db0aa38b8f7903fcb56c3201c0713ac43"} Jan 28 16:05:44 crc kubenswrapper[4811]: I0128 16:05:44.316321 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-d62hg" podStartSLOduration=16.316299501 podStartE2EDuration="16.316299501s" podCreationTimestamp="2026-01-28 16:05:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:05:44.312557489 +0000 UTC m=+1237.066921072" 
watchObservedRunningTime="2026-01-28 16:05:44.316299501 +0000 UTC m=+1237.070663094" Jan 28 16:05:44 crc kubenswrapper[4811]: E0128 16:05:44.585539 4811 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 38.102.83.233:46786->38.102.83.233:37263: write tcp 192.168.126.11:10250->192.168.126.11:42990: write: connection reset by peer Jan 28 16:05:44 crc kubenswrapper[4811]: I0128 16:05:44.824212 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-586b989cdc-lbxpq" Jan 28 16:05:44 crc kubenswrapper[4811]: I0128 16:05:44.870590 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87ad91b9-ba19-424b-bc0c-85ff9d1b2de2-ovsdbserver-nb\") pod \"87ad91b9-ba19-424b-bc0c-85ff9d1b2de2\" (UID: \"87ad91b9-ba19-424b-bc0c-85ff9d1b2de2\") " Jan 28 16:05:44 crc kubenswrapper[4811]: I0128 16:05:44.870651 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87ad91b9-ba19-424b-bc0c-85ff9d1b2de2-config\") pod \"87ad91b9-ba19-424b-bc0c-85ff9d1b2de2\" (UID: \"87ad91b9-ba19-424b-bc0c-85ff9d1b2de2\") " Jan 28 16:05:44 crc kubenswrapper[4811]: I0128 16:05:44.870721 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87ad91b9-ba19-424b-bc0c-85ff9d1b2de2-dns-svc\") pod \"87ad91b9-ba19-424b-bc0c-85ff9d1b2de2\" (UID: \"87ad91b9-ba19-424b-bc0c-85ff9d1b2de2\") " Jan 28 16:05:44 crc kubenswrapper[4811]: I0128 16:05:44.870804 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l9jd7\" (UniqueName: \"kubernetes.io/projected/87ad91b9-ba19-424b-bc0c-85ff9d1b2de2-kube-api-access-l9jd7\") pod \"87ad91b9-ba19-424b-bc0c-85ff9d1b2de2\" (UID: \"87ad91b9-ba19-424b-bc0c-85ff9d1b2de2\") " Jan 28 16:05:44 crc kubenswrapper[4811]: I0128 16:05:44.870897 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87ad91b9-ba19-424b-bc0c-85ff9d1b2de2-ovsdbserver-sb\") pod \"87ad91b9-ba19-424b-bc0c-85ff9d1b2de2\" (UID: \"87ad91b9-ba19-424b-bc0c-85ff9d1b2de2\") " Jan 28 16:05:44 crc kubenswrapper[4811]: I0128 16:05:44.879272 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87ad91b9-ba19-424b-bc0c-85ff9d1b2de2-kube-api-access-l9jd7" (OuterVolumeSpecName: "kube-api-access-l9jd7") pod "87ad91b9-ba19-424b-bc0c-85ff9d1b2de2" (UID: "87ad91b9-ba19-424b-bc0c-85ff9d1b2de2"). InnerVolumeSpecName "kube-api-access-l9jd7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:05:44 crc kubenswrapper[4811]: I0128 16:05:44.893463 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87ad91b9-ba19-424b-bc0c-85ff9d1b2de2-config" (OuterVolumeSpecName: "config") pod "87ad91b9-ba19-424b-bc0c-85ff9d1b2de2" (UID: "87ad91b9-ba19-424b-bc0c-85ff9d1b2de2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:05:44 crc kubenswrapper[4811]: I0128 16:05:44.900891 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87ad91b9-ba19-424b-bc0c-85ff9d1b2de2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "87ad91b9-ba19-424b-bc0c-85ff9d1b2de2" (UID: "87ad91b9-ba19-424b-bc0c-85ff9d1b2de2"). 
InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:05:44 crc kubenswrapper[4811]: I0128 16:05:44.900977 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87ad91b9-ba19-424b-bc0c-85ff9d1b2de2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "87ad91b9-ba19-424b-bc0c-85ff9d1b2de2" (UID: "87ad91b9-ba19-424b-bc0c-85ff9d1b2de2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:05:44 crc kubenswrapper[4811]: I0128 16:05:44.903816 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87ad91b9-ba19-424b-bc0c-85ff9d1b2de2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "87ad91b9-ba19-424b-bc0c-85ff9d1b2de2" (UID: "87ad91b9-ba19-424b-bc0c-85ff9d1b2de2"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:05:44 crc kubenswrapper[4811]: I0128 16:05:44.972696 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87ad91b9-ba19-424b-bc0c-85ff9d1b2de2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:44 crc kubenswrapper[4811]: I0128 16:05:44.972728 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87ad91b9-ba19-424b-bc0c-85ff9d1b2de2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:44 crc kubenswrapper[4811]: I0128 16:05:44.972738 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87ad91b9-ba19-424b-bc0c-85ff9d1b2de2-config\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:44 crc kubenswrapper[4811]: I0128 16:05:44.972745 4811 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87ad91b9-ba19-424b-bc0c-85ff9d1b2de2-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:44 crc kubenswrapper[4811]: I0128 16:05:44.972754 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l9jd7\" (UniqueName: \"kubernetes.io/projected/87ad91b9-ba19-424b-bc0c-85ff9d1b2de2-kube-api-access-l9jd7\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:45 crc kubenswrapper[4811]: I0128 16:05:45.288235 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586b989cdc-lbxpq" event={"ID":"87ad91b9-ba19-424b-bc0c-85ff9d1b2de2","Type":"ContainerDied","Data":"95ae29b88af6c8f1548ea07e7c334dd279d00394edc6d34b62f8412d387937a0"} Jan 28 16:05:45 crc kubenswrapper[4811]: I0128 16:05:45.288297 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-586b989cdc-lbxpq" Jan 28 16:05:45 crc kubenswrapper[4811]: I0128 16:05:45.288319 4811 scope.go:117] "RemoveContainer" containerID="fdec78061ee811927e467696e9db6f6db0aa38b8f7903fcb56c3201c0713ac43" Jan 28 16:05:45 crc kubenswrapper[4811]: I0128 16:05:45.372588 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-lbxpq"] Jan 28 16:05:45 crc kubenswrapper[4811]: I0128 16:05:45.380084 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-lbxpq"] Jan 28 16:05:46 crc kubenswrapper[4811]: I0128 16:05:46.351308 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87ad91b9-ba19-424b-bc0c-85ff9d1b2de2" path="/var/lib/kubelet/pods/87ad91b9-ba19-424b-bc0c-85ff9d1b2de2/volumes" Jan 28 16:05:46 crc kubenswrapper[4811]: I0128 16:05:46.394410 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-etc-swift\") pod \"swift-storage-0\" (UID: \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\") " pod="openstack/swift-storage-0" Jan 28 16:05:46 crc kubenswrapper[4811]: E0128 16:05:46.394649 4811 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 28 16:05:46 crc kubenswrapper[4811]: E0128 16:05:46.394668 4811 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 28 16:05:46 crc kubenswrapper[4811]: E0128 16:05:46.394727 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-etc-swift podName:84ae0602-24d6-43f7-84d5-c9aa7ed04d7b nodeName:}" failed. No retries permitted until 2026-01-28 16:06:02.394710378 +0000 UTC m=+1255.149073961 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-etc-swift") pod "swift-storage-0" (UID: "84ae0602-24d6-43f7-84d5-c9aa7ed04d7b") : configmap "swift-ring-files" not found Jan 28 16:05:46 crc kubenswrapper[4811]: I0128 16:05:46.993665 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Jan 28 16:05:47 crc kubenswrapper[4811]: I0128 16:05:47.099776 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Jan 28 16:05:47 crc kubenswrapper[4811]: E0128 16:05:47.205828 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovn-northd\" with ErrImagePull: \"rpc error: code = Canceled desc = writing blob: storing blob to file \\\"/var/tmp/container_images_storage1968931775/1\\\": happened during read: context canceled\"" pod="openstack/ovn-northd-0" podUID="6dd44d16-5b30-493c-9dd2-1ba856a4393a" Jan 28 16:05:47 crc kubenswrapper[4811]: I0128 16:05:47.304453 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67fdf7998c-kds67" event={"ID":"a3730224-7295-4fea-bd2e-67cad0392474","Type":"ContainerStarted","Data":"723c3c1f133020bbd2e12c0cc5bd06cc9712218d2f07e7e49921785e8eb9e001"} Jan 28 16:05:47 crc kubenswrapper[4811]: I0128 16:05:47.304618 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-67fdf7998c-kds67" Jan 28 16:05:47 crc kubenswrapper[4811]: I0128 16:05:47.306492 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-z7x67" event={"ID":"e06e289b-fb43-4e7d-999a-38d8a4fc2178","Type":"ContainerStarted","Data":"945e5bdfc0446fe693f7c19ef23a0eb10103bbb962463cff45c55bc97923580b"} Jan 28 16:05:47 crc kubenswrapper[4811]: I0128 16:05:47.313915 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"6dd44d16-5b30-493c-9dd2-1ba856a4393a","Type":"ContainerStarted","Data":"1112ae05c1cf02d134ea38159d36c4ca6bf5c3e27f26f8a7397f9fd2b51e726b"} Jan 28 16:05:47 crc kubenswrapper[4811]: E0128 16:05:47.315124 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovn-northd\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-ovn-northd@sha256:425ebddc9d6851ee9c730e67eaf43039943dc7937fb11332a41335a9114b2d44\\\"\"" pod="openstack/ovn-northd-0" podUID="6dd44d16-5b30-493c-9dd2-1ba856a4393a" Jan 28 16:05:47 crc kubenswrapper[4811]: I0128 16:05:47.331572 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-67fdf7998c-kds67" podStartSLOduration=18.331549795 podStartE2EDuration="18.331549795s" podCreationTimestamp="2026-01-28 16:05:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:05:47.322935752 +0000 UTC m=+1240.077299335" watchObservedRunningTime="2026-01-28 16:05:47.331549795 +0000 UTC m=+1240.085913408" Jan 28 16:05:47 crc kubenswrapper[4811]: I0128 16:05:47.348899 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-z7x67" podStartSLOduration=2.234349559 podStartE2EDuration="17.348881763s" podCreationTimestamp="2026-01-28 16:05:30 +0000 UTC" firstStartedPulling="2026-01-28 16:05:31.856835463 +0000 UTC m=+1224.611199046" 
lastFinishedPulling="2026-01-28 16:05:46.971367667 +0000 UTC m=+1239.725731250" observedRunningTime="2026-01-28 16:05:47.34062702 +0000 UTC m=+1240.094990623" watchObservedRunningTime="2026-01-28 16:05:47.348881763 +0000 UTC m=+1240.103245346" Jan 28 16:05:48 crc kubenswrapper[4811]: E0128 16:05:48.323375 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovn-northd\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-ovn-northd@sha256:425ebddc9d6851ee9c730e67eaf43039943dc7937fb11332a41335a9114b2d44\\\"\"" pod="openstack/ovn-northd-0" podUID="6dd44d16-5b30-493c-9dd2-1ba856a4393a" Jan 28 16:05:49 crc kubenswrapper[4811]: I0128 16:05:49.576480 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Jan 28 16:05:49 crc kubenswrapper[4811]: I0128 16:05:49.662980 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Jan 28 16:05:52 crc kubenswrapper[4811]: I0128 16:05:52.213669 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-wzxwb" Jan 28 16:05:52 crc kubenswrapper[4811]: I0128 16:05:52.223414 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-wzxwb" Jan 28 16:05:52 crc kubenswrapper[4811]: I0128 16:05:52.274177 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-fxxlf" podUID="1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e" containerName="ovn-controller" probeResult="failure" output=< Jan 28 16:05:52 crc kubenswrapper[4811]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Jan 28 16:05:52 crc kubenswrapper[4811]: > Jan 28 16:05:52 crc kubenswrapper[4811]: I0128 16:05:52.447125 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-fxxlf-config-qm27n"] Jan 28 16:05:52 crc kubenswrapper[4811]: E0128 16:05:52.447466 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87ad91b9-ba19-424b-bc0c-85ff9d1b2de2" containerName="init" Jan 28 16:05:52 crc kubenswrapper[4811]: I0128 16:05:52.447482 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="87ad91b9-ba19-424b-bc0c-85ff9d1b2de2" containerName="init" Jan 28 16:05:52 crc kubenswrapper[4811]: E0128 16:05:52.447500 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40de3f43-8a7c-48a2-a326-5686170e633f" containerName="init" Jan 28 16:05:52 crc kubenswrapper[4811]: I0128 16:05:52.447509 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="40de3f43-8a7c-48a2-a326-5686170e633f" containerName="init" Jan 28 16:05:52 crc kubenswrapper[4811]: I0128 16:05:52.447662 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="40de3f43-8a7c-48a2-a326-5686170e633f" containerName="init" Jan 28 16:05:52 crc kubenswrapper[4811]: I0128 16:05:52.447687 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="87ad91b9-ba19-424b-bc0c-85ff9d1b2de2" containerName="init" Jan 28 16:05:52 crc kubenswrapper[4811]: I0128 16:05:52.448188 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-fxxlf-config-qm27n" Jan 28 16:05:52 crc kubenswrapper[4811]: I0128 16:05:52.450336 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Jan 28 16:05:52 crc kubenswrapper[4811]: I0128 16:05:52.463973 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-fxxlf-config-qm27n"] Jan 28 16:05:52 crc kubenswrapper[4811]: I0128 16:05:52.605678 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g9wpl\" (UniqueName: \"kubernetes.io/projected/0b6a4839-f828-40eb-ad65-4edf33d52160-kube-api-access-g9wpl\") pod \"ovn-controller-fxxlf-config-qm27n\" (UID: \"0b6a4839-f828-40eb-ad65-4edf33d52160\") " pod="openstack/ovn-controller-fxxlf-config-qm27n" Jan 28 16:05:52 crc kubenswrapper[4811]: I0128 16:05:52.605847 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0b6a4839-f828-40eb-ad65-4edf33d52160-var-run\") pod \"ovn-controller-fxxlf-config-qm27n\" (UID: \"0b6a4839-f828-40eb-ad65-4edf33d52160\") " pod="openstack/ovn-controller-fxxlf-config-qm27n" Jan 28 16:05:52 crc kubenswrapper[4811]: I0128 16:05:52.605877 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/0b6a4839-f828-40eb-ad65-4edf33d52160-additional-scripts\") pod \"ovn-controller-fxxlf-config-qm27n\" (UID: \"0b6a4839-f828-40eb-ad65-4edf33d52160\") " pod="openstack/ovn-controller-fxxlf-config-qm27n" Jan 28 16:05:52 crc kubenswrapper[4811]: I0128 16:05:52.605909 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0b6a4839-f828-40eb-ad65-4edf33d52160-var-run-ovn\") pod \"ovn-controller-fxxlf-config-qm27n\" (UID: \"0b6a4839-f828-40eb-ad65-4edf33d52160\") " pod="openstack/ovn-controller-fxxlf-config-qm27n" Jan 28 16:05:52 crc kubenswrapper[4811]: I0128 16:05:52.605977 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0b6a4839-f828-40eb-ad65-4edf33d52160-var-log-ovn\") pod \"ovn-controller-fxxlf-config-qm27n\" (UID: \"0b6a4839-f828-40eb-ad65-4edf33d52160\") " pod="openstack/ovn-controller-fxxlf-config-qm27n" Jan 28 16:05:52 crc kubenswrapper[4811]: I0128 16:05:52.606211 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0b6a4839-f828-40eb-ad65-4edf33d52160-scripts\") pod \"ovn-controller-fxxlf-config-qm27n\" (UID: \"0b6a4839-f828-40eb-ad65-4edf33d52160\") " pod="openstack/ovn-controller-fxxlf-config-qm27n" Jan 28 16:05:52 crc kubenswrapper[4811]: I0128 16:05:52.709084 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0b6a4839-f828-40eb-ad65-4edf33d52160-var-run\") pod \"ovn-controller-fxxlf-config-qm27n\" (UID: \"0b6a4839-f828-40eb-ad65-4edf33d52160\") " pod="openstack/ovn-controller-fxxlf-config-qm27n" Jan 28 16:05:52 crc kubenswrapper[4811]: I0128 16:05:52.709695 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: 
\"kubernetes.io/configmap/0b6a4839-f828-40eb-ad65-4edf33d52160-additional-scripts\") pod \"ovn-controller-fxxlf-config-qm27n\" (UID: \"0b6a4839-f828-40eb-ad65-4edf33d52160\") " pod="openstack/ovn-controller-fxxlf-config-qm27n" Jan 28 16:05:52 crc kubenswrapper[4811]: I0128 16:05:52.709739 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0b6a4839-f828-40eb-ad65-4edf33d52160-var-run-ovn\") pod \"ovn-controller-fxxlf-config-qm27n\" (UID: \"0b6a4839-f828-40eb-ad65-4edf33d52160\") " pod="openstack/ovn-controller-fxxlf-config-qm27n" Jan 28 16:05:52 crc kubenswrapper[4811]: I0128 16:05:52.709490 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0b6a4839-f828-40eb-ad65-4edf33d52160-var-run\") pod \"ovn-controller-fxxlf-config-qm27n\" (UID: \"0b6a4839-f828-40eb-ad65-4edf33d52160\") " pod="openstack/ovn-controller-fxxlf-config-qm27n" Jan 28 16:05:52 crc kubenswrapper[4811]: I0128 16:05:52.709870 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0b6a4839-f828-40eb-ad65-4edf33d52160-var-log-ovn\") pod \"ovn-controller-fxxlf-config-qm27n\" (UID: \"0b6a4839-f828-40eb-ad65-4edf33d52160\") " pod="openstack/ovn-controller-fxxlf-config-qm27n" Jan 28 16:05:52 crc kubenswrapper[4811]: I0128 16:05:52.709955 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0b6a4839-f828-40eb-ad65-4edf33d52160-scripts\") pod \"ovn-controller-fxxlf-config-qm27n\" (UID: \"0b6a4839-f828-40eb-ad65-4edf33d52160\") " pod="openstack/ovn-controller-fxxlf-config-qm27n" Jan 28 16:05:52 crc kubenswrapper[4811]: I0128 16:05:52.710024 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g9wpl\" (UniqueName: \"kubernetes.io/projected/0b6a4839-f828-40eb-ad65-4edf33d52160-kube-api-access-g9wpl\") pod \"ovn-controller-fxxlf-config-qm27n\" (UID: \"0b6a4839-f828-40eb-ad65-4edf33d52160\") " pod="openstack/ovn-controller-fxxlf-config-qm27n" Jan 28 16:05:52 crc kubenswrapper[4811]: I0128 16:05:52.710210 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0b6a4839-f828-40eb-ad65-4edf33d52160-var-log-ovn\") pod \"ovn-controller-fxxlf-config-qm27n\" (UID: \"0b6a4839-f828-40eb-ad65-4edf33d52160\") " pod="openstack/ovn-controller-fxxlf-config-qm27n" Jan 28 16:05:52 crc kubenswrapper[4811]: I0128 16:05:52.710281 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0b6a4839-f828-40eb-ad65-4edf33d52160-var-run-ovn\") pod \"ovn-controller-fxxlf-config-qm27n\" (UID: \"0b6a4839-f828-40eb-ad65-4edf33d52160\") " pod="openstack/ovn-controller-fxxlf-config-qm27n" Jan 28 16:05:52 crc kubenswrapper[4811]: I0128 16:05:52.710770 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/0b6a4839-f828-40eb-ad65-4edf33d52160-additional-scripts\") pod \"ovn-controller-fxxlf-config-qm27n\" (UID: \"0b6a4839-f828-40eb-ad65-4edf33d52160\") " pod="openstack/ovn-controller-fxxlf-config-qm27n" Jan 28 16:05:52 crc kubenswrapper[4811]: I0128 16:05:52.712067 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/0b6a4839-f828-40eb-ad65-4edf33d52160-scripts\") pod \"ovn-controller-fxxlf-config-qm27n\" (UID: \"0b6a4839-f828-40eb-ad65-4edf33d52160\") " pod="openstack/ovn-controller-fxxlf-config-qm27n" Jan 28 16:05:52 crc kubenswrapper[4811]: I0128 16:05:52.733144 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g9wpl\" (UniqueName: \"kubernetes.io/projected/0b6a4839-f828-40eb-ad65-4edf33d52160-kube-api-access-g9wpl\") pod \"ovn-controller-fxxlf-config-qm27n\" (UID: \"0b6a4839-f828-40eb-ad65-4edf33d52160\") " pod="openstack/ovn-controller-fxxlf-config-qm27n" Jan 28 16:05:52 crc kubenswrapper[4811]: I0128 16:05:52.770267 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-fxxlf-config-qm27n" Jan 28 16:05:53 crc kubenswrapper[4811]: I0128 16:05:53.280007 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-fxxlf-config-qm27n"] Jan 28 16:05:53 crc kubenswrapper[4811]: I0128 16:05:53.369564 4811 generic.go:334] "Generic (PLEG): container finished" podID="23095127-8b86-445a-8c32-1e6bc14bf05e" containerID="28aa321bf0e3ef329793682970b67cddf5dcdb81c24e0b66b553354710937d33" exitCode=0 Jan 28 16:05:53 crc kubenswrapper[4811]: I0128 16:05:53.369644 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"23095127-8b86-445a-8c32-1e6bc14bf05e","Type":"ContainerDied","Data":"28aa321bf0e3ef329793682970b67cddf5dcdb81c24e0b66b553354710937d33"} Jan 28 16:05:53 crc kubenswrapper[4811]: I0128 16:05:53.371571 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-fxxlf-config-qm27n" event={"ID":"0b6a4839-f828-40eb-ad65-4edf33d52160","Type":"ContainerStarted","Data":"e0a535654604bcc5f8d8dc52e188448c8dc379d08a7fa9a185df5665575204d6"} Jan 28 16:05:54 crc kubenswrapper[4811]: I0128 16:05:54.187658 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-jkpn7"] Jan 28 16:05:54 crc kubenswrapper[4811]: I0128 16:05:54.189155 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-jkpn7" Jan 28 16:05:54 crc kubenswrapper[4811]: I0128 16:05:54.191884 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret" Jan 28 16:05:54 crc kubenswrapper[4811]: I0128 16:05:54.202312 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-jkpn7"] Jan 28 16:05:54 crc kubenswrapper[4811]: I0128 16:05:54.339916 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfsrd\" (UniqueName: \"kubernetes.io/projected/8731a583-f443-4981-adf4-04431f77da50-kube-api-access-vfsrd\") pod \"root-account-create-update-jkpn7\" (UID: \"8731a583-f443-4981-adf4-04431f77da50\") " pod="openstack/root-account-create-update-jkpn7" Jan 28 16:05:54 crc kubenswrapper[4811]: I0128 16:05:54.340364 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8731a583-f443-4981-adf4-04431f77da50-operator-scripts\") pod \"root-account-create-update-jkpn7\" (UID: \"8731a583-f443-4981-adf4-04431f77da50\") " pod="openstack/root-account-create-update-jkpn7" Jan 28 16:05:54 crc kubenswrapper[4811]: I0128 16:05:54.383827 4811 generic.go:334] "Generic (PLEG): container finished" podID="3b7599aa-7114-46c9-bf38-b6d735b75326" containerID="750be0d18f938176efd1aca389b486dcb19e5d7b9966a3f4fb7d1aa4cce062d6" exitCode=0 Jan 28 16:05:54 crc kubenswrapper[4811]: I0128 16:05:54.384313 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"3b7599aa-7114-46c9-bf38-b6d735b75326","Type":"ContainerDied","Data":"750be0d18f938176efd1aca389b486dcb19e5d7b9966a3f4fb7d1aa4cce062d6"} Jan 28 16:05:54 crc kubenswrapper[4811]: I0128 16:05:54.388296 4811 generic.go:334] "Generic (PLEG): container finished" podID="e06e289b-fb43-4e7d-999a-38d8a4fc2178" containerID="945e5bdfc0446fe693f7c19ef23a0eb10103bbb962463cff45c55bc97923580b" exitCode=0 Jan 28 16:05:54 crc kubenswrapper[4811]: I0128 16:05:54.388377 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-z7x67" event={"ID":"e06e289b-fb43-4e7d-999a-38d8a4fc2178","Type":"ContainerDied","Data":"945e5bdfc0446fe693f7c19ef23a0eb10103bbb962463cff45c55bc97923580b"} Jan 28 16:05:54 crc kubenswrapper[4811]: I0128 16:05:54.392220 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"23095127-8b86-445a-8c32-1e6bc14bf05e","Type":"ContainerStarted","Data":"783bfe55389f6600eee189683b41f4d30b0ec2595be48aebc9462d419fec02fc"} Jan 28 16:05:54 crc kubenswrapper[4811]: I0128 16:05:54.392833 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Jan 28 16:05:54 crc kubenswrapper[4811]: I0128 16:05:54.394947 4811 generic.go:334] "Generic (PLEG): container finished" podID="0b6a4839-f828-40eb-ad65-4edf33d52160" containerID="e99139d534fdaf6bb0e79ccd58352c7ed2618f532466db2fb7b3493e6ba9b410" exitCode=0 Jan 28 16:05:54 crc kubenswrapper[4811]: I0128 16:05:54.394987 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-fxxlf-config-qm27n" event={"ID":"0b6a4839-f828-40eb-ad65-4edf33d52160","Type":"ContainerDied","Data":"e99139d534fdaf6bb0e79ccd58352c7ed2618f532466db2fb7b3493e6ba9b410"} Jan 28 16:05:54 crc kubenswrapper[4811]: I0128 16:05:54.441632 4811 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-vfsrd\" (UniqueName: \"kubernetes.io/projected/8731a583-f443-4981-adf4-04431f77da50-kube-api-access-vfsrd\") pod \"root-account-create-update-jkpn7\" (UID: \"8731a583-f443-4981-adf4-04431f77da50\") " pod="openstack/root-account-create-update-jkpn7" Jan 28 16:05:54 crc kubenswrapper[4811]: I0128 16:05:54.441727 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8731a583-f443-4981-adf4-04431f77da50-operator-scripts\") pod \"root-account-create-update-jkpn7\" (UID: \"8731a583-f443-4981-adf4-04431f77da50\") " pod="openstack/root-account-create-update-jkpn7" Jan 28 16:05:54 crc kubenswrapper[4811]: I0128 16:05:54.443072 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8731a583-f443-4981-adf4-04431f77da50-operator-scripts\") pod \"root-account-create-update-jkpn7\" (UID: \"8731a583-f443-4981-adf4-04431f77da50\") " pod="openstack/root-account-create-update-jkpn7" Jan 28 16:05:54 crc kubenswrapper[4811]: I0128 16:05:54.464166 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vfsrd\" (UniqueName: \"kubernetes.io/projected/8731a583-f443-4981-adf4-04431f77da50-kube-api-access-vfsrd\") pod \"root-account-create-update-jkpn7\" (UID: \"8731a583-f443-4981-adf4-04431f77da50\") " pod="openstack/root-account-create-update-jkpn7" Jan 28 16:05:54 crc kubenswrapper[4811]: I0128 16:05:54.484041 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=54.770213518 podStartE2EDuration="1m2.484023465s" podCreationTimestamp="2026-01-28 16:04:52 +0000 UTC" firstStartedPulling="2026-01-28 16:05:12.315055197 +0000 UTC m=+1205.069418780" lastFinishedPulling="2026-01-28 16:05:20.028865144 +0000 UTC m=+1212.783228727" observedRunningTime="2026-01-28 16:05:54.47419995 +0000 UTC m=+1247.228563533" watchObservedRunningTime="2026-01-28 16:05:54.484023465 +0000 UTC m=+1247.238387048" Jan 28 16:05:54 crc kubenswrapper[4811]: I0128 16:05:54.507263 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-jkpn7" Jan 28 16:05:54 crc kubenswrapper[4811]: I0128 16:05:54.874631 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-67fdf7998c-kds67" Jan 28 16:05:54 crc kubenswrapper[4811]: I0128 16:05:54.982863 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-6ztk9"] Jan 28 16:05:54 crc kubenswrapper[4811]: I0128 16:05:54.983534 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-95f5f6995-6ztk9" podUID="a6313906-9e80-41ce-b4c1-2f673d09425b" containerName="dnsmasq-dns" containerID="cri-o://f4818e02d5dde7e04b51695018c6c3077d70d55037f32f37689cd0aa7713cdd7" gracePeriod=10 Jan 28 16:05:55 crc kubenswrapper[4811]: I0128 16:05:55.000668 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-jkpn7"] Jan 28 16:05:55 crc kubenswrapper[4811]: I0128 16:05:55.414565 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"3b7599aa-7114-46c9-bf38-b6d735b75326","Type":"ContainerStarted","Data":"e7577902b3c5f2e0c6da1d3f007db93314f77eb274a3406fc13d0cf0a0a1d27e"} Jan 28 16:05:55 crc kubenswrapper[4811]: I0128 16:05:55.415601 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:05:55 crc kubenswrapper[4811]: I0128 16:05:55.421747 4811 generic.go:334] "Generic (PLEG): container finished" podID="a6313906-9e80-41ce-b4c1-2f673d09425b" containerID="f4818e02d5dde7e04b51695018c6c3077d70d55037f32f37689cd0aa7713cdd7" exitCode=0 Jan 28 16:05:55 crc kubenswrapper[4811]: I0128 16:05:55.421812 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95f5f6995-6ztk9" event={"ID":"a6313906-9e80-41ce-b4c1-2f673d09425b","Type":"ContainerDied","Data":"f4818e02d5dde7e04b51695018c6c3077d70d55037f32f37689cd0aa7713cdd7"} Jan 28 16:05:55 crc kubenswrapper[4811]: I0128 16:05:55.426005 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-jkpn7" event={"ID":"8731a583-f443-4981-adf4-04431f77da50","Type":"ContainerStarted","Data":"80a47d6d78e960264bb33e177da16fbe4abd33aedd8c3332b69b669903b0efc8"} Jan 28 16:05:55 crc kubenswrapper[4811]: I0128 16:05:55.426116 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-jkpn7" event={"ID":"8731a583-f443-4981-adf4-04431f77da50","Type":"ContainerStarted","Data":"1340b079a26544a54e6b247bfc24891c5c252cffe3969bf27df3185a9e9da52e"} Jan 28 16:05:55 crc kubenswrapper[4811]: I0128 16:05:55.460744 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=55.724542426 podStartE2EDuration="1m3.460725766s" podCreationTimestamp="2026-01-28 16:04:52 +0000 UTC" firstStartedPulling="2026-01-28 16:05:12.312727675 +0000 UTC m=+1205.067091258" lastFinishedPulling="2026-01-28 16:05:20.048911015 +0000 UTC m=+1212.803274598" observedRunningTime="2026-01-28 16:05:55.459161834 +0000 UTC m=+1248.213525417" watchObservedRunningTime="2026-01-28 16:05:55.460725766 +0000 UTC m=+1248.215089349" Jan 28 16:05:55 crc kubenswrapper[4811]: I0128 16:05:55.519819 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-95f5f6995-6ztk9" Jan 28 16:05:55 crc kubenswrapper[4811]: I0128 16:05:55.521796 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/root-account-create-update-jkpn7" podStartSLOduration=1.521767954 podStartE2EDuration="1.521767954s" podCreationTimestamp="2026-01-28 16:05:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:05:55.485800363 +0000 UTC m=+1248.240163946" watchObservedRunningTime="2026-01-28 16:05:55.521767954 +0000 UTC m=+1248.276131537" Jan 28 16:05:55 crc kubenswrapper[4811]: I0128 16:05:55.670215 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6313906-9e80-41ce-b4c1-2f673d09425b-config\") pod \"a6313906-9e80-41ce-b4c1-2f673d09425b\" (UID: \"a6313906-9e80-41ce-b4c1-2f673d09425b\") " Jan 28 16:05:55 crc kubenswrapper[4811]: I0128 16:05:55.670264 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sf5dv\" (UniqueName: \"kubernetes.io/projected/a6313906-9e80-41ce-b4c1-2f673d09425b-kube-api-access-sf5dv\") pod \"a6313906-9e80-41ce-b4c1-2f673d09425b\" (UID: \"a6313906-9e80-41ce-b4c1-2f673d09425b\") " Jan 28 16:05:55 crc kubenswrapper[4811]: I0128 16:05:55.670314 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6313906-9e80-41ce-b4c1-2f673d09425b-dns-svc\") pod \"a6313906-9e80-41ce-b4c1-2f673d09425b\" (UID: \"a6313906-9e80-41ce-b4c1-2f673d09425b\") " Jan 28 16:05:55 crc kubenswrapper[4811]: I0128 16:05:55.676316 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6313906-9e80-41ce-b4c1-2f673d09425b-kube-api-access-sf5dv" (OuterVolumeSpecName: "kube-api-access-sf5dv") pod "a6313906-9e80-41ce-b4c1-2f673d09425b" (UID: "a6313906-9e80-41ce-b4c1-2f673d09425b"). InnerVolumeSpecName "kube-api-access-sf5dv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:05:55 crc kubenswrapper[4811]: I0128 16:05:55.724161 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6313906-9e80-41ce-b4c1-2f673d09425b-config" (OuterVolumeSpecName: "config") pod "a6313906-9e80-41ce-b4c1-2f673d09425b" (UID: "a6313906-9e80-41ce-b4c1-2f673d09425b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:05:55 crc kubenswrapper[4811]: I0128 16:05:55.734689 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6313906-9e80-41ce-b4c1-2f673d09425b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a6313906-9e80-41ce-b4c1-2f673d09425b" (UID: "a6313906-9e80-41ce-b4c1-2f673d09425b"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:05:55 crc kubenswrapper[4811]: I0128 16:05:55.772941 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6313906-9e80-41ce-b4c1-2f673d09425b-config\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:55 crc kubenswrapper[4811]: I0128 16:05:55.772998 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sf5dv\" (UniqueName: \"kubernetes.io/projected/a6313906-9e80-41ce-b4c1-2f673d09425b-kube-api-access-sf5dv\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:55 crc kubenswrapper[4811]: I0128 16:05:55.773013 4811 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6313906-9e80-41ce-b4c1-2f673d09425b-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:55 crc kubenswrapper[4811]: I0128 16:05:55.972085 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-z7x67" Jan 28 16:05:55 crc kubenswrapper[4811]: I0128 16:05:55.979352 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-fxxlf-config-qm27n" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.078168 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/e06e289b-fb43-4e7d-999a-38d8a4fc2178-dispersionconf\") pod \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\" (UID: \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\") " Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.078331 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f4b7p\" (UniqueName: \"kubernetes.io/projected/e06e289b-fb43-4e7d-999a-38d8a4fc2178-kube-api-access-f4b7p\") pod \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\" (UID: \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\") " Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.078359 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/0b6a4839-f828-40eb-ad65-4edf33d52160-additional-scripts\") pod \"0b6a4839-f828-40eb-ad65-4edf33d52160\" (UID: \"0b6a4839-f828-40eb-ad65-4edf33d52160\") " Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.078403 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g9wpl\" (UniqueName: \"kubernetes.io/projected/0b6a4839-f828-40eb-ad65-4edf33d52160-kube-api-access-g9wpl\") pod \"0b6a4839-f828-40eb-ad65-4edf33d52160\" (UID: \"0b6a4839-f828-40eb-ad65-4edf33d52160\") " Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.078423 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0b6a4839-f828-40eb-ad65-4edf33d52160-scripts\") pod \"0b6a4839-f828-40eb-ad65-4edf33d52160\" (UID: \"0b6a4839-f828-40eb-ad65-4edf33d52160\") " Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.078457 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/e06e289b-fb43-4e7d-999a-38d8a4fc2178-swiftconf\") pod \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\" (UID: \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\") " Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.078475 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/e06e289b-fb43-4e7d-999a-38d8a4fc2178-scripts\") pod \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\" (UID: \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\") " Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.078519 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/e06e289b-fb43-4e7d-999a-38d8a4fc2178-ring-data-devices\") pod \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\" (UID: \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\") " Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.078568 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/e06e289b-fb43-4e7d-999a-38d8a4fc2178-etc-swift\") pod \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\" (UID: \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\") " Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.078597 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0b6a4839-f828-40eb-ad65-4edf33d52160-var-run-ovn\") pod \"0b6a4839-f828-40eb-ad65-4edf33d52160\" (UID: \"0b6a4839-f828-40eb-ad65-4edf33d52160\") " Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.078647 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0b6a4839-f828-40eb-ad65-4edf33d52160-var-log-ovn\") pod \"0b6a4839-f828-40eb-ad65-4edf33d52160\" (UID: \"0b6a4839-f828-40eb-ad65-4edf33d52160\") " Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.078692 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e06e289b-fb43-4e7d-999a-38d8a4fc2178-combined-ca-bundle\") pod \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\" (UID: \"e06e289b-fb43-4e7d-999a-38d8a4fc2178\") " Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.078750 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0b6a4839-f828-40eb-ad65-4edf33d52160-var-run\") pod \"0b6a4839-f828-40eb-ad65-4edf33d52160\" (UID: \"0b6a4839-f828-40eb-ad65-4edf33d52160\") " Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.079053 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0b6a4839-f828-40eb-ad65-4edf33d52160-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "0b6a4839-f828-40eb-ad65-4edf33d52160" (UID: "0b6a4839-f828-40eb-ad65-4edf33d52160"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.079168 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0b6a4839-f828-40eb-ad65-4edf33d52160-var-run" (OuterVolumeSpecName: "var-run") pod "0b6a4839-f828-40eb-ad65-4edf33d52160" (UID: "0b6a4839-f828-40eb-ad65-4edf33d52160"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.079206 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0b6a4839-f828-40eb-ad65-4edf33d52160-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "0b6a4839-f828-40eb-ad65-4edf33d52160" (UID: "0b6a4839-f828-40eb-ad65-4edf33d52160"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.079556 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e06e289b-fb43-4e7d-999a-38d8a4fc2178-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "e06e289b-fb43-4e7d-999a-38d8a4fc2178" (UID: "e06e289b-fb43-4e7d-999a-38d8a4fc2178"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.079607 4811 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0b6a4839-f828-40eb-ad65-4edf33d52160-var-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.079626 4811 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0b6a4839-f828-40eb-ad65-4edf33d52160-var-log-ovn\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.079638 4811 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0b6a4839-f828-40eb-ad65-4edf33d52160-var-run\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.080359 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b6a4839-f828-40eb-ad65-4edf33d52160-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "0b6a4839-f828-40eb-ad65-4edf33d52160" (UID: "0b6a4839-f828-40eb-ad65-4edf33d52160"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.080628 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e06e289b-fb43-4e7d-999a-38d8a4fc2178-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "e06e289b-fb43-4e7d-999a-38d8a4fc2178" (UID: "e06e289b-fb43-4e7d-999a-38d8a4fc2178"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.081602 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b6a4839-f828-40eb-ad65-4edf33d52160-scripts" (OuterVolumeSpecName: "scripts") pod "0b6a4839-f828-40eb-ad65-4edf33d52160" (UID: "0b6a4839-f828-40eb-ad65-4edf33d52160"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.094694 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e06e289b-fb43-4e7d-999a-38d8a4fc2178-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "e06e289b-fb43-4e7d-999a-38d8a4fc2178" (UID: "e06e289b-fb43-4e7d-999a-38d8a4fc2178"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.103288 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e06e289b-fb43-4e7d-999a-38d8a4fc2178-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e06e289b-fb43-4e7d-999a-38d8a4fc2178" (UID: "e06e289b-fb43-4e7d-999a-38d8a4fc2178"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.105671 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e06e289b-fb43-4e7d-999a-38d8a4fc2178-scripts" (OuterVolumeSpecName: "scripts") pod "e06e289b-fb43-4e7d-999a-38d8a4fc2178" (UID: "e06e289b-fb43-4e7d-999a-38d8a4fc2178"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.107596 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e06e289b-fb43-4e7d-999a-38d8a4fc2178-kube-api-access-f4b7p" (OuterVolumeSpecName: "kube-api-access-f4b7p") pod "e06e289b-fb43-4e7d-999a-38d8a4fc2178" (UID: "e06e289b-fb43-4e7d-999a-38d8a4fc2178"). InnerVolumeSpecName "kube-api-access-f4b7p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.107660 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b6a4839-f828-40eb-ad65-4edf33d52160-kube-api-access-g9wpl" (OuterVolumeSpecName: "kube-api-access-g9wpl") pod "0b6a4839-f828-40eb-ad65-4edf33d52160" (UID: "0b6a4839-f828-40eb-ad65-4edf33d52160"). InnerVolumeSpecName "kube-api-access-g9wpl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.118900 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e06e289b-fb43-4e7d-999a-38d8a4fc2178-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "e06e289b-fb43-4e7d-999a-38d8a4fc2178" (UID: "e06e289b-fb43-4e7d-999a-38d8a4fc2178"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.181258 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f4b7p\" (UniqueName: \"kubernetes.io/projected/e06e289b-fb43-4e7d-999a-38d8a4fc2178-kube-api-access-f4b7p\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.181295 4811 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/0b6a4839-f828-40eb-ad65-4edf33d52160-additional-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.181303 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g9wpl\" (UniqueName: \"kubernetes.io/projected/0b6a4839-f828-40eb-ad65-4edf33d52160-kube-api-access-g9wpl\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.181313 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0b6a4839-f828-40eb-ad65-4edf33d52160-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.181324 4811 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/e06e289b-fb43-4e7d-999a-38d8a4fc2178-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.181333 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e06e289b-fb43-4e7d-999a-38d8a4fc2178-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.181341 4811 reconciler_common.go:293] "Volume detached for volume 
\"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/e06e289b-fb43-4e7d-999a-38d8a4fc2178-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.181349 4811 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/e06e289b-fb43-4e7d-999a-38d8a4fc2178-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.181358 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e06e289b-fb43-4e7d-999a-38d8a4fc2178-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.181366 4811 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/e06e289b-fb43-4e7d-999a-38d8a4fc2178-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.434032 4811 generic.go:334] "Generic (PLEG): container finished" podID="8731a583-f443-4981-adf4-04431f77da50" containerID="80a47d6d78e960264bb33e177da16fbe4abd33aedd8c3332b69b669903b0efc8" exitCode=0 Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.434412 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-jkpn7" event={"ID":"8731a583-f443-4981-adf4-04431f77da50","Type":"ContainerDied","Data":"80a47d6d78e960264bb33e177da16fbe4abd33aedd8c3332b69b669903b0efc8"} Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.437452 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-fxxlf-config-qm27n" event={"ID":"0b6a4839-f828-40eb-ad65-4edf33d52160","Type":"ContainerDied","Data":"e0a535654604bcc5f8d8dc52e188448c8dc379d08a7fa9a185df5665575204d6"} Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.437499 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e0a535654604bcc5f8d8dc52e188448c8dc379d08a7fa9a185df5665575204d6" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.437561 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-fxxlf-config-qm27n" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.441068 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-z7x67" event={"ID":"e06e289b-fb43-4e7d-999a-38d8a4fc2178","Type":"ContainerDied","Data":"b7f2b5583565e8e4c292d26bbd515d6d5c515d6f51ae0de065c99f6d1b3fe63e"} Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.441098 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-z7x67" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.441109 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b7f2b5583565e8e4c292d26bbd515d6d5c515d6f51ae0de065c99f6d1b3fe63e" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.444406 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95f5f6995-6ztk9" event={"ID":"a6313906-9e80-41ce-b4c1-2f673d09425b","Type":"ContainerDied","Data":"92e6abdb6443ea186120b3948813d56e66dc31894971b27cd0a8e56c332d9f87"} Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.444515 4811 scope.go:117] "RemoveContainer" containerID="f4818e02d5dde7e04b51695018c6c3077d70d55037f32f37689cd0aa7713cdd7" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.444445 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95f5f6995-6ztk9" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.466209 4811 scope.go:117] "RemoveContainer" containerID="6c88cb44506b2bebe3348f7665aa8d4cbbc47bdb5fbce7f8a76755269c9e1823" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.484155 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-6ztk9"] Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.491744 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-6ztk9"] Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.633146 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-dv6jj"] Jan 28 16:05:56 crc kubenswrapper[4811]: E0128 16:05:56.633489 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6313906-9e80-41ce-b4c1-2f673d09425b" containerName="dnsmasq-dns" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.633502 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6313906-9e80-41ce-b4c1-2f673d09425b" containerName="dnsmasq-dns" Jan 28 16:05:56 crc kubenswrapper[4811]: E0128 16:05:56.633515 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b6a4839-f828-40eb-ad65-4edf33d52160" containerName="ovn-config" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.633521 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b6a4839-f828-40eb-ad65-4edf33d52160" containerName="ovn-config" Jan 28 16:05:56 crc kubenswrapper[4811]: E0128 16:05:56.633540 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6313906-9e80-41ce-b4c1-2f673d09425b" containerName="init" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.633546 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6313906-9e80-41ce-b4c1-2f673d09425b" containerName="init" Jan 28 16:05:56 crc kubenswrapper[4811]: E0128 16:05:56.633572 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e06e289b-fb43-4e7d-999a-38d8a4fc2178" containerName="swift-ring-rebalance" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.633578 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="e06e289b-fb43-4e7d-999a-38d8a4fc2178" containerName="swift-ring-rebalance" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.633727 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="e06e289b-fb43-4e7d-999a-38d8a4fc2178" containerName="swift-ring-rebalance" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.633738 4811 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="0b6a4839-f828-40eb-ad65-4edf33d52160" containerName="ovn-config" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.633755 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6313906-9e80-41ce-b4c1-2f673d09425b" containerName="dnsmasq-dns" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.634242 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-dv6jj" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.647625 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-dv6jj"] Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.693223 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/411c0e6f-b062-4538-8fda-434912e7c73d-operator-scripts\") pod \"keystone-db-create-dv6jj\" (UID: \"411c0e6f-b062-4538-8fda-434912e7c73d\") " pod="openstack/keystone-db-create-dv6jj" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.693349 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5p4mj\" (UniqueName: \"kubernetes.io/projected/411c0e6f-b062-4538-8fda-434912e7c73d-kube-api-access-5p4mj\") pod \"keystone-db-create-dv6jj\" (UID: \"411c0e6f-b062-4538-8fda-434912e7c73d\") " pod="openstack/keystone-db-create-dv6jj" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.778222 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-9b5c-account-create-update-4frsd"] Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.779208 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9b5c-account-create-update-4frsd" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.782962 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.793116 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-9b5c-account-create-update-4frsd"] Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.794985 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5p4mj\" (UniqueName: \"kubernetes.io/projected/411c0e6f-b062-4538-8fda-434912e7c73d-kube-api-access-5p4mj\") pod \"keystone-db-create-dv6jj\" (UID: \"411c0e6f-b062-4538-8fda-434912e7c73d\") " pod="openstack/keystone-db-create-dv6jj" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.795133 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/411c0e6f-b062-4538-8fda-434912e7c73d-operator-scripts\") pod \"keystone-db-create-dv6jj\" (UID: \"411c0e6f-b062-4538-8fda-434912e7c73d\") " pod="openstack/keystone-db-create-dv6jj" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.795960 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/411c0e6f-b062-4538-8fda-434912e7c73d-operator-scripts\") pod \"keystone-db-create-dv6jj\" (UID: \"411c0e6f-b062-4538-8fda-434912e7c73d\") " pod="openstack/keystone-db-create-dv6jj" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.819388 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5p4mj\" (UniqueName: 
\"kubernetes.io/projected/411c0e6f-b062-4538-8fda-434912e7c73d-kube-api-access-5p4mj\") pod \"keystone-db-create-dv6jj\" (UID: \"411c0e6f-b062-4538-8fda-434912e7c73d\") " pod="openstack/keystone-db-create-dv6jj" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.896508 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63a654e3-54e7-4464-b0b8-683f5f809345-operator-scripts\") pod \"keystone-9b5c-account-create-update-4frsd\" (UID: \"63a654e3-54e7-4464-b0b8-683f5f809345\") " pod="openstack/keystone-9b5c-account-create-update-4frsd" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.896550 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ps4gr\" (UniqueName: \"kubernetes.io/projected/63a654e3-54e7-4464-b0b8-683f5f809345-kube-api-access-ps4gr\") pod \"keystone-9b5c-account-create-update-4frsd\" (UID: \"63a654e3-54e7-4464-b0b8-683f5f809345\") " pod="openstack/keystone-9b5c-account-create-update-4frsd" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.948885 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-dv6jj" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.998714 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63a654e3-54e7-4464-b0b8-683f5f809345-operator-scripts\") pod \"keystone-9b5c-account-create-update-4frsd\" (UID: \"63a654e3-54e7-4464-b0b8-683f5f809345\") " pod="openstack/keystone-9b5c-account-create-update-4frsd" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.998769 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ps4gr\" (UniqueName: \"kubernetes.io/projected/63a654e3-54e7-4464-b0b8-683f5f809345-kube-api-access-ps4gr\") pod \"keystone-9b5c-account-create-update-4frsd\" (UID: \"63a654e3-54e7-4464-b0b8-683f5f809345\") " pod="openstack/keystone-9b5c-account-create-update-4frsd" Jan 28 16:05:56 crc kubenswrapper[4811]: I0128 16:05:56.999582 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63a654e3-54e7-4464-b0b8-683f5f809345-operator-scripts\") pod \"keystone-9b5c-account-create-update-4frsd\" (UID: \"63a654e3-54e7-4464-b0b8-683f5f809345\") " pod="openstack/keystone-9b5c-account-create-update-4frsd" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.003221 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-9sxhc"] Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.004398 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-9sxhc" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.032369 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-c7ca-account-create-update-mmwlc"] Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.037738 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ps4gr\" (UniqueName: \"kubernetes.io/projected/63a654e3-54e7-4464-b0b8-683f5f809345-kube-api-access-ps4gr\") pod \"keystone-9b5c-account-create-update-4frsd\" (UID: \"63a654e3-54e7-4464-b0b8-683f5f809345\") " pod="openstack/keystone-9b5c-account-create-update-4frsd" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.041658 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-c7ca-account-create-update-mmwlc" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.063232 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.068506 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-9sxhc"] Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.095818 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-c7ca-account-create-update-mmwlc"] Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.099955 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ln9sx\" (UniqueName: \"kubernetes.io/projected/a6f56fef-7365-409e-beaa-1b91d5ea44c4-kube-api-access-ln9sx\") pod \"placement-db-create-9sxhc\" (UID: \"a6f56fef-7365-409e-beaa-1b91d5ea44c4\") " pod="openstack/placement-db-create-9sxhc" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.100220 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a6f56fef-7365-409e-beaa-1b91d5ea44c4-operator-scripts\") pod \"placement-db-create-9sxhc\" (UID: \"a6f56fef-7365-409e-beaa-1b91d5ea44c4\") " pod="openstack/placement-db-create-9sxhc" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.100304 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pw48x\" (UniqueName: \"kubernetes.io/projected/b38aa2ec-ec93-453c-8a6f-7943114f5047-kube-api-access-pw48x\") pod \"placement-c7ca-account-create-update-mmwlc\" (UID: \"b38aa2ec-ec93-453c-8a6f-7943114f5047\") " pod="openstack/placement-c7ca-account-create-update-mmwlc" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.100417 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b38aa2ec-ec93-453c-8a6f-7943114f5047-operator-scripts\") pod \"placement-c7ca-account-create-update-mmwlc\" (UID: \"b38aa2ec-ec93-453c-8a6f-7943114f5047\") " pod="openstack/placement-c7ca-account-create-update-mmwlc" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.107049 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-9b5c-account-create-update-4frsd" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.210104 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ln9sx\" (UniqueName: \"kubernetes.io/projected/a6f56fef-7365-409e-beaa-1b91d5ea44c4-kube-api-access-ln9sx\") pod \"placement-db-create-9sxhc\" (UID: \"a6f56fef-7365-409e-beaa-1b91d5ea44c4\") " pod="openstack/placement-db-create-9sxhc" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.210186 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a6f56fef-7365-409e-beaa-1b91d5ea44c4-operator-scripts\") pod \"placement-db-create-9sxhc\" (UID: \"a6f56fef-7365-409e-beaa-1b91d5ea44c4\") " pod="openstack/placement-db-create-9sxhc" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.210218 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pw48x\" (UniqueName: \"kubernetes.io/projected/b38aa2ec-ec93-453c-8a6f-7943114f5047-kube-api-access-pw48x\") pod \"placement-c7ca-account-create-update-mmwlc\" (UID: \"b38aa2ec-ec93-453c-8a6f-7943114f5047\") " pod="openstack/placement-c7ca-account-create-update-mmwlc" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.210333 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b38aa2ec-ec93-453c-8a6f-7943114f5047-operator-scripts\") pod \"placement-c7ca-account-create-update-mmwlc\" (UID: \"b38aa2ec-ec93-453c-8a6f-7943114f5047\") " pod="openstack/placement-c7ca-account-create-update-mmwlc" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.211174 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a6f56fef-7365-409e-beaa-1b91d5ea44c4-operator-scripts\") pod \"placement-db-create-9sxhc\" (UID: \"a6f56fef-7365-409e-beaa-1b91d5ea44c4\") " pod="openstack/placement-db-create-9sxhc" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.214218 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b38aa2ec-ec93-453c-8a6f-7943114f5047-operator-scripts\") pod \"placement-c7ca-account-create-update-mmwlc\" (UID: \"b38aa2ec-ec93-453c-8a6f-7943114f5047\") " pod="openstack/placement-c7ca-account-create-update-mmwlc" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.255404 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-fxxlf-config-qm27n"] Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.261366 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pw48x\" (UniqueName: \"kubernetes.io/projected/b38aa2ec-ec93-453c-8a6f-7943114f5047-kube-api-access-pw48x\") pod \"placement-c7ca-account-create-update-mmwlc\" (UID: \"b38aa2ec-ec93-453c-8a6f-7943114f5047\") " pod="openstack/placement-c7ca-account-create-update-mmwlc" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.262060 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ln9sx\" (UniqueName: \"kubernetes.io/projected/a6f56fef-7365-409e-beaa-1b91d5ea44c4-kube-api-access-ln9sx\") pod \"placement-db-create-9sxhc\" (UID: \"a6f56fef-7365-409e-beaa-1b91d5ea44c4\") " pod="openstack/placement-db-create-9sxhc" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.273660 
4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-fxxlf-config-qm27n"] Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.310662 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-k2v5v"] Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.312313 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-k2v5v" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.319071 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-fxxlf" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.371915 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-k2v5v"] Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.405920 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-9sxhc" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.417750 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pf4mx\" (UniqueName: \"kubernetes.io/projected/ce03e462-76ab-488d-81d9-578a4105320c-kube-api-access-pf4mx\") pod \"glance-db-create-k2v5v\" (UID: \"ce03e462-76ab-488d-81d9-578a4105320c\") " pod="openstack/glance-db-create-k2v5v" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.417825 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ce03e462-76ab-488d-81d9-578a4105320c-operator-scripts\") pod \"glance-db-create-k2v5v\" (UID: \"ce03e462-76ab-488d-81d9-578a4105320c\") " pod="openstack/glance-db-create-k2v5v" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.424318 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-c7ca-account-create-update-mmwlc" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.428730 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-c25e-account-create-update-r58t5"] Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.430322 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-c25e-account-create-update-r58t5" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.441904 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.519686 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hjg5\" (UniqueName: \"kubernetes.io/projected/3dd67fd5-42d7-409d-8c89-3b2e9fe323c8-kube-api-access-6hjg5\") pod \"glance-c25e-account-create-update-r58t5\" (UID: \"3dd67fd5-42d7-409d-8c89-3b2e9fe323c8\") " pod="openstack/glance-c25e-account-create-update-r58t5" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.520137 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ce03e462-76ab-488d-81d9-578a4105320c-operator-scripts\") pod \"glance-db-create-k2v5v\" (UID: \"ce03e462-76ab-488d-81d9-578a4105320c\") " pod="openstack/glance-db-create-k2v5v" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.520296 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3dd67fd5-42d7-409d-8c89-3b2e9fe323c8-operator-scripts\") pod \"glance-c25e-account-create-update-r58t5\" (UID: \"3dd67fd5-42d7-409d-8c89-3b2e9fe323c8\") " pod="openstack/glance-c25e-account-create-update-r58t5" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.520390 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pf4mx\" (UniqueName: \"kubernetes.io/projected/ce03e462-76ab-488d-81d9-578a4105320c-kube-api-access-pf4mx\") pod \"glance-db-create-k2v5v\" (UID: \"ce03e462-76ab-488d-81d9-578a4105320c\") " pod="openstack/glance-db-create-k2v5v" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.521336 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ce03e462-76ab-488d-81d9-578a4105320c-operator-scripts\") pod \"glance-db-create-k2v5v\" (UID: \"ce03e462-76ab-488d-81d9-578a4105320c\") " pod="openstack/glance-db-create-k2v5v" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.556280 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-c25e-account-create-update-r58t5"] Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.567290 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pf4mx\" (UniqueName: \"kubernetes.io/projected/ce03e462-76ab-488d-81d9-578a4105320c-kube-api-access-pf4mx\") pod \"glance-db-create-k2v5v\" (UID: \"ce03e462-76ab-488d-81d9-578a4105320c\") " pod="openstack/glance-db-create-k2v5v" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.618886 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-fxxlf-config-d56bg"] Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.620073 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-fxxlf-config-d56bg" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.622765 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3dd67fd5-42d7-409d-8c89-3b2e9fe323c8-operator-scripts\") pod \"glance-c25e-account-create-update-r58t5\" (UID: \"3dd67fd5-42d7-409d-8c89-3b2e9fe323c8\") " pod="openstack/glance-c25e-account-create-update-r58t5" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.623526 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hjg5\" (UniqueName: \"kubernetes.io/projected/3dd67fd5-42d7-409d-8c89-3b2e9fe323c8-kube-api-access-6hjg5\") pod \"glance-c25e-account-create-update-r58t5\" (UID: \"3dd67fd5-42d7-409d-8c89-3b2e9fe323c8\") " pod="openstack/glance-c25e-account-create-update-r58t5" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.623413 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3dd67fd5-42d7-409d-8c89-3b2e9fe323c8-operator-scripts\") pod \"glance-c25e-account-create-update-r58t5\" (UID: \"3dd67fd5-42d7-409d-8c89-3b2e9fe323c8\") " pod="openstack/glance-c25e-account-create-update-r58t5" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.624137 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.634855 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-fxxlf-config-d56bg"] Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.650746 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-k2v5v" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.658596 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6hjg5\" (UniqueName: \"kubernetes.io/projected/3dd67fd5-42d7-409d-8c89-3b2e9fe323c8-kube-api-access-6hjg5\") pod \"glance-c25e-account-create-update-r58t5\" (UID: \"3dd67fd5-42d7-409d-8c89-3b2e9fe323c8\") " pod="openstack/glance-c25e-account-create-update-r58t5" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.726634 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6976fddb-280c-4c2b-810a-33274bcfe173-scripts\") pod \"ovn-controller-fxxlf-config-d56bg\" (UID: \"6976fddb-280c-4c2b-810a-33274bcfe173\") " pod="openstack/ovn-controller-fxxlf-config-d56bg" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.726734 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6976fddb-280c-4c2b-810a-33274bcfe173-var-run\") pod \"ovn-controller-fxxlf-config-d56bg\" (UID: \"6976fddb-280c-4c2b-810a-33274bcfe173\") " pod="openstack/ovn-controller-fxxlf-config-d56bg" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.726755 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/6976fddb-280c-4c2b-810a-33274bcfe173-var-log-ovn\") pod \"ovn-controller-fxxlf-config-d56bg\" (UID: \"6976fddb-280c-4c2b-810a-33274bcfe173\") " pod="openstack/ovn-controller-fxxlf-config-d56bg" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.726857 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/6976fddb-280c-4c2b-810a-33274bcfe173-additional-scripts\") pod \"ovn-controller-fxxlf-config-d56bg\" (UID: \"6976fddb-280c-4c2b-810a-33274bcfe173\") " pod="openstack/ovn-controller-fxxlf-config-d56bg" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.727010 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rcgpz\" (UniqueName: \"kubernetes.io/projected/6976fddb-280c-4c2b-810a-33274bcfe173-kube-api-access-rcgpz\") pod \"ovn-controller-fxxlf-config-d56bg\" (UID: \"6976fddb-280c-4c2b-810a-33274bcfe173\") " pod="openstack/ovn-controller-fxxlf-config-d56bg" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.727040 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/6976fddb-280c-4c2b-810a-33274bcfe173-var-run-ovn\") pod \"ovn-controller-fxxlf-config-d56bg\" (UID: \"6976fddb-280c-4c2b-810a-33274bcfe173\") " pod="openstack/ovn-controller-fxxlf-config-d56bg" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.807721 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-c25e-account-create-update-r58t5" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.828553 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/6976fddb-280c-4c2b-810a-33274bcfe173-additional-scripts\") pod \"ovn-controller-fxxlf-config-d56bg\" (UID: \"6976fddb-280c-4c2b-810a-33274bcfe173\") " pod="openstack/ovn-controller-fxxlf-config-d56bg" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.828663 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rcgpz\" (UniqueName: \"kubernetes.io/projected/6976fddb-280c-4c2b-810a-33274bcfe173-kube-api-access-rcgpz\") pod \"ovn-controller-fxxlf-config-d56bg\" (UID: \"6976fddb-280c-4c2b-810a-33274bcfe173\") " pod="openstack/ovn-controller-fxxlf-config-d56bg" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.828691 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/6976fddb-280c-4c2b-810a-33274bcfe173-var-run-ovn\") pod \"ovn-controller-fxxlf-config-d56bg\" (UID: \"6976fddb-280c-4c2b-810a-33274bcfe173\") " pod="openstack/ovn-controller-fxxlf-config-d56bg" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.828726 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6976fddb-280c-4c2b-810a-33274bcfe173-scripts\") pod \"ovn-controller-fxxlf-config-d56bg\" (UID: \"6976fddb-280c-4c2b-810a-33274bcfe173\") " pod="openstack/ovn-controller-fxxlf-config-d56bg" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.828778 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6976fddb-280c-4c2b-810a-33274bcfe173-var-run\") pod \"ovn-controller-fxxlf-config-d56bg\" (UID: \"6976fddb-280c-4c2b-810a-33274bcfe173\") " pod="openstack/ovn-controller-fxxlf-config-d56bg" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.828802 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/6976fddb-280c-4c2b-810a-33274bcfe173-var-log-ovn\") pod \"ovn-controller-fxxlf-config-d56bg\" (UID: \"6976fddb-280c-4c2b-810a-33274bcfe173\") " pod="openstack/ovn-controller-fxxlf-config-d56bg" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.829624 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/6976fddb-280c-4c2b-810a-33274bcfe173-var-log-ovn\") pod \"ovn-controller-fxxlf-config-d56bg\" (UID: \"6976fddb-280c-4c2b-810a-33274bcfe173\") " pod="openstack/ovn-controller-fxxlf-config-d56bg" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.829833 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/6976fddb-280c-4c2b-810a-33274bcfe173-var-run-ovn\") pod \"ovn-controller-fxxlf-config-d56bg\" (UID: \"6976fddb-280c-4c2b-810a-33274bcfe173\") " pod="openstack/ovn-controller-fxxlf-config-d56bg" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.830404 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6976fddb-280c-4c2b-810a-33274bcfe173-var-run\") pod \"ovn-controller-fxxlf-config-d56bg\" (UID: \"6976fddb-280c-4c2b-810a-33274bcfe173\") " 
pod="openstack/ovn-controller-fxxlf-config-d56bg" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.830756 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/6976fddb-280c-4c2b-810a-33274bcfe173-additional-scripts\") pod \"ovn-controller-fxxlf-config-d56bg\" (UID: \"6976fddb-280c-4c2b-810a-33274bcfe173\") " pod="openstack/ovn-controller-fxxlf-config-d56bg" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.837354 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6976fddb-280c-4c2b-810a-33274bcfe173-scripts\") pod \"ovn-controller-fxxlf-config-d56bg\" (UID: \"6976fddb-280c-4c2b-810a-33274bcfe173\") " pod="openstack/ovn-controller-fxxlf-config-d56bg" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.859942 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rcgpz\" (UniqueName: \"kubernetes.io/projected/6976fddb-280c-4c2b-810a-33274bcfe173-kube-api-access-rcgpz\") pod \"ovn-controller-fxxlf-config-d56bg\" (UID: \"6976fddb-280c-4c2b-810a-33274bcfe173\") " pod="openstack/ovn-controller-fxxlf-config-d56bg" Jan 28 16:05:57 crc kubenswrapper[4811]: I0128 16:05:57.978927 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-fxxlf-config-d56bg" Jan 28 16:05:58 crc kubenswrapper[4811]: I0128 16:05:58.058966 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-9b5c-account-create-update-4frsd"] Jan 28 16:05:58 crc kubenswrapper[4811]: I0128 16:05:58.138409 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-dv6jj"] Jan 28 16:05:58 crc kubenswrapper[4811]: I0128 16:05:58.211859 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-jkpn7" Jan 28 16:05:58 crc kubenswrapper[4811]: I0128 16:05:58.340254 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vfsrd\" (UniqueName: \"kubernetes.io/projected/8731a583-f443-4981-adf4-04431f77da50-kube-api-access-vfsrd\") pod \"8731a583-f443-4981-adf4-04431f77da50\" (UID: \"8731a583-f443-4981-adf4-04431f77da50\") " Jan 28 16:05:58 crc kubenswrapper[4811]: I0128 16:05:58.340410 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8731a583-f443-4981-adf4-04431f77da50-operator-scripts\") pod \"8731a583-f443-4981-adf4-04431f77da50\" (UID: \"8731a583-f443-4981-adf4-04431f77da50\") " Jan 28 16:05:58 crc kubenswrapper[4811]: I0128 16:05:58.341759 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8731a583-f443-4981-adf4-04431f77da50-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8731a583-f443-4981-adf4-04431f77da50" (UID: "8731a583-f443-4981-adf4-04431f77da50"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:05:58 crc kubenswrapper[4811]: I0128 16:05:58.350783 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8731a583-f443-4981-adf4-04431f77da50-kube-api-access-vfsrd" (OuterVolumeSpecName: "kube-api-access-vfsrd") pod "8731a583-f443-4981-adf4-04431f77da50" (UID: "8731a583-f443-4981-adf4-04431f77da50"). InnerVolumeSpecName "kube-api-access-vfsrd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:05:58 crc kubenswrapper[4811]: I0128 16:05:58.376624 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b6a4839-f828-40eb-ad65-4edf33d52160" path="/var/lib/kubelet/pods/0b6a4839-f828-40eb-ad65-4edf33d52160/volumes" Jan 28 16:05:58 crc kubenswrapper[4811]: I0128 16:05:58.378455 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6313906-9e80-41ce-b4c1-2f673d09425b" path="/var/lib/kubelet/pods/a6313906-9e80-41ce-b4c1-2f673d09425b/volumes" Jan 28 16:05:58 crc kubenswrapper[4811]: I0128 16:05:58.443085 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8731a583-f443-4981-adf4-04431f77da50-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:58 crc kubenswrapper[4811]: I0128 16:05:58.443128 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vfsrd\" (UniqueName: \"kubernetes.io/projected/8731a583-f443-4981-adf4-04431f77da50-kube-api-access-vfsrd\") on node \"crc\" DevicePath \"\"" Jan 28 16:05:58 crc kubenswrapper[4811]: I0128 16:05:58.460403 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-c7ca-account-create-update-mmwlc"] Jan 28 16:05:58 crc kubenswrapper[4811]: I0128 16:05:58.500624 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-dv6jj" event={"ID":"411c0e6f-b062-4538-8fda-434912e7c73d","Type":"ContainerStarted","Data":"461b1f2a631c035ee0040497000f314a0c33ac50a81a9089d9f446efd4a45ed5"} Jan 28 16:05:58 crc kubenswrapper[4811]: I0128 16:05:58.501092 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-dv6jj" event={"ID":"411c0e6f-b062-4538-8fda-434912e7c73d","Type":"ContainerStarted","Data":"12c9e5264d3815ff15c90acd2026db8f1238cd71247f78a4c294bc20a97e9d16"} Jan 28 16:05:58 crc kubenswrapper[4811]: I0128 16:05:58.502421 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9b5c-account-create-update-4frsd" event={"ID":"63a654e3-54e7-4464-b0b8-683f5f809345","Type":"ContainerStarted","Data":"e8b149fc467c164b5dc15e84b4f1cb9f31f9cc10102d0bf7736810ca7a664fb6"} Jan 28 16:05:58 crc kubenswrapper[4811]: I0128 16:05:58.502458 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9b5c-account-create-update-4frsd" event={"ID":"63a654e3-54e7-4464-b0b8-683f5f809345","Type":"ContainerStarted","Data":"4ade87e9d792acfc6038e1c7766c5c2452d13edcc8550fe971a58cf7e98bc5e0"} Jan 28 16:05:58 crc kubenswrapper[4811]: I0128 16:05:58.506905 4811 util.go:48] "No ready sandbox for pod can be found. 
Jan 28 16:05:58 crc kubenswrapper[4811]: I0128 16:05:58.507768 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-jkpn7" event={"ID":"8731a583-f443-4981-adf4-04431f77da50","Type":"ContainerDied","Data":"1340b079a26544a54e6b247bfc24891c5c252cffe3969bf27df3185a9e9da52e"}
Jan 28 16:05:58 crc kubenswrapper[4811]: I0128 16:05:58.507799 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1340b079a26544a54e6b247bfc24891c5c252cffe3969bf27df3185a9e9da52e"
Jan 28 16:05:58 crc kubenswrapper[4811]: I0128 16:05:58.514655 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-c7ca-account-create-update-mmwlc" event={"ID":"b38aa2ec-ec93-453c-8a6f-7943114f5047","Type":"ContainerStarted","Data":"456cc2d3fa3e76a7cb9ff6239af11e70cbb820b40193c365dd5450553e817cd8"}
Jan 28 16:05:58 crc kubenswrapper[4811]: I0128 16:05:58.543507 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-create-dv6jj" podStartSLOduration=2.543489083 podStartE2EDuration="2.543489083s" podCreationTimestamp="2026-01-28 16:05:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:05:58.530097702 +0000 UTC m=+1251.284461275" watchObservedRunningTime="2026-01-28 16:05:58.543489083 +0000 UTC m=+1251.297852666"
Jan 28 16:05:58 crc kubenswrapper[4811]: I0128 16:05:58.576495 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-k2v5v"]
Jan 28 16:05:58 crc kubenswrapper[4811]: I0128 16:05:58.591122 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-9b5c-account-create-update-4frsd" podStartSLOduration=2.591068097 podStartE2EDuration="2.591068097s" podCreationTimestamp="2026-01-28 16:05:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:05:58.561866549 +0000 UTC m=+1251.316230142" watchObservedRunningTime="2026-01-28 16:05:58.591068097 +0000 UTC m=+1251.345431680"
Jan 28 16:05:58 crc kubenswrapper[4811]: W0128 16:05:58.598677 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda6f56fef_7365_409e_beaa_1b91d5ea44c4.slice/crio-e198c757050efb717bc6272ad1eae49ae61b642484dcf7d10344a8992f88ae8c WatchSource:0}: Error finding container e198c757050efb717bc6272ad1eae49ae61b642484dcf7d10344a8992f88ae8c: Status 404 returned error can't find the container with id e198c757050efb717bc6272ad1eae49ae61b642484dcf7d10344a8992f88ae8c
Jan 28 16:05:58 crc kubenswrapper[4811]: I0128 16:05:58.612086 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-9sxhc"]
Jan 28 16:05:58 crc kubenswrapper[4811]: I0128 16:05:58.683809 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-c25e-account-create-update-r58t5"]
Jan 28 16:05:58 crc kubenswrapper[4811]: I0128 16:05:58.839861 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-fxxlf-config-d56bg"]
Jan 28 16:05:59 crc kubenswrapper[4811]: I0128 16:05:59.524653 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-9sxhc" event={"ID":"a6f56fef-7365-409e-beaa-1b91d5ea44c4","Type":"ContainerStarted","Data":"faaf959bb7d719d81e75fe135d7ec377756b2770bdced5ddd98fe7f32a8ff3c2"}
Jan 28 16:05:59 crc kubenswrapper[4811]: I0128 16:05:59.524990 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-9sxhc" event={"ID":"a6f56fef-7365-409e-beaa-1b91d5ea44c4","Type":"ContainerStarted","Data":"e198c757050efb717bc6272ad1eae49ae61b642484dcf7d10344a8992f88ae8c"}
Jan 28 16:05:59 crc kubenswrapper[4811]: I0128 16:05:59.527297 4811 generic.go:334] "Generic (PLEG): container finished" podID="411c0e6f-b062-4538-8fda-434912e7c73d" containerID="461b1f2a631c035ee0040497000f314a0c33ac50a81a9089d9f446efd4a45ed5" exitCode=0
Jan 28 16:05:59 crc kubenswrapper[4811]: I0128 16:05:59.527366 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-dv6jj" event={"ID":"411c0e6f-b062-4538-8fda-434912e7c73d","Type":"ContainerDied","Data":"461b1f2a631c035ee0040497000f314a0c33ac50a81a9089d9f446efd4a45ed5"}
Jan 28 16:05:59 crc kubenswrapper[4811]: I0128 16:05:59.530210 4811 generic.go:334] "Generic (PLEG): container finished" podID="ce03e462-76ab-488d-81d9-578a4105320c" containerID="33fc76229b44af1d5e99aae4aff4bb9e9588948d91bdcbefef6909f49f3c29dd" exitCode=0
Jan 28 16:05:59 crc kubenswrapper[4811]: I0128 16:05:59.530319 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-k2v5v" event={"ID":"ce03e462-76ab-488d-81d9-578a4105320c","Type":"ContainerDied","Data":"33fc76229b44af1d5e99aae4aff4bb9e9588948d91bdcbefef6909f49f3c29dd"}
Jan 28 16:05:59 crc kubenswrapper[4811]: I0128 16:05:59.530335 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-k2v5v" event={"ID":"ce03e462-76ab-488d-81d9-578a4105320c","Type":"ContainerStarted","Data":"ea7538251c746ff204f3ab268e19db5439a11670c0ea67b71dae518e4bae4380"}
Jan 28 16:05:59 crc kubenswrapper[4811]: I0128 16:05:59.532310 4811 generic.go:334] "Generic (PLEG): container finished" podID="63a654e3-54e7-4464-b0b8-683f5f809345" containerID="e8b149fc467c164b5dc15e84b4f1cb9f31f9cc10102d0bf7736810ca7a664fb6" exitCode=0
Jan 28 16:05:59 crc kubenswrapper[4811]: I0128 16:05:59.532393 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9b5c-account-create-update-4frsd" event={"ID":"63a654e3-54e7-4464-b0b8-683f5f809345","Type":"ContainerDied","Data":"e8b149fc467c164b5dc15e84b4f1cb9f31f9cc10102d0bf7736810ca7a664fb6"}
Jan 28 16:05:59 crc kubenswrapper[4811]: I0128 16:05:59.534883 4811 generic.go:334] "Generic (PLEG): container finished" podID="b38aa2ec-ec93-453c-8a6f-7943114f5047" containerID="5a3fa3b8d7bb0be864b879d43a6dbbc29d9bee3fe0f2db101af93bfd5822456a" exitCode=0
Jan 28 16:05:59 crc kubenswrapper[4811]: I0128 16:05:59.535032 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-c7ca-account-create-update-mmwlc" event={"ID":"b38aa2ec-ec93-453c-8a6f-7943114f5047","Type":"ContainerDied","Data":"5a3fa3b8d7bb0be864b879d43a6dbbc29d9bee3fe0f2db101af93bfd5822456a"}
Jan 28 16:05:59 crc kubenswrapper[4811]: I0128 16:05:59.537126 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-c25e-account-create-update-r58t5" event={"ID":"3dd67fd5-42d7-409d-8c89-3b2e9fe323c8","Type":"ContainerStarted","Data":"d271cbcb27898c830d254bcb5c8dffe5a3f7640e10dbf52bc69624d036c550ec"}
Jan 28 16:05:59 crc kubenswrapper[4811]: I0128 16:05:59.537157 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-c25e-account-create-update-r58t5" event={"ID":"3dd67fd5-42d7-409d-8c89-3b2e9fe323c8","Type":"ContainerStarted","Data":"3a60c3f99937b24d6e4d16d1c17962a32eae0bf28754ddfd993a7d70f4fa143d"}
pod="openstack/glance-c25e-account-create-update-r58t5" event={"ID":"3dd67fd5-42d7-409d-8c89-3b2e9fe323c8","Type":"ContainerStarted","Data":"3a60c3f99937b24d6e4d16d1c17962a32eae0bf28754ddfd993a7d70f4fa143d"} Jan 28 16:05:59 crc kubenswrapper[4811]: I0128 16:05:59.541652 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-fxxlf-config-d56bg" event={"ID":"6976fddb-280c-4c2b-810a-33274bcfe173","Type":"ContainerStarted","Data":"2c0184b6555e2c8ad21e10f4b931d8a3dd221bc5ed55f0ff10d98fe0449c7daa"} Jan 28 16:05:59 crc kubenswrapper[4811]: I0128 16:05:59.541696 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-fxxlf-config-d56bg" event={"ID":"6976fddb-280c-4c2b-810a-33274bcfe173","Type":"ContainerStarted","Data":"ac0eb45e3e490bd18994d819d27f2651a035a9d49a8ec757fefbb0d3c050dd68"} Jan 28 16:05:59 crc kubenswrapper[4811]: I0128 16:05:59.554228 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-create-9sxhc" podStartSLOduration=3.554207113 podStartE2EDuration="3.554207113s" podCreationTimestamp="2026-01-28 16:05:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:05:59.546210617 +0000 UTC m=+1252.300574200" watchObservedRunningTime="2026-01-28 16:05:59.554207113 +0000 UTC m=+1252.308570706" Jan 28 16:05:59 crc kubenswrapper[4811]: I0128 16:05:59.588532 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-fxxlf-config-d56bg" podStartSLOduration=2.588514909 podStartE2EDuration="2.588514909s" podCreationTimestamp="2026-01-28 16:05:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:05:59.583349 +0000 UTC m=+1252.337712583" watchObservedRunningTime="2026-01-28 16:05:59.588514909 +0000 UTC m=+1252.342878492" Jan 28 16:05:59 crc kubenswrapper[4811]: I0128 16:05:59.602401 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-c25e-account-create-update-r58t5" podStartSLOduration=2.602379823 podStartE2EDuration="2.602379823s" podCreationTimestamp="2026-01-28 16:05:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:05:59.601236973 +0000 UTC m=+1252.355600556" watchObservedRunningTime="2026-01-28 16:05:59.602379823 +0000 UTC m=+1252.356743406" Jan 28 16:06:00 crc kubenswrapper[4811]: I0128 16:06:00.478259 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-jkpn7"] Jan 28 16:06:00 crc kubenswrapper[4811]: I0128 16:06:00.489292 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-jkpn7"] Jan 28 16:06:00 crc kubenswrapper[4811]: I0128 16:06:00.572984 4811 generic.go:334] "Generic (PLEG): container finished" podID="a6f56fef-7365-409e-beaa-1b91d5ea44c4" containerID="faaf959bb7d719d81e75fe135d7ec377756b2770bdced5ddd98fe7f32a8ff3c2" exitCode=0 Jan 28 16:06:00 crc kubenswrapper[4811]: I0128 16:06:00.573098 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-9sxhc" event={"ID":"a6f56fef-7365-409e-beaa-1b91d5ea44c4","Type":"ContainerDied","Data":"faaf959bb7d719d81e75fe135d7ec377756b2770bdced5ddd98fe7f32a8ff3c2"} Jan 28 16:06:00 crc kubenswrapper[4811]: I0128 16:06:00.576757 4811 generic.go:334] 
"Generic (PLEG): container finished" podID="3dd67fd5-42d7-409d-8c89-3b2e9fe323c8" containerID="d271cbcb27898c830d254bcb5c8dffe5a3f7640e10dbf52bc69624d036c550ec" exitCode=0 Jan 28 16:06:00 crc kubenswrapper[4811]: I0128 16:06:00.576809 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-c25e-account-create-update-r58t5" event={"ID":"3dd67fd5-42d7-409d-8c89-3b2e9fe323c8","Type":"ContainerDied","Data":"d271cbcb27898c830d254bcb5c8dffe5a3f7640e10dbf52bc69624d036c550ec"} Jan 28 16:06:00 crc kubenswrapper[4811]: I0128 16:06:00.579696 4811 generic.go:334] "Generic (PLEG): container finished" podID="6976fddb-280c-4c2b-810a-33274bcfe173" containerID="2c0184b6555e2c8ad21e10f4b931d8a3dd221bc5ed55f0ff10d98fe0449c7daa" exitCode=0 Jan 28 16:06:00 crc kubenswrapper[4811]: I0128 16:06:00.579922 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-fxxlf-config-d56bg" event={"ID":"6976fddb-280c-4c2b-810a-33274bcfe173","Type":"ContainerDied","Data":"2c0184b6555e2c8ad21e10f4b931d8a3dd221bc5ed55f0ff10d98fe0449c7daa"} Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.152977 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9b5c-account-create-update-4frsd" Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.157554 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-k2v5v" Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.215367 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pf4mx\" (UniqueName: \"kubernetes.io/projected/ce03e462-76ab-488d-81d9-578a4105320c-kube-api-access-pf4mx\") pod \"ce03e462-76ab-488d-81d9-578a4105320c\" (UID: \"ce03e462-76ab-488d-81d9-578a4105320c\") " Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.215550 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ce03e462-76ab-488d-81d9-578a4105320c-operator-scripts\") pod \"ce03e462-76ab-488d-81d9-578a4105320c\" (UID: \"ce03e462-76ab-488d-81d9-578a4105320c\") " Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.215627 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63a654e3-54e7-4464-b0b8-683f5f809345-operator-scripts\") pod \"63a654e3-54e7-4464-b0b8-683f5f809345\" (UID: \"63a654e3-54e7-4464-b0b8-683f5f809345\") " Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.215776 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ps4gr\" (UniqueName: \"kubernetes.io/projected/63a654e3-54e7-4464-b0b8-683f5f809345-kube-api-access-ps4gr\") pod \"63a654e3-54e7-4464-b0b8-683f5f809345\" (UID: \"63a654e3-54e7-4464-b0b8-683f5f809345\") " Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.217727 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63a654e3-54e7-4464-b0b8-683f5f809345-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "63a654e3-54e7-4464-b0b8-683f5f809345" (UID: "63a654e3-54e7-4464-b0b8-683f5f809345"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.218716 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ce03e462-76ab-488d-81d9-578a4105320c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ce03e462-76ab-488d-81d9-578a4105320c" (UID: "ce03e462-76ab-488d-81d9-578a4105320c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.219843 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63a654e3-54e7-4464-b0b8-683f5f809345-kube-api-access-ps4gr" (OuterVolumeSpecName: "kube-api-access-ps4gr") pod "63a654e3-54e7-4464-b0b8-683f5f809345" (UID: "63a654e3-54e7-4464-b0b8-683f5f809345"). InnerVolumeSpecName "kube-api-access-ps4gr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.229358 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce03e462-76ab-488d-81d9-578a4105320c-kube-api-access-pf4mx" (OuterVolumeSpecName: "kube-api-access-pf4mx") pod "ce03e462-76ab-488d-81d9-578a4105320c" (UID: "ce03e462-76ab-488d-81d9-578a4105320c"). InnerVolumeSpecName "kube-api-access-pf4mx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.255319 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-c7ca-account-create-update-mmwlc" Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.264508 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-dv6jj" Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.318083 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pw48x\" (UniqueName: \"kubernetes.io/projected/b38aa2ec-ec93-453c-8a6f-7943114f5047-kube-api-access-pw48x\") pod \"b38aa2ec-ec93-453c-8a6f-7943114f5047\" (UID: \"b38aa2ec-ec93-453c-8a6f-7943114f5047\") " Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.318160 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/411c0e6f-b062-4538-8fda-434912e7c73d-operator-scripts\") pod \"411c0e6f-b062-4538-8fda-434912e7c73d\" (UID: \"411c0e6f-b062-4538-8fda-434912e7c73d\") " Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.318184 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5p4mj\" (UniqueName: \"kubernetes.io/projected/411c0e6f-b062-4538-8fda-434912e7c73d-kube-api-access-5p4mj\") pod \"411c0e6f-b062-4538-8fda-434912e7c73d\" (UID: \"411c0e6f-b062-4538-8fda-434912e7c73d\") " Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.318202 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b38aa2ec-ec93-453c-8a6f-7943114f5047-operator-scripts\") pod \"b38aa2ec-ec93-453c-8a6f-7943114f5047\" (UID: \"b38aa2ec-ec93-453c-8a6f-7943114f5047\") " Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.318878 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ps4gr\" (UniqueName: \"kubernetes.io/projected/63a654e3-54e7-4464-b0b8-683f5f809345-kube-api-access-ps4gr\") on node \"crc\" DevicePath \"\"" 
Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.318890 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pf4mx\" (UniqueName: \"kubernetes.io/projected/ce03e462-76ab-488d-81d9-578a4105320c-kube-api-access-pf4mx\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.318901 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ce03e462-76ab-488d-81d9-578a4105320c-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.318909 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63a654e3-54e7-4464-b0b8-683f5f809345-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.318942 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/411c0e6f-b062-4538-8fda-434912e7c73d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "411c0e6f-b062-4538-8fda-434912e7c73d" (UID: "411c0e6f-b062-4538-8fda-434912e7c73d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.319702 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b38aa2ec-ec93-453c-8a6f-7943114f5047-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b38aa2ec-ec93-453c-8a6f-7943114f5047" (UID: "b38aa2ec-ec93-453c-8a6f-7943114f5047"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.324037 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b38aa2ec-ec93-453c-8a6f-7943114f5047-kube-api-access-pw48x" (OuterVolumeSpecName: "kube-api-access-pw48x") pod "b38aa2ec-ec93-453c-8a6f-7943114f5047" (UID: "b38aa2ec-ec93-453c-8a6f-7943114f5047"). InnerVolumeSpecName "kube-api-access-pw48x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.324786 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/411c0e6f-b062-4538-8fda-434912e7c73d-kube-api-access-5p4mj" (OuterVolumeSpecName: "kube-api-access-5p4mj") pod "411c0e6f-b062-4538-8fda-434912e7c73d" (UID: "411c0e6f-b062-4538-8fda-434912e7c73d"). InnerVolumeSpecName "kube-api-access-5p4mj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.420743 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pw48x\" (UniqueName: \"kubernetes.io/projected/b38aa2ec-ec93-453c-8a6f-7943114f5047-kube-api-access-pw48x\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.420781 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/411c0e6f-b062-4538-8fda-434912e7c73d-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.420790 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5p4mj\" (UniqueName: \"kubernetes.io/projected/411c0e6f-b062-4538-8fda-434912e7c73d-kube-api-access-5p4mj\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.420801 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b38aa2ec-ec93-453c-8a6f-7943114f5047-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.587934 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"6dd44d16-5b30-493c-9dd2-1ba856a4393a","Type":"ContainerStarted","Data":"4bf48c2e9b3287834317b579380166a3b7e88ed93df7ceeb2163cfc00138fa94"} Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.588770 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.591332 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-dv6jj" Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.592413 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-dv6jj" event={"ID":"411c0e6f-b062-4538-8fda-434912e7c73d","Type":"ContainerDied","Data":"12c9e5264d3815ff15c90acd2026db8f1238cd71247f78a4c294bc20a97e9d16"} Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.592489 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="12c9e5264d3815ff15c90acd2026db8f1238cd71247f78a4c294bc20a97e9d16" Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.594098 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9b5c-account-create-update-4frsd" event={"ID":"63a654e3-54e7-4464-b0b8-683f5f809345","Type":"ContainerDied","Data":"4ade87e9d792acfc6038e1c7766c5c2452d13edcc8550fe971a58cf7e98bc5e0"} Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.594138 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4ade87e9d792acfc6038e1c7766c5c2452d13edcc8550fe971a58cf7e98bc5e0" Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.594231 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-9b5c-account-create-update-4frsd" Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.597308 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-k2v5v" event={"ID":"ce03e462-76ab-488d-81d9-578a4105320c","Type":"ContainerDied","Data":"ea7538251c746ff204f3ab268e19db5439a11670c0ea67b71dae518e4bae4380"} Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.597341 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ea7538251c746ff204f3ab268e19db5439a11670c0ea67b71dae518e4bae4380" Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.597404 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-k2v5v" Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.602911 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-c7ca-account-create-update-mmwlc" event={"ID":"b38aa2ec-ec93-453c-8a6f-7943114f5047","Type":"ContainerDied","Data":"456cc2d3fa3e76a7cb9ff6239af11e70cbb820b40193c365dd5450553e817cd8"} Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.603058 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-c7ca-account-create-update-mmwlc" Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.607036 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="456cc2d3fa3e76a7cb9ff6239af11e70cbb820b40193c365dd5450553e817cd8" Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.620834 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.534038284 podStartE2EDuration="33.620817143s" podCreationTimestamp="2026-01-28 16:05:28 +0000 UTC" firstStartedPulling="2026-01-28 16:05:30.062719547 +0000 UTC m=+1222.817083130" lastFinishedPulling="2026-01-28 16:06:01.149498406 +0000 UTC m=+1253.903861989" observedRunningTime="2026-01-28 16:06:01.614706908 +0000 UTC m=+1254.369070491" watchObservedRunningTime="2026-01-28 16:06:01.620817143 +0000 UTC m=+1254.375180726" Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.911931 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-9sxhc" Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.957712 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-fxxlf-config-d56bg" Jan 28 16:06:01 crc kubenswrapper[4811]: I0128 16:06:01.992080 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-c25e-account-create-update-r58t5" Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.033490 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/6976fddb-280c-4c2b-810a-33274bcfe173-var-run-ovn\") pod \"6976fddb-280c-4c2b-810a-33274bcfe173\" (UID: \"6976fddb-280c-4c2b-810a-33274bcfe173\") " Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.033627 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6976fddb-280c-4c2b-810a-33274bcfe173-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "6976fddb-280c-4c2b-810a-33274bcfe173" (UID: "6976fddb-280c-4c2b-810a-33274bcfe173"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.033631 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6976fddb-280c-4c2b-810a-33274bcfe173-scripts\") pod \"6976fddb-280c-4c2b-810a-33274bcfe173\" (UID: \"6976fddb-280c-4c2b-810a-33274bcfe173\") " Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.034583 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6976fddb-280c-4c2b-810a-33274bcfe173-var-run\") pod \"6976fddb-280c-4c2b-810a-33274bcfe173\" (UID: \"6976fddb-280c-4c2b-810a-33274bcfe173\") " Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.034612 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/6976fddb-280c-4c2b-810a-33274bcfe173-var-log-ovn\") pod \"6976fddb-280c-4c2b-810a-33274bcfe173\" (UID: \"6976fddb-280c-4c2b-810a-33274bcfe173\") " Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.034641 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rcgpz\" (UniqueName: \"kubernetes.io/projected/6976fddb-280c-4c2b-810a-33274bcfe173-kube-api-access-rcgpz\") pod \"6976fddb-280c-4c2b-810a-33274bcfe173\" (UID: \"6976fddb-280c-4c2b-810a-33274bcfe173\") " Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.034705 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6976fddb-280c-4c2b-810a-33274bcfe173-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "6976fddb-280c-4c2b-810a-33274bcfe173" (UID: "6976fddb-280c-4c2b-810a-33274bcfe173"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.034702 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6976fddb-280c-4c2b-810a-33274bcfe173-var-run" (OuterVolumeSpecName: "var-run") pod "6976fddb-280c-4c2b-810a-33274bcfe173" (UID: "6976fddb-280c-4c2b-810a-33274bcfe173"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.034718 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a6f56fef-7365-409e-beaa-1b91d5ea44c4-operator-scripts\") pod \"a6f56fef-7365-409e-beaa-1b91d5ea44c4\" (UID: \"a6f56fef-7365-409e-beaa-1b91d5ea44c4\") " Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.034802 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/6976fddb-280c-4c2b-810a-33274bcfe173-additional-scripts\") pod \"6976fddb-280c-4c2b-810a-33274bcfe173\" (UID: \"6976fddb-280c-4c2b-810a-33274bcfe173\") " Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.034853 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ln9sx\" (UniqueName: \"kubernetes.io/projected/a6f56fef-7365-409e-beaa-1b91d5ea44c4-kube-api-access-ln9sx\") pod \"a6f56fef-7365-409e-beaa-1b91d5ea44c4\" (UID: \"a6f56fef-7365-409e-beaa-1b91d5ea44c4\") " Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.035044 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6f56fef-7365-409e-beaa-1b91d5ea44c4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a6f56fef-7365-409e-beaa-1b91d5ea44c4" (UID: "a6f56fef-7365-409e-beaa-1b91d5ea44c4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.035376 4811 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/6976fddb-280c-4c2b-810a-33274bcfe173-var-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.035401 4811 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6976fddb-280c-4c2b-810a-33274bcfe173-var-run\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.035413 4811 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/6976fddb-280c-4c2b-810a-33274bcfe173-var-log-ovn\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.035429 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a6f56fef-7365-409e-beaa-1b91d5ea44c4-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.035474 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6976fddb-280c-4c2b-810a-33274bcfe173-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "6976fddb-280c-4c2b-810a-33274bcfe173" (UID: "6976fddb-280c-4c2b-810a-33274bcfe173"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.035814 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6976fddb-280c-4c2b-810a-33274bcfe173-scripts" (OuterVolumeSpecName: "scripts") pod "6976fddb-280c-4c2b-810a-33274bcfe173" (UID: "6976fddb-280c-4c2b-810a-33274bcfe173"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.039672 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6f56fef-7365-409e-beaa-1b91d5ea44c4-kube-api-access-ln9sx" (OuterVolumeSpecName: "kube-api-access-ln9sx") pod "a6f56fef-7365-409e-beaa-1b91d5ea44c4" (UID: "a6f56fef-7365-409e-beaa-1b91d5ea44c4"). InnerVolumeSpecName "kube-api-access-ln9sx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.040827 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6976fddb-280c-4c2b-810a-33274bcfe173-kube-api-access-rcgpz" (OuterVolumeSpecName: "kube-api-access-rcgpz") pod "6976fddb-280c-4c2b-810a-33274bcfe173" (UID: "6976fddb-280c-4c2b-810a-33274bcfe173"). InnerVolumeSpecName "kube-api-access-rcgpz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.136285 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3dd67fd5-42d7-409d-8c89-3b2e9fe323c8-operator-scripts\") pod \"3dd67fd5-42d7-409d-8c89-3b2e9fe323c8\" (UID: \"3dd67fd5-42d7-409d-8c89-3b2e9fe323c8\") " Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.136378 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6hjg5\" (UniqueName: \"kubernetes.io/projected/3dd67fd5-42d7-409d-8c89-3b2e9fe323c8-kube-api-access-6hjg5\") pod \"3dd67fd5-42d7-409d-8c89-3b2e9fe323c8\" (UID: \"3dd67fd5-42d7-409d-8c89-3b2e9fe323c8\") " Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.136820 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6976fddb-280c-4c2b-810a-33274bcfe173-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.136836 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rcgpz\" (UniqueName: \"kubernetes.io/projected/6976fddb-280c-4c2b-810a-33274bcfe173-kube-api-access-rcgpz\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.136848 4811 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/6976fddb-280c-4c2b-810a-33274bcfe173-additional-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.136858 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ln9sx\" (UniqueName: \"kubernetes.io/projected/a6f56fef-7365-409e-beaa-1b91d5ea44c4-kube-api-access-ln9sx\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.138153 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3dd67fd5-42d7-409d-8c89-3b2e9fe323c8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3dd67fd5-42d7-409d-8c89-3b2e9fe323c8" (UID: "3dd67fd5-42d7-409d-8c89-3b2e9fe323c8"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.141411 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3dd67fd5-42d7-409d-8c89-3b2e9fe323c8-kube-api-access-6hjg5" (OuterVolumeSpecName: "kube-api-access-6hjg5") pod "3dd67fd5-42d7-409d-8c89-3b2e9fe323c8" (UID: "3dd67fd5-42d7-409d-8c89-3b2e9fe323c8"). InnerVolumeSpecName "kube-api-access-6hjg5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.238761 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3dd67fd5-42d7-409d-8c89-3b2e9fe323c8-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.239218 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6hjg5\" (UniqueName: \"kubernetes.io/projected/3dd67fd5-42d7-409d-8c89-3b2e9fe323c8-kube-api-access-6hjg5\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.353641 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8731a583-f443-4981-adf4-04431f77da50" path="/var/lib/kubelet/pods/8731a583-f443-4981-adf4-04431f77da50/volumes" Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.442893 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-etc-swift\") pod \"swift-storage-0\" (UID: \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\") " pod="openstack/swift-storage-0" Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.454104 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-etc-swift\") pod \"swift-storage-0\" (UID: \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\") " pod="openstack/swift-storage-0" Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.612483 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-c25e-account-create-update-r58t5" event={"ID":"3dd67fd5-42d7-409d-8c89-3b2e9fe323c8","Type":"ContainerDied","Data":"3a60c3f99937b24d6e4d16d1c17962a32eae0bf28754ddfd993a7d70f4fa143d"} Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.613272 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3a60c3f99937b24d6e4d16d1c17962a32eae0bf28754ddfd993a7d70f4fa143d" Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.612581 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-c25e-account-create-update-r58t5" Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.614418 4811 util.go:48] "No ready sandbox for pod can be found. 
Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.614504 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-fxxlf-config-d56bg" event={"ID":"6976fddb-280c-4c2b-810a-33274bcfe173","Type":"ContainerDied","Data":"ac0eb45e3e490bd18994d819d27f2651a035a9d49a8ec757fefbb0d3c050dd68"}
Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.614536 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ac0eb45e3e490bd18994d819d27f2651a035a9d49a8ec757fefbb0d3c050dd68"
Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.616613 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-9sxhc"
Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.616612 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-9sxhc" event={"ID":"a6f56fef-7365-409e-beaa-1b91d5ea44c4","Type":"ContainerDied","Data":"e198c757050efb717bc6272ad1eae49ae61b642484dcf7d10344a8992f88ae8c"}
Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.616652 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e198c757050efb717bc6272ad1eae49ae61b642484dcf7d10344a8992f88ae8c"
Jan 28 16:06:02 crc kubenswrapper[4811]: I0128 16:06:02.710203 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0"
Jan 28 16:06:03 crc kubenswrapper[4811]: I0128 16:06:03.049636 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-fxxlf-config-d56bg"]
Jan 28 16:06:03 crc kubenswrapper[4811]: I0128 16:06:03.062823 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-fxxlf-config-d56bg"]
Jan 28 16:06:03 crc kubenswrapper[4811]: I0128 16:06:03.087013 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 16:06:03 crc kubenswrapper[4811]: I0128 16:06:03.087077 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 16:06:03 crc kubenswrapper[4811]: I0128 16:06:03.301488 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"]
Jan 28 16:06:03 crc kubenswrapper[4811]: I0128 16:06:03.625749 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b","Type":"ContainerStarted","Data":"f26372c2e1c249ba75c4cceec86f49d43ffd77081b900992110ba959502c84f1"}
Jan 28 16:06:03 crc kubenswrapper[4811]: I0128 16:06:03.966688 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.218966 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-9g69v"]
Jan 28 16:06:04 crc kubenswrapper[4811]: E0128 16:06:04.219265 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6976fddb-280c-4c2b-810a-33274bcfe173" containerName="ovn-config"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.219283 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="6976fddb-280c-4c2b-810a-33274bcfe173" containerName="ovn-config"
Jan 28 16:06:04 crc kubenswrapper[4811]: E0128 16:06:04.219299 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="411c0e6f-b062-4538-8fda-434912e7c73d" containerName="mariadb-database-create"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.219305 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="411c0e6f-b062-4538-8fda-434912e7c73d" containerName="mariadb-database-create"
Jan 28 16:06:04 crc kubenswrapper[4811]: E0128 16:06:04.219316 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce03e462-76ab-488d-81d9-578a4105320c" containerName="mariadb-database-create"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.219322 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce03e462-76ab-488d-81d9-578a4105320c" containerName="mariadb-database-create"
Jan 28 16:06:04 crc kubenswrapper[4811]: E0128 16:06:04.219332 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dd67fd5-42d7-409d-8c89-3b2e9fe323c8" containerName="mariadb-account-create-update"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.219337 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dd67fd5-42d7-409d-8c89-3b2e9fe323c8" containerName="mariadb-account-create-update"
Jan 28 16:06:04 crc kubenswrapper[4811]: E0128 16:06:04.219348 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8731a583-f443-4981-adf4-04431f77da50" containerName="mariadb-account-create-update"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.219354 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="8731a583-f443-4981-adf4-04431f77da50" containerName="mariadb-account-create-update"
Jan 28 16:06:04 crc kubenswrapper[4811]: E0128 16:06:04.219362 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b38aa2ec-ec93-453c-8a6f-7943114f5047" containerName="mariadb-account-create-update"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.219367 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="b38aa2ec-ec93-453c-8a6f-7943114f5047" containerName="mariadb-account-create-update"
Jan 28 16:06:04 crc kubenswrapper[4811]: E0128 16:06:04.219403 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6f56fef-7365-409e-beaa-1b91d5ea44c4" containerName="mariadb-database-create"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.219410 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6f56fef-7365-409e-beaa-1b91d5ea44c4" containerName="mariadb-database-create"
Jan 28 16:06:04 crc kubenswrapper[4811]: E0128 16:06:04.219419 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63a654e3-54e7-4464-b0b8-683f5f809345" containerName="mariadb-account-create-update"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.219425 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="63a654e3-54e7-4464-b0b8-683f5f809345" containerName="mariadb-account-create-update"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.219831 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="63a654e3-54e7-4464-b0b8-683f5f809345" containerName="mariadb-account-create-update"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.219843 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce03e462-76ab-488d-81d9-578a4105320c" containerName="mariadb-database-create"
containerName="mariadb-database-create" Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.219851 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="b38aa2ec-ec93-453c-8a6f-7943114f5047" containerName="mariadb-account-create-update" Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.219859 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="8731a583-f443-4981-adf4-04431f77da50" containerName="mariadb-account-create-update" Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.219866 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="3dd67fd5-42d7-409d-8c89-3b2e9fe323c8" containerName="mariadb-account-create-update" Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.219877 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="411c0e6f-b062-4538-8fda-434912e7c73d" containerName="mariadb-database-create" Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.219885 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6f56fef-7365-409e-beaa-1b91d5ea44c4" containerName="mariadb-database-create" Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.219892 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="6976fddb-280c-4c2b-810a-33274bcfe173" containerName="ovn-config" Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.220365 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-9g69v" Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.222835 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret" Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.235960 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-9g69v"] Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.271662 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.282086 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99a30150-2d2e-420e-aba9-d7d99b62a612-operator-scripts\") pod \"root-account-create-update-9g69v\" (UID: \"99a30150-2d2e-420e-aba9-d7d99b62a612\") " pod="openstack/root-account-create-update-9g69v" Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.282414 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqq96\" (UniqueName: \"kubernetes.io/projected/99a30150-2d2e-420e-aba9-d7d99b62a612-kube-api-access-sqq96\") pod \"root-account-create-update-9g69v\" (UID: \"99a30150-2d2e-420e-aba9-d7d99b62a612\") " pod="openstack/root-account-create-update-9g69v" Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.333449 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-kqxgl"] Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.334501 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-kqxgl" Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.352839 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6976fddb-280c-4c2b-810a-33274bcfe173" path="/var/lib/kubelet/pods/6976fddb-280c-4c2b-810a-33274bcfe173/volumes" Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.365177 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-kqxgl"] Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.385588 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sqq96\" (UniqueName: \"kubernetes.io/projected/99a30150-2d2e-420e-aba9-d7d99b62a612-kube-api-access-sqq96\") pod \"root-account-create-update-9g69v\" (UID: \"99a30150-2d2e-420e-aba9-d7d99b62a612\") " pod="openstack/root-account-create-update-9g69v" Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.385678 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qx25x\" (UniqueName: \"kubernetes.io/projected/69c315f0-b684-4e32-b680-ca6bc1823beb-kube-api-access-qx25x\") pod \"barbican-db-create-kqxgl\" (UID: \"69c315f0-b684-4e32-b680-ca6bc1823beb\") " pod="openstack/barbican-db-create-kqxgl" Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.385755 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69c315f0-b684-4e32-b680-ca6bc1823beb-operator-scripts\") pod \"barbican-db-create-kqxgl\" (UID: \"69c315f0-b684-4e32-b680-ca6bc1823beb\") " pod="openstack/barbican-db-create-kqxgl" Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.385788 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99a30150-2d2e-420e-aba9-d7d99b62a612-operator-scripts\") pod \"root-account-create-update-9g69v\" (UID: \"99a30150-2d2e-420e-aba9-d7d99b62a612\") " pod="openstack/root-account-create-update-9g69v" Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.395042 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99a30150-2d2e-420e-aba9-d7d99b62a612-operator-scripts\") pod \"root-account-create-update-9g69v\" (UID: \"99a30150-2d2e-420e-aba9-d7d99b62a612\") " pod="openstack/root-account-create-update-9g69v" Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.426101 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqq96\" (UniqueName: \"kubernetes.io/projected/99a30150-2d2e-420e-aba9-d7d99b62a612-kube-api-access-sqq96\") pod \"root-account-create-update-9g69v\" (UID: \"99a30150-2d2e-420e-aba9-d7d99b62a612\") " pod="openstack/root-account-create-update-9g69v" Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.432450 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-2f6fc"] Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.433587 4811 util.go:30] "No sandbox for pod can be found. 
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.459811 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-2f6fc"]
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.487682 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69c315f0-b684-4e32-b680-ca6bc1823beb-operator-scripts\") pod \"barbican-db-create-kqxgl\" (UID: \"69c315f0-b684-4e32-b680-ca6bc1823beb\") " pod="openstack/barbican-db-create-kqxgl"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.487743 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tp6mw\" (UniqueName: \"kubernetes.io/projected/2f54b0ba-3a97-41d2-a048-01d6dd6daeca-kube-api-access-tp6mw\") pod \"cinder-db-create-2f6fc\" (UID: \"2f54b0ba-3a97-41d2-a048-01d6dd6daeca\") " pod="openstack/cinder-db-create-2f6fc"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.487856 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f54b0ba-3a97-41d2-a048-01d6dd6daeca-operator-scripts\") pod \"cinder-db-create-2f6fc\" (UID: \"2f54b0ba-3a97-41d2-a048-01d6dd6daeca\") " pod="openstack/cinder-db-create-2f6fc"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.487944 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qx25x\" (UniqueName: \"kubernetes.io/projected/69c315f0-b684-4e32-b680-ca6bc1823beb-kube-api-access-qx25x\") pod \"barbican-db-create-kqxgl\" (UID: \"69c315f0-b684-4e32-b680-ca6bc1823beb\") " pod="openstack/barbican-db-create-kqxgl"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.489689 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69c315f0-b684-4e32-b680-ca6bc1823beb-operator-scripts\") pod \"barbican-db-create-kqxgl\" (UID: \"69c315f0-b684-4e32-b680-ca6bc1823beb\") " pod="openstack/barbican-db-create-kqxgl"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.505521 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-89d7-account-create-update-mc8pj"]
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.506844 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-89d7-account-create-update-mc8pj"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.509927 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.541336 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qx25x\" (UniqueName: \"kubernetes.io/projected/69c315f0-b684-4e32-b680-ca6bc1823beb-kube-api-access-qx25x\") pod \"barbican-db-create-kqxgl\" (UID: \"69c315f0-b684-4e32-b680-ca6bc1823beb\") " pod="openstack/barbican-db-create-kqxgl"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.545513 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-89d7-account-create-update-mc8pj"]
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.552642 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-9g69v"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.565174 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-917c-account-create-update-kfdqq"]
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.566177 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-917c-account-create-update-kfdqq"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.572292 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.587940 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-917c-account-create-update-kfdqq"]
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.588892 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f54b0ba-3a97-41d2-a048-01d6dd6daeca-operator-scripts\") pod \"cinder-db-create-2f6fc\" (UID: \"2f54b0ba-3a97-41d2-a048-01d6dd6daeca\") " pod="openstack/cinder-db-create-2f6fc"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.588966 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b151453e-b13d-4bbe-b156-c7d264aefd8a-operator-scripts\") pod \"cinder-89d7-account-create-update-mc8pj\" (UID: \"b151453e-b13d-4bbe-b156-c7d264aefd8a\") " pod="openstack/cinder-89d7-account-create-update-mc8pj"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.589046 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tp6mw\" (UniqueName: \"kubernetes.io/projected/2f54b0ba-3a97-41d2-a048-01d6dd6daeca-kube-api-access-tp6mw\") pod \"cinder-db-create-2f6fc\" (UID: \"2f54b0ba-3a97-41d2-a048-01d6dd6daeca\") " pod="openstack/cinder-db-create-2f6fc"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.589072 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hkqht\" (UniqueName: \"kubernetes.io/projected/b151453e-b13d-4bbe-b156-c7d264aefd8a-kube-api-access-hkqht\") pod \"cinder-89d7-account-create-update-mc8pj\" (UID: \"b151453e-b13d-4bbe-b156-c7d264aefd8a\") " pod="openstack/cinder-89d7-account-create-update-mc8pj"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.589872 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f54b0ba-3a97-41d2-a048-01d6dd6daeca-operator-scripts\") pod \"cinder-db-create-2f6fc\" (UID: \"2f54b0ba-3a97-41d2-a048-01d6dd6daeca\") " pod="openstack/cinder-db-create-2f6fc"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.619104 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tp6mw\" (UniqueName: \"kubernetes.io/projected/2f54b0ba-3a97-41d2-a048-01d6dd6daeca-kube-api-access-tp6mw\") pod \"cinder-db-create-2f6fc\" (UID: \"2f54b0ba-3a97-41d2-a048-01d6dd6daeca\") " pod="openstack/cinder-db-create-2f6fc"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.654625 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-kqxgl"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.654625 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-kqxgl"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.692449 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b4a0a450-f81c-41d2-8400-b12baf800baa-operator-scripts\") pod \"barbican-917c-account-create-update-kfdqq\" (UID: \"b4a0a450-f81c-41d2-8400-b12baf800baa\") " pod="openstack/barbican-917c-account-create-update-kfdqq"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.692510 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hkqht\" (UniqueName: \"kubernetes.io/projected/b151453e-b13d-4bbe-b156-c7d264aefd8a-kube-api-access-hkqht\") pod \"cinder-89d7-account-create-update-mc8pj\" (UID: \"b151453e-b13d-4bbe-b156-c7d264aefd8a\") " pod="openstack/cinder-89d7-account-create-update-mc8pj"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.692574 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5mqgr\" (UniqueName: \"kubernetes.io/projected/b4a0a450-f81c-41d2-8400-b12baf800baa-kube-api-access-5mqgr\") pod \"barbican-917c-account-create-update-kfdqq\" (UID: \"b4a0a450-f81c-41d2-8400-b12baf800baa\") " pod="openstack/barbican-917c-account-create-update-kfdqq"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.692658 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b151453e-b13d-4bbe-b156-c7d264aefd8a-operator-scripts\") pod \"cinder-89d7-account-create-update-mc8pj\" (UID: \"b151453e-b13d-4bbe-b156-c7d264aefd8a\") " pod="openstack/cinder-89d7-account-create-update-mc8pj"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.693523 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b151453e-b13d-4bbe-b156-c7d264aefd8a-operator-scripts\") pod \"cinder-89d7-account-create-update-mc8pj\" (UID: \"b151453e-b13d-4bbe-b156-c7d264aefd8a\") " pod="openstack/cinder-89d7-account-create-update-mc8pj"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.714091 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hkqht\" (UniqueName: \"kubernetes.io/projected/b151453e-b13d-4bbe-b156-c7d264aefd8a-kube-api-access-hkqht\") pod \"cinder-89d7-account-create-update-mc8pj\" (UID: \"b151453e-b13d-4bbe-b156-c7d264aefd8a\") " pod="openstack/cinder-89d7-account-create-update-mc8pj"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.777687 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-h47z9"]
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.778869 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-h47z9"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.790564 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-h47z9"]
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.801000 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5mqgr\" (UniqueName: \"kubernetes.io/projected/b4a0a450-f81c-41d2-8400-b12baf800baa-kube-api-access-5mqgr\") pod \"barbican-917c-account-create-update-kfdqq\" (UID: \"b4a0a450-f81c-41d2-8400-b12baf800baa\") " pod="openstack/barbican-917c-account-create-update-kfdqq"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.801182 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b4a0a450-f81c-41d2-8400-b12baf800baa-operator-scripts\") pod \"barbican-917c-account-create-update-kfdqq\" (UID: \"b4a0a450-f81c-41d2-8400-b12baf800baa\") " pod="openstack/barbican-917c-account-create-update-kfdqq"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.802069 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b4a0a450-f81c-41d2-8400-b12baf800baa-operator-scripts\") pod \"barbican-917c-account-create-update-kfdqq\" (UID: \"b4a0a450-f81c-41d2-8400-b12baf800baa\") " pod="openstack/barbican-917c-account-create-update-kfdqq"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.803608 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-e221-account-create-update-6bntv"]
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.810726 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-e221-account-create-update-6bntv"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.814136 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-e221-account-create-update-6bntv"]
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.814498 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-2f6fc"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.814758 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.832907 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5mqgr\" (UniqueName: \"kubernetes.io/projected/b4a0a450-f81c-41d2-8400-b12baf800baa-kube-api-access-5mqgr\") pod \"barbican-917c-account-create-update-kfdqq\" (UID: \"b4a0a450-f81c-41d2-8400-b12baf800baa\") " pod="openstack/barbican-917c-account-create-update-kfdqq"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.870002 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-89d7-account-create-update-mc8pj"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.887837 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-917c-account-create-update-kfdqq"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.902890 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8e7de29a-2b04-4dc8-99f1-1d3568819adf-operator-scripts\") pod \"neutron-db-create-h47z9\" (UID: \"8e7de29a-2b04-4dc8-99f1-1d3568819adf\") " pod="openstack/neutron-db-create-h47z9"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.902968 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c1b1a49-b786-45bb-8f17-d29c10d099bc-operator-scripts\") pod \"neutron-e221-account-create-update-6bntv\" (UID: \"4c1b1a49-b786-45bb-8f17-d29c10d099bc\") " pod="openstack/neutron-e221-account-create-update-6bntv"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.903001 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjmfx\" (UniqueName: \"kubernetes.io/projected/4c1b1a49-b786-45bb-8f17-d29c10d099bc-kube-api-access-gjmfx\") pod \"neutron-e221-account-create-update-6bntv\" (UID: \"4c1b1a49-b786-45bb-8f17-d29c10d099bc\") " pod="openstack/neutron-e221-account-create-update-6bntv"
Jan 28 16:06:04 crc kubenswrapper[4811]: I0128 16:06:04.903062 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27mwz\" (UniqueName: \"kubernetes.io/projected/8e7de29a-2b04-4dc8-99f1-1d3568819adf-kube-api-access-27mwz\") pod \"neutron-db-create-h47z9\" (UID: \"8e7de29a-2b04-4dc8-99f1-1d3568819adf\") " pod="openstack/neutron-db-create-h47z9"
Jan 28 16:06:05 crc kubenswrapper[4811]: I0128 16:06:05.004076 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c1b1a49-b786-45bb-8f17-d29c10d099bc-operator-scripts\") pod \"neutron-e221-account-create-update-6bntv\" (UID: \"4c1b1a49-b786-45bb-8f17-d29c10d099bc\") " pod="openstack/neutron-e221-account-create-update-6bntv"
Jan 28 16:06:05 crc kubenswrapper[4811]: I0128 16:06:05.004375 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjmfx\" (UniqueName: \"kubernetes.io/projected/4c1b1a49-b786-45bb-8f17-d29c10d099bc-kube-api-access-gjmfx\") pod \"neutron-e221-account-create-update-6bntv\" (UID: \"4c1b1a49-b786-45bb-8f17-d29c10d099bc\") " pod="openstack/neutron-e221-account-create-update-6bntv"
Jan 28 16:06:05 crc kubenswrapper[4811]: I0128 16:06:05.004452 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27mwz\" (UniqueName: \"kubernetes.io/projected/8e7de29a-2b04-4dc8-99f1-1d3568819adf-kube-api-access-27mwz\") pod \"neutron-db-create-h47z9\" (UID: \"8e7de29a-2b04-4dc8-99f1-1d3568819adf\") " pod="openstack/neutron-db-create-h47z9"
Jan 28 16:06:05 crc kubenswrapper[4811]: I0128 16:06:05.004511 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8e7de29a-2b04-4dc8-99f1-1d3568819adf-operator-scripts\") pod \"neutron-db-create-h47z9\" (UID: \"8e7de29a-2b04-4dc8-99f1-1d3568819adf\") " pod="openstack/neutron-db-create-h47z9"
\"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c1b1a49-b786-45bb-8f17-d29c10d099bc-operator-scripts\") pod \"neutron-e221-account-create-update-6bntv\" (UID: \"4c1b1a49-b786-45bb-8f17-d29c10d099bc\") " pod="openstack/neutron-e221-account-create-update-6bntv" Jan 28 16:06:05 crc kubenswrapper[4811]: I0128 16:06:05.005150 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8e7de29a-2b04-4dc8-99f1-1d3568819adf-operator-scripts\") pod \"neutron-db-create-h47z9\" (UID: \"8e7de29a-2b04-4dc8-99f1-1d3568819adf\") " pod="openstack/neutron-db-create-h47z9" Jan 28 16:06:05 crc kubenswrapper[4811]: I0128 16:06:05.025992 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27mwz\" (UniqueName: \"kubernetes.io/projected/8e7de29a-2b04-4dc8-99f1-1d3568819adf-kube-api-access-27mwz\") pod \"neutron-db-create-h47z9\" (UID: \"8e7de29a-2b04-4dc8-99f1-1d3568819adf\") " pod="openstack/neutron-db-create-h47z9" Jan 28 16:06:05 crc kubenswrapper[4811]: I0128 16:06:05.027072 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjmfx\" (UniqueName: \"kubernetes.io/projected/4c1b1a49-b786-45bb-8f17-d29c10d099bc-kube-api-access-gjmfx\") pod \"neutron-e221-account-create-update-6bntv\" (UID: \"4c1b1a49-b786-45bb-8f17-d29c10d099bc\") " pod="openstack/neutron-e221-account-create-update-6bntv" Jan 28 16:06:05 crc kubenswrapper[4811]: I0128 16:06:05.108419 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-h47z9" Jan 28 16:06:05 crc kubenswrapper[4811]: I0128 16:06:05.149451 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-e221-account-create-update-6bntv" Jan 28 16:06:05 crc kubenswrapper[4811]: I0128 16:06:05.153784 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-9g69v"] Jan 28 16:06:05 crc kubenswrapper[4811]: I0128 16:06:05.303372 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-kqxgl"] Jan 28 16:06:05 crc kubenswrapper[4811]: I0128 16:06:05.359773 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-2f6fc"] Jan 28 16:06:05 crc kubenswrapper[4811]: I0128 16:06:05.500823 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-89d7-account-create-update-mc8pj"] Jan 28 16:06:05 crc kubenswrapper[4811]: I0128 16:06:05.526853 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-917c-account-create-update-kfdqq"] Jan 28 16:06:05 crc kubenswrapper[4811]: I0128 16:06:05.648937 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-9g69v" event={"ID":"99a30150-2d2e-420e-aba9-d7d99b62a612","Type":"ContainerStarted","Data":"ba39a2972426d7c9ea47b497264068df024d7cceb8186d58579bb53c25e9221b"} Jan 28 16:06:05 crc kubenswrapper[4811]: I0128 16:06:05.688878 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-h47z9"] Jan 28 16:06:05 crc kubenswrapper[4811]: W0128 16:06:05.730998 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8e7de29a_2b04_4dc8_99f1_1d3568819adf.slice/crio-81cb9bf548d042b6de2d6fb7d1a6e45267728c06e1e054b714f6c3cbf72cdd65 WatchSource:0}: Error finding container 
Jan 28 16:06:05 crc kubenswrapper[4811]: W0128 16:06:05.730998 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8e7de29a_2b04_4dc8_99f1_1d3568819adf.slice/crio-81cb9bf548d042b6de2d6fb7d1a6e45267728c06e1e054b714f6c3cbf72cdd65 WatchSource:0}: Error finding container 81cb9bf548d042b6de2d6fb7d1a6e45267728c06e1e054b714f6c3cbf72cdd65: Status 404 returned error can't find the container with id 81cb9bf548d042b6de2d6fb7d1a6e45267728c06e1e054b714f6c3cbf72cdd65
Jan 28 16:06:05 crc kubenswrapper[4811]: W0128 16:06:05.733245 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb4a0a450_f81c_41d2_8400_b12baf800baa.slice/crio-39a7c7bb7a0397d13eecd4ce124a918e0423d5f83d29033650d8c5ba0e5ff8cd WatchSource:0}: Error finding container 39a7c7bb7a0397d13eecd4ce124a918e0423d5f83d29033650d8c5ba0e5ff8cd: Status 404 returned error can't find the container with id 39a7c7bb7a0397d13eecd4ce124a918e0423d5f83d29033650d8c5ba0e5ff8cd
Jan 28 16:06:05 crc kubenswrapper[4811]: W0128 16:06:05.735833 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2f54b0ba_3a97_41d2_a048_01d6dd6daeca.slice/crio-841944ffa3993b8bb05cd3cdfc76138285a6189ae02929aceba48b01849caa98 WatchSource:0}: Error finding container 841944ffa3993b8bb05cd3cdfc76138285a6189ae02929aceba48b01849caa98: Status 404 returned error can't find the container with id 841944ffa3993b8bb05cd3cdfc76138285a6189ae02929aceba48b01849caa98
Jan 28 16:06:05 crc kubenswrapper[4811]: I0128 16:06:05.793874 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-e221-account-create-update-6bntv"]
Jan 28 16:06:05 crc kubenswrapper[4811]: W0128 16:06:05.827301 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4c1b1a49_b786_45bb_8f17_d29c10d099bc.slice/crio-0a90eb3ba774e445220689ac8c2bc6ea959538dceee195d9938a0eaa42c16b90 WatchSource:0}: Error finding container 0a90eb3ba774e445220689ac8c2bc6ea959538dceee195d9938a0eaa42c16b90: Status 404 returned error can't find the container with id 0a90eb3ba774e445220689ac8c2bc6ea959538dceee195d9938a0eaa42c16b90
Jan 28 16:06:06 crc kubenswrapper[4811]: I0128 16:06:06.665264 4811 generic.go:334] "Generic (PLEG): container finished" podID="b151453e-b13d-4bbe-b156-c7d264aefd8a" containerID="e245bc5e0a213d1b27f569389ed440b4a2a0e5de3a49de48ac3935e44c74ce9b" exitCode=0
Jan 28 16:06:06 crc kubenswrapper[4811]: I0128 16:06:06.665332 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-89d7-account-create-update-mc8pj" event={"ID":"b151453e-b13d-4bbe-b156-c7d264aefd8a","Type":"ContainerDied","Data":"e245bc5e0a213d1b27f569389ed440b4a2a0e5de3a49de48ac3935e44c74ce9b"}
Jan 28 16:06:06 crc kubenswrapper[4811]: I0128 16:06:06.665365 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-89d7-account-create-update-mc8pj" event={"ID":"b151453e-b13d-4bbe-b156-c7d264aefd8a","Type":"ContainerStarted","Data":"c9ee58aa81859dae05a9098b0ad2b58fd58c3cefa6ed5ec2258b335cc9d8652e"}
Jan 28 16:06:06 crc kubenswrapper[4811]: I0128 16:06:06.673078 4811 generic.go:334] "Generic (PLEG): container finished" podID="99a30150-2d2e-420e-aba9-d7d99b62a612" containerID="1ceb6f6a94b79fe56385e3ac04ed41338c77b0c74c77cd24cf368cbae3715bd3" exitCode=0
Jan 28 16:06:06 crc kubenswrapper[4811]: I0128 16:06:06.673158 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-9g69v" event={"ID":"99a30150-2d2e-420e-aba9-d7d99b62a612","Type":"ContainerDied","Data":"1ceb6f6a94b79fe56385e3ac04ed41338c77b0c74c77cd24cf368cbae3715bd3"}
"Generic (PLEG): container finished" podID="4c1b1a49-b786-45bb-8f17-d29c10d099bc" containerID="daf9818e6465dcfb5327240a49eeaa4ce4fa977d0f7bce0e7bcf99376e8eb451" exitCode=0 Jan 28 16:06:06 crc kubenswrapper[4811]: I0128 16:06:06.675458 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-e221-account-create-update-6bntv" event={"ID":"4c1b1a49-b786-45bb-8f17-d29c10d099bc","Type":"ContainerDied","Data":"daf9818e6465dcfb5327240a49eeaa4ce4fa977d0f7bce0e7bcf99376e8eb451"} Jan 28 16:06:06 crc kubenswrapper[4811]: I0128 16:06:06.675509 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-e221-account-create-update-6bntv" event={"ID":"4c1b1a49-b786-45bb-8f17-d29c10d099bc","Type":"ContainerStarted","Data":"0a90eb3ba774e445220689ac8c2bc6ea959538dceee195d9938a0eaa42c16b90"} Jan 28 16:06:06 crc kubenswrapper[4811]: I0128 16:06:06.677188 4811 generic.go:334] "Generic (PLEG): container finished" podID="69c315f0-b684-4e32-b680-ca6bc1823beb" containerID="883dabd11b048cfe00fac20c4bf6061579e989100466ed30712029e12cebf29d" exitCode=0 Jan 28 16:06:06 crc kubenswrapper[4811]: I0128 16:06:06.677243 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-kqxgl" event={"ID":"69c315f0-b684-4e32-b680-ca6bc1823beb","Type":"ContainerDied","Data":"883dabd11b048cfe00fac20c4bf6061579e989100466ed30712029e12cebf29d"} Jan 28 16:06:06 crc kubenswrapper[4811]: I0128 16:06:06.677259 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-kqxgl" event={"ID":"69c315f0-b684-4e32-b680-ca6bc1823beb","Type":"ContainerStarted","Data":"a510eedb9af53077f4baff2a396bf4d916bcc02001c2ac613c1c28eed14c7b0c"} Jan 28 16:06:06 crc kubenswrapper[4811]: I0128 16:06:06.680857 4811 generic.go:334] "Generic (PLEG): container finished" podID="8e7de29a-2b04-4dc8-99f1-1d3568819adf" containerID="ac51924788f9396826cde65d3bed2e0e948d0867ac9860b62b53b6c4f2d570a3" exitCode=0 Jan 28 16:06:06 crc kubenswrapper[4811]: I0128 16:06:06.680922 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-h47z9" event={"ID":"8e7de29a-2b04-4dc8-99f1-1d3568819adf","Type":"ContainerDied","Data":"ac51924788f9396826cde65d3bed2e0e948d0867ac9860b62b53b6c4f2d570a3"} Jan 28 16:06:06 crc kubenswrapper[4811]: I0128 16:06:06.680949 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-h47z9" event={"ID":"8e7de29a-2b04-4dc8-99f1-1d3568819adf","Type":"ContainerStarted","Data":"81cb9bf548d042b6de2d6fb7d1a6e45267728c06e1e054b714f6c3cbf72cdd65"} Jan 28 16:06:06 crc kubenswrapper[4811]: I0128 16:06:06.682952 4811 generic.go:334] "Generic (PLEG): container finished" podID="2f54b0ba-3a97-41d2-a048-01d6dd6daeca" containerID="4a7d9e07ca981c6204e9b634c786607928fb94b0320825c489d7eca986c71543" exitCode=0 Jan 28 16:06:06 crc kubenswrapper[4811]: I0128 16:06:06.682995 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-2f6fc" event={"ID":"2f54b0ba-3a97-41d2-a048-01d6dd6daeca","Type":"ContainerDied","Data":"4a7d9e07ca981c6204e9b634c786607928fb94b0320825c489d7eca986c71543"} Jan 28 16:06:06 crc kubenswrapper[4811]: I0128 16:06:06.683012 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-2f6fc" event={"ID":"2f54b0ba-3a97-41d2-a048-01d6dd6daeca","Type":"ContainerStarted","Data":"841944ffa3993b8bb05cd3cdfc76138285a6189ae02929aceba48b01849caa98"} Jan 28 16:06:06 crc kubenswrapper[4811]: I0128 16:06:06.685767 4811 generic.go:334] "Generic (PLEG): container 
finished" podID="b4a0a450-f81c-41d2-8400-b12baf800baa" containerID="f8ff919f7d2a453d6497a7e7fa333f979d299990ca42012a62c1896d1dbd9155" exitCode=0 Jan 28 16:06:06 crc kubenswrapper[4811]: I0128 16:06:06.685815 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-917c-account-create-update-kfdqq" event={"ID":"b4a0a450-f81c-41d2-8400-b12baf800baa","Type":"ContainerDied","Data":"f8ff919f7d2a453d6497a7e7fa333f979d299990ca42012a62c1896d1dbd9155"} Jan 28 16:06:06 crc kubenswrapper[4811]: I0128 16:06:06.685832 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-917c-account-create-update-kfdqq" event={"ID":"b4a0a450-f81c-41d2-8400-b12baf800baa","Type":"ContainerStarted","Data":"39a7c7bb7a0397d13eecd4ce124a918e0423d5f83d29033650d8c5ba0e5ff8cd"} Jan 28 16:06:06 crc kubenswrapper[4811]: I0128 16:06:06.692237 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b","Type":"ContainerStarted","Data":"417e8d88bc175d3ce7bec37395bdbe9c0f310c931fbd100484ebab310a34e41a"} Jan 28 16:06:06 crc kubenswrapper[4811]: I0128 16:06:06.692577 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b","Type":"ContainerStarted","Data":"fa14b738e3c3edb62ca81416e8099d28a73bafc1416ebecc67cb280bd16a3afb"} Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.477908 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-ssv4k"] Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.479402 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-ssv4k" Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.481173 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-ns2b7" Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.482061 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.483075 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.483261 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.491794 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-ssv4k"] Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.556251 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5kq48\" (UniqueName: \"kubernetes.io/projected/1fbf8a43-325c-4b8b-bdfa-8909e88b6b31-kube-api-access-5kq48\") pod \"keystone-db-sync-ssv4k\" (UID: \"1fbf8a43-325c-4b8b-bdfa-8909e88b6b31\") " pod="openstack/keystone-db-sync-ssv4k" Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.556319 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fbf8a43-325c-4b8b-bdfa-8909e88b6b31-combined-ca-bundle\") pod \"keystone-db-sync-ssv4k\" (UID: \"1fbf8a43-325c-4b8b-bdfa-8909e88b6b31\") " pod="openstack/keystone-db-sync-ssv4k" Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.556456 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/1fbf8a43-325c-4b8b-bdfa-8909e88b6b31-config-data\") pod \"keystone-db-sync-ssv4k\" (UID: \"1fbf8a43-325c-4b8b-bdfa-8909e88b6b31\") " pod="openstack/keystone-db-sync-ssv4k" Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.657531 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5kq48\" (UniqueName: \"kubernetes.io/projected/1fbf8a43-325c-4b8b-bdfa-8909e88b6b31-kube-api-access-5kq48\") pod \"keystone-db-sync-ssv4k\" (UID: \"1fbf8a43-325c-4b8b-bdfa-8909e88b6b31\") " pod="openstack/keystone-db-sync-ssv4k" Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.657618 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fbf8a43-325c-4b8b-bdfa-8909e88b6b31-combined-ca-bundle\") pod \"keystone-db-sync-ssv4k\" (UID: \"1fbf8a43-325c-4b8b-bdfa-8909e88b6b31\") " pod="openstack/keystone-db-sync-ssv4k" Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.657750 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fbf8a43-325c-4b8b-bdfa-8909e88b6b31-config-data\") pod \"keystone-db-sync-ssv4k\" (UID: \"1fbf8a43-325c-4b8b-bdfa-8909e88b6b31\") " pod="openstack/keystone-db-sync-ssv4k" Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.666386 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fbf8a43-325c-4b8b-bdfa-8909e88b6b31-config-data\") pod \"keystone-db-sync-ssv4k\" (UID: \"1fbf8a43-325c-4b8b-bdfa-8909e88b6b31\") " pod="openstack/keystone-db-sync-ssv4k" Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.673139 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fbf8a43-325c-4b8b-bdfa-8909e88b6b31-combined-ca-bundle\") pod \"keystone-db-sync-ssv4k\" (UID: \"1fbf8a43-325c-4b8b-bdfa-8909e88b6b31\") " pod="openstack/keystone-db-sync-ssv4k" Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.673975 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-bsvjv"] Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.675485 4811 util.go:30] "No sandbox for pod can be found. 
Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.675485 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-bsvjv"
Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.677290 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data"
Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.677935 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-vn8wr"
Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.683949 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5kq48\" (UniqueName: \"kubernetes.io/projected/1fbf8a43-325c-4b8b-bdfa-8909e88b6b31-kube-api-access-5kq48\") pod \"keystone-db-sync-ssv4k\" (UID: \"1fbf8a43-325c-4b8b-bdfa-8909e88b6b31\") " pod="openstack/keystone-db-sync-ssv4k"
Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.717458 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b","Type":"ContainerStarted","Data":"91e208e8d04b20770d870040f31d89e907527f347a650e9696cbc4d272f573aa"}
Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.717529 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b","Type":"ContainerStarted","Data":"37e7bde4db551b38403da3cfc7c146a5e67d421d13ffa40d87ce60bbfdaef222"}
Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.759982 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/91de0bcd-1c43-42d8-8582-ba86e33b81c2-db-sync-config-data\") pod \"glance-db-sync-bsvjv\" (UID: \"91de0bcd-1c43-42d8-8582-ba86e33b81c2\") " pod="openstack/glance-db-sync-bsvjv"
Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.760045 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91de0bcd-1c43-42d8-8582-ba86e33b81c2-config-data\") pod \"glance-db-sync-bsvjv\" (UID: \"91de0bcd-1c43-42d8-8582-ba86e33b81c2\") " pod="openstack/glance-db-sync-bsvjv"
Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.760135 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91de0bcd-1c43-42d8-8582-ba86e33b81c2-combined-ca-bundle\") pod \"glance-db-sync-bsvjv\" (UID: \"91de0bcd-1c43-42d8-8582-ba86e33b81c2\") " pod="openstack/glance-db-sync-bsvjv"
Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.760195 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9ljs\" (UniqueName: \"kubernetes.io/projected/91de0bcd-1c43-42d8-8582-ba86e33b81c2-kube-api-access-l9ljs\") pod \"glance-db-sync-bsvjv\" (UID: \"91de0bcd-1c43-42d8-8582-ba86e33b81c2\") " pod="openstack/glance-db-sync-bsvjv"
Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.777141 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-bsvjv"]
Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.796728 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-ssv4k"
Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.863110 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9ljs\" (UniqueName: \"kubernetes.io/projected/91de0bcd-1c43-42d8-8582-ba86e33b81c2-kube-api-access-l9ljs\") pod \"glance-db-sync-bsvjv\" (UID: \"91de0bcd-1c43-42d8-8582-ba86e33b81c2\") " pod="openstack/glance-db-sync-bsvjv"
Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.863213 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/91de0bcd-1c43-42d8-8582-ba86e33b81c2-db-sync-config-data\") pod \"glance-db-sync-bsvjv\" (UID: \"91de0bcd-1c43-42d8-8582-ba86e33b81c2\") " pod="openstack/glance-db-sync-bsvjv"
Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.863252 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91de0bcd-1c43-42d8-8582-ba86e33b81c2-config-data\") pod \"glance-db-sync-bsvjv\" (UID: \"91de0bcd-1c43-42d8-8582-ba86e33b81c2\") " pod="openstack/glance-db-sync-bsvjv"
Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.863338 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91de0bcd-1c43-42d8-8582-ba86e33b81c2-combined-ca-bundle\") pod \"glance-db-sync-bsvjv\" (UID: \"91de0bcd-1c43-42d8-8582-ba86e33b81c2\") " pod="openstack/glance-db-sync-bsvjv"
Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.867913 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/91de0bcd-1c43-42d8-8582-ba86e33b81c2-db-sync-config-data\") pod \"glance-db-sync-bsvjv\" (UID: \"91de0bcd-1c43-42d8-8582-ba86e33b81c2\") " pod="openstack/glance-db-sync-bsvjv"
Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.870403 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91de0bcd-1c43-42d8-8582-ba86e33b81c2-combined-ca-bundle\") pod \"glance-db-sync-bsvjv\" (UID: \"91de0bcd-1c43-42d8-8582-ba86e33b81c2\") " pod="openstack/glance-db-sync-bsvjv"
Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.873949 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91de0bcd-1c43-42d8-8582-ba86e33b81c2-config-data\") pod \"glance-db-sync-bsvjv\" (UID: \"91de0bcd-1c43-42d8-8582-ba86e33b81c2\") " pod="openstack/glance-db-sync-bsvjv"
Jan 28 16:06:07 crc kubenswrapper[4811]: I0128 16:06:07.895058 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9ljs\" (UniqueName: \"kubernetes.io/projected/91de0bcd-1c43-42d8-8582-ba86e33b81c2-kube-api-access-l9ljs\") pod \"glance-db-sync-bsvjv\" (UID: \"91de0bcd-1c43-42d8-8582-ba86e33b81c2\") " pod="openstack/glance-db-sync-bsvjv"
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.033412 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-9g69v"
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.078009 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-bsvjv"
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.169282 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sqq96\" (UniqueName: \"kubernetes.io/projected/99a30150-2d2e-420e-aba9-d7d99b62a612-kube-api-access-sqq96\") pod \"99a30150-2d2e-420e-aba9-d7d99b62a612\" (UID: \"99a30150-2d2e-420e-aba9-d7d99b62a612\") "
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.169397 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99a30150-2d2e-420e-aba9-d7d99b62a612-operator-scripts\") pod \"99a30150-2d2e-420e-aba9-d7d99b62a612\" (UID: \"99a30150-2d2e-420e-aba9-d7d99b62a612\") "
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.170307 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99a30150-2d2e-420e-aba9-d7d99b62a612-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "99a30150-2d2e-420e-aba9-d7d99b62a612" (UID: "99a30150-2d2e-420e-aba9-d7d99b62a612"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.175985 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99a30150-2d2e-420e-aba9-d7d99b62a612-kube-api-access-sqq96" (OuterVolumeSpecName: "kube-api-access-sqq96") pod "99a30150-2d2e-420e-aba9-d7d99b62a612" (UID: "99a30150-2d2e-420e-aba9-d7d99b62a612"). InnerVolumeSpecName "kube-api-access-sqq96". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.271554 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sqq96\" (UniqueName: \"kubernetes.io/projected/99a30150-2d2e-420e-aba9-d7d99b62a612-kube-api-access-sqq96\") on node \"crc\" DevicePath \"\""
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.271906 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99a30150-2d2e-420e-aba9-d7d99b62a612-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.279410 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-917c-account-create-update-kfdqq"
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.322032 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-e221-account-create-update-6bntv"
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.324533 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-2f6fc"
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.345534 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-89d7-account-create-update-mc8pj"
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.375423 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5mqgr\" (UniqueName: \"kubernetes.io/projected/b4a0a450-f81c-41d2-8400-b12baf800baa-kube-api-access-5mqgr\") pod \"b4a0a450-f81c-41d2-8400-b12baf800baa\" (UID: \"b4a0a450-f81c-41d2-8400-b12baf800baa\") "
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.375490 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f54b0ba-3a97-41d2-a048-01d6dd6daeca-operator-scripts\") pod \"2f54b0ba-3a97-41d2-a048-01d6dd6daeca\" (UID: \"2f54b0ba-3a97-41d2-a048-01d6dd6daeca\") "
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.375571 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b4a0a450-f81c-41d2-8400-b12baf800baa-operator-scripts\") pod \"b4a0a450-f81c-41d2-8400-b12baf800baa\" (UID: \"b4a0a450-f81c-41d2-8400-b12baf800baa\") "
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.375738 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c1b1a49-b786-45bb-8f17-d29c10d099bc-operator-scripts\") pod \"4c1b1a49-b786-45bb-8f17-d29c10d099bc\" (UID: \"4c1b1a49-b786-45bb-8f17-d29c10d099bc\") "
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.375809 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tp6mw\" (UniqueName: \"kubernetes.io/projected/2f54b0ba-3a97-41d2-a048-01d6dd6daeca-kube-api-access-tp6mw\") pod \"2f54b0ba-3a97-41d2-a048-01d6dd6daeca\" (UID: \"2f54b0ba-3a97-41d2-a048-01d6dd6daeca\") "
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.376041 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gjmfx\" (UniqueName: \"kubernetes.io/projected/4c1b1a49-b786-45bb-8f17-d29c10d099bc-kube-api-access-gjmfx\") pod \"4c1b1a49-b786-45bb-8f17-d29c10d099bc\" (UID: \"4c1b1a49-b786-45bb-8f17-d29c10d099bc\") "
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.380954 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-h47z9"
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.381754 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b4a0a450-f81c-41d2-8400-b12baf800baa-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b4a0a450-f81c-41d2-8400-b12baf800baa" (UID: "b4a0a450-f81c-41d2-8400-b12baf800baa"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.382836 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f54b0ba-3a97-41d2-a048-01d6dd6daeca-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2f54b0ba-3a97-41d2-a048-01d6dd6daeca" (UID: "2f54b0ba-3a97-41d2-a048-01d6dd6daeca"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.383263 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-kqxgl" Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.383330 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f54b0ba-3a97-41d2-a048-01d6dd6daeca-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.383353 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b4a0a450-f81c-41d2-8400-b12baf800baa-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.383364 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c1b1a49-b786-45bb-8f17-d29c10d099bc-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.388766 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4a0a450-f81c-41d2-8400-b12baf800baa-kube-api-access-5mqgr" (OuterVolumeSpecName: "kube-api-access-5mqgr") pod "b4a0a450-f81c-41d2-8400-b12baf800baa" (UID: "b4a0a450-f81c-41d2-8400-b12baf800baa"). InnerVolumeSpecName "kube-api-access-5mqgr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.388953 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f54b0ba-3a97-41d2-a048-01d6dd6daeca-kube-api-access-tp6mw" (OuterVolumeSpecName: "kube-api-access-tp6mw") pod "2f54b0ba-3a97-41d2-a048-01d6dd6daeca" (UID: "2f54b0ba-3a97-41d2-a048-01d6dd6daeca"). InnerVolumeSpecName "kube-api-access-tp6mw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.390629 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c1b1a49-b786-45bb-8f17-d29c10d099bc-kube-api-access-gjmfx" (OuterVolumeSpecName: "kube-api-access-gjmfx") pod "4c1b1a49-b786-45bb-8f17-d29c10d099bc" (UID: "4c1b1a49-b786-45bb-8f17-d29c10d099bc"). InnerVolumeSpecName "kube-api-access-gjmfx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.487690 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8e7de29a-2b04-4dc8-99f1-1d3568819adf-operator-scripts\") pod \"8e7de29a-2b04-4dc8-99f1-1d3568819adf\" (UID: \"8e7de29a-2b04-4dc8-99f1-1d3568819adf\") " Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.487731 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b151453e-b13d-4bbe-b156-c7d264aefd8a-operator-scripts\") pod \"b151453e-b13d-4bbe-b156-c7d264aefd8a\" (UID: \"b151453e-b13d-4bbe-b156-c7d264aefd8a\") " Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.487755 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-27mwz\" (UniqueName: \"kubernetes.io/projected/8e7de29a-2b04-4dc8-99f1-1d3568819adf-kube-api-access-27mwz\") pod \"8e7de29a-2b04-4dc8-99f1-1d3568819adf\" (UID: \"8e7de29a-2b04-4dc8-99f1-1d3568819adf\") " Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.487820 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qx25x\" (UniqueName: \"kubernetes.io/projected/69c315f0-b684-4e32-b680-ca6bc1823beb-kube-api-access-qx25x\") pod \"69c315f0-b684-4e32-b680-ca6bc1823beb\" (UID: \"69c315f0-b684-4e32-b680-ca6bc1823beb\") " Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.487837 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69c315f0-b684-4e32-b680-ca6bc1823beb-operator-scripts\") pod \"69c315f0-b684-4e32-b680-ca6bc1823beb\" (UID: \"69c315f0-b684-4e32-b680-ca6bc1823beb\") " Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.487897 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hkqht\" (UniqueName: \"kubernetes.io/projected/b151453e-b13d-4bbe-b156-c7d264aefd8a-kube-api-access-hkqht\") pod \"b151453e-b13d-4bbe-b156-c7d264aefd8a\" (UID: \"b151453e-b13d-4bbe-b156-c7d264aefd8a\") " Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.488208 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gjmfx\" (UniqueName: \"kubernetes.io/projected/4c1b1a49-b786-45bb-8f17-d29c10d099bc-kube-api-access-gjmfx\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.488219 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5mqgr\" (UniqueName: \"kubernetes.io/projected/b4a0a450-f81c-41d2-8400-b12baf800baa-kube-api-access-5mqgr\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.488228 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tp6mw\" (UniqueName: \"kubernetes.io/projected/2f54b0ba-3a97-41d2-a048-01d6dd6daeca-kube-api-access-tp6mw\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.498746 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b151453e-b13d-4bbe-b156-c7d264aefd8a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b151453e-b13d-4bbe-b156-c7d264aefd8a" (UID: "b151453e-b13d-4bbe-b156-c7d264aefd8a"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.499139 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69c315f0-b684-4e32-b680-ca6bc1823beb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "69c315f0-b684-4e32-b680-ca6bc1823beb" (UID: "69c315f0-b684-4e32-b680-ca6bc1823beb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.499749 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e7de29a-2b04-4dc8-99f1-1d3568819adf-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8e7de29a-2b04-4dc8-99f1-1d3568819adf" (UID: "8e7de29a-2b04-4dc8-99f1-1d3568819adf"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.513675 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69c315f0-b684-4e32-b680-ca6bc1823beb-kube-api-access-qx25x" (OuterVolumeSpecName: "kube-api-access-qx25x") pod "69c315f0-b684-4e32-b680-ca6bc1823beb" (UID: "69c315f0-b684-4e32-b680-ca6bc1823beb"). InnerVolumeSpecName "kube-api-access-qx25x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.520611 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b151453e-b13d-4bbe-b156-c7d264aefd8a-kube-api-access-hkqht" (OuterVolumeSpecName: "kube-api-access-hkqht") pod "b151453e-b13d-4bbe-b156-c7d264aefd8a" (UID: "b151453e-b13d-4bbe-b156-c7d264aefd8a"). InnerVolumeSpecName "kube-api-access-hkqht". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.520712 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e7de29a-2b04-4dc8-99f1-1d3568819adf-kube-api-access-27mwz" (OuterVolumeSpecName: "kube-api-access-27mwz") pod "8e7de29a-2b04-4dc8-99f1-1d3568819adf" (UID: "8e7de29a-2b04-4dc8-99f1-1d3568819adf"). InnerVolumeSpecName "kube-api-access-27mwz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.613645 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8e7de29a-2b04-4dc8-99f1-1d3568819adf-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.613701 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b151453e-b13d-4bbe-b156-c7d264aefd8a-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.613714 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-27mwz\" (UniqueName: \"kubernetes.io/projected/8e7de29a-2b04-4dc8-99f1-1d3568819adf-kube-api-access-27mwz\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.613744 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qx25x\" (UniqueName: \"kubernetes.io/projected/69c315f0-b684-4e32-b680-ca6bc1823beb-kube-api-access-qx25x\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.613758 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69c315f0-b684-4e32-b680-ca6bc1823beb-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.613770 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hkqht\" (UniqueName: \"kubernetes.io/projected/b151453e-b13d-4bbe-b156-c7d264aefd8a-kube-api-access-hkqht\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.686056 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-ssv4k"] Jan 28 16:06:08 crc kubenswrapper[4811]: W0128 16:06:08.702848 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1fbf8a43_325c_4b8b_bdfa_8909e88b6b31.slice/crio-6e549ebb3f407510065adee1edd389292642968f40b1695f2423a7893ea12c96 WatchSource:0}: Error finding container 6e549ebb3f407510065adee1edd389292642968f40b1695f2423a7893ea12c96: Status 404 returned error can't find the container with id 6e549ebb3f407510065adee1edd389292642968f40b1695f2423a7893ea12c96 Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.728897 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-89d7-account-create-update-mc8pj" event={"ID":"b151453e-b13d-4bbe-b156-c7d264aefd8a","Type":"ContainerDied","Data":"c9ee58aa81859dae05a9098b0ad2b58fd58c3cefa6ed5ec2258b335cc9d8652e"} Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.728930 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c9ee58aa81859dae05a9098b0ad2b58fd58c3cefa6ed5ec2258b335cc9d8652e" Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.728990 4811 util.go:48] "No ready sandbox for pod can be found. 
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.728990 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-89d7-account-create-update-mc8pj"
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.736069 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-9g69v" event={"ID":"99a30150-2d2e-420e-aba9-d7d99b62a612","Type":"ContainerDied","Data":"ba39a2972426d7c9ea47b497264068df024d7cceb8186d58579bb53c25e9221b"}
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.736095 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ba39a2972426d7c9ea47b497264068df024d7cceb8186d58579bb53c25e9221b"
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.736217 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-9g69v"
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.741986 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-e221-account-create-update-6bntv" event={"ID":"4c1b1a49-b786-45bb-8f17-d29c10d099bc","Type":"ContainerDied","Data":"0a90eb3ba774e445220689ac8c2bc6ea959538dceee195d9938a0eaa42c16b90"}
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.742019 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0a90eb3ba774e445220689ac8c2bc6ea959538dceee195d9938a0eaa42c16b90"
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.742081 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-e221-account-create-update-6bntv"
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.747300 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-kqxgl" event={"ID":"69c315f0-b684-4e32-b680-ca6bc1823beb","Type":"ContainerDied","Data":"a510eedb9af53077f4baff2a396bf4d916bcc02001c2ac613c1c28eed14c7b0c"}
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.747351 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a510eedb9af53077f4baff2a396bf4d916bcc02001c2ac613c1c28eed14c7b0c"
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.747542 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-kqxgl"
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.751346 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-h47z9" event={"ID":"8e7de29a-2b04-4dc8-99f1-1d3568819adf","Type":"ContainerDied","Data":"81cb9bf548d042b6de2d6fb7d1a6e45267728c06e1e054b714f6c3cbf72cdd65"}
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.751390 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="81cb9bf548d042b6de2d6fb7d1a6e45267728c06e1e054b714f6c3cbf72cdd65"
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.751791 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-h47z9"
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.753307 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-ssv4k" event={"ID":"1fbf8a43-325c-4b8b-bdfa-8909e88b6b31","Type":"ContainerStarted","Data":"6e549ebb3f407510065adee1edd389292642968f40b1695f2423a7893ea12c96"}
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.759484 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-2f6fc" event={"ID":"2f54b0ba-3a97-41d2-a048-01d6dd6daeca","Type":"ContainerDied","Data":"841944ffa3993b8bb05cd3cdfc76138285a6189ae02929aceba48b01849caa98"}
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.759514 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="841944ffa3993b8bb05cd3cdfc76138285a6189ae02929aceba48b01849caa98"
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.759569 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-2f6fc"
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.765147 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-917c-account-create-update-kfdqq" event={"ID":"b4a0a450-f81c-41d2-8400-b12baf800baa","Type":"ContainerDied","Data":"39a7c7bb7a0397d13eecd4ce124a918e0423d5f83d29033650d8c5ba0e5ff8cd"}
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.765203 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="39a7c7bb7a0397d13eecd4ce124a918e0423d5f83d29033650d8c5ba0e5ff8cd"
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.765258 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-917c-account-create-update-kfdqq"
Jan 28 16:06:08 crc kubenswrapper[4811]: I0128 16:06:08.801524 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-bsvjv"]
Jan 28 16:06:08 crc kubenswrapper[4811]: W0128 16:06:08.811622 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod91de0bcd_1c43_42d8_8582_ba86e33b81c2.slice/crio-deee3d5a159584e1e720024ddc5eb4cc71a7bc179a05b7541922fa10a64e0fd3 WatchSource:0}: Error finding container deee3d5a159584e1e720024ddc5eb4cc71a7bc179a05b7541922fa10a64e0fd3: Status 404 returned error can't find the container with id deee3d5a159584e1e720024ddc5eb4cc71a7bc179a05b7541922fa10a64e0fd3
Jan 28 16:06:09 crc kubenswrapper[4811]: I0128 16:06:09.775774 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-bsvjv" event={"ID":"91de0bcd-1c43-42d8-8582-ba86e33b81c2","Type":"ContainerStarted","Data":"deee3d5a159584e1e720024ddc5eb4cc71a7bc179a05b7541922fa10a64e0fd3"}
Jan 28 16:06:10 crc kubenswrapper[4811]: I0128 16:06:10.495507 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-9g69v"]
Jan 28 16:06:10 crc kubenswrapper[4811]: I0128 16:06:10.503413 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-9g69v"]
Jan 28 16:06:10 crc kubenswrapper[4811]: I0128 16:06:10.795749 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b","Type":"ContainerStarted","Data":"4c641018196714d8caf4ab6b48967eb4bb8ff1d06fab1301e8b3ab46913539dd"}
"SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b","Type":"ContainerStarted","Data":"edf4c2725cabb62cf88b0f7f7190df88433ba5987d5a8be69c4b5a42e602f8aa"} Jan 28 16:06:10 crc kubenswrapper[4811]: I0128 16:06:10.796065 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b","Type":"ContainerStarted","Data":"bbba77e73ba1d8f7f46e97a42a8636a5c7ea1703f7da10b9daf21ef5e23f03e9"} Jan 28 16:06:11 crc kubenswrapper[4811]: I0128 16:06:11.811756 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b","Type":"ContainerStarted","Data":"de17a3ae96cd44c5e7539204c9ecb4a99908a1d59c0cf7b1b30906afe8cca2ea"} Jan 28 16:06:12 crc kubenswrapper[4811]: I0128 16:06:12.357752 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99a30150-2d2e-420e-aba9-d7d99b62a612" path="/var/lib/kubelet/pods/99a30150-2d2e-420e-aba9-d7d99b62a612/volumes" Jan 28 16:06:15 crc kubenswrapper[4811]: E0128 16:06:15.000341 4811 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod99a30150_2d2e_420e_aba9_d7d99b62a612.slice\": RecentStats: unable to find data in memory cache]" Jan 28 16:06:15 crc kubenswrapper[4811]: I0128 16:06:15.514920 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-t9s25"] Jan 28 16:06:15 crc kubenswrapper[4811]: E0128 16:06:15.515262 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f54b0ba-3a97-41d2-a048-01d6dd6daeca" containerName="mariadb-database-create" Jan 28 16:06:15 crc kubenswrapper[4811]: I0128 16:06:15.515274 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f54b0ba-3a97-41d2-a048-01d6dd6daeca" containerName="mariadb-database-create" Jan 28 16:06:15 crc kubenswrapper[4811]: E0128 16:06:15.515287 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b151453e-b13d-4bbe-b156-c7d264aefd8a" containerName="mariadb-account-create-update" Jan 28 16:06:15 crc kubenswrapper[4811]: I0128 16:06:15.515293 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="b151453e-b13d-4bbe-b156-c7d264aefd8a" containerName="mariadb-account-create-update" Jan 28 16:06:15 crc kubenswrapper[4811]: E0128 16:06:15.515308 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4a0a450-f81c-41d2-8400-b12baf800baa" containerName="mariadb-account-create-update" Jan 28 16:06:15 crc kubenswrapper[4811]: I0128 16:06:15.515315 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4a0a450-f81c-41d2-8400-b12baf800baa" containerName="mariadb-account-create-update" Jan 28 16:06:15 crc kubenswrapper[4811]: E0128 16:06:15.515325 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69c315f0-b684-4e32-b680-ca6bc1823beb" containerName="mariadb-database-create" Jan 28 16:06:15 crc kubenswrapper[4811]: I0128 16:06:15.515330 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="69c315f0-b684-4e32-b680-ca6bc1823beb" containerName="mariadb-database-create" Jan 28 16:06:15 crc kubenswrapper[4811]: E0128 16:06:15.515344 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e7de29a-2b04-4dc8-99f1-1d3568819adf" containerName="mariadb-database-create" Jan 28 16:06:15 crc kubenswrapper[4811]: I0128 16:06:15.515351 4811 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="8e7de29a-2b04-4dc8-99f1-1d3568819adf" containerName="mariadb-database-create" Jan 28 16:06:15 crc kubenswrapper[4811]: E0128 16:06:15.515365 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99a30150-2d2e-420e-aba9-d7d99b62a612" containerName="mariadb-account-create-update" Jan 28 16:06:15 crc kubenswrapper[4811]: I0128 16:06:15.515370 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="99a30150-2d2e-420e-aba9-d7d99b62a612" containerName="mariadb-account-create-update" Jan 28 16:06:15 crc kubenswrapper[4811]: E0128 16:06:15.515390 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c1b1a49-b786-45bb-8f17-d29c10d099bc" containerName="mariadb-account-create-update" Jan 28 16:06:15 crc kubenswrapper[4811]: I0128 16:06:15.515396 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c1b1a49-b786-45bb-8f17-d29c10d099bc" containerName="mariadb-account-create-update" Jan 28 16:06:15 crc kubenswrapper[4811]: I0128 16:06:15.515553 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e7de29a-2b04-4dc8-99f1-1d3568819adf" containerName="mariadb-database-create" Jan 28 16:06:15 crc kubenswrapper[4811]: I0128 16:06:15.515567 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4a0a450-f81c-41d2-8400-b12baf800baa" containerName="mariadb-account-create-update" Jan 28 16:06:15 crc kubenswrapper[4811]: I0128 16:06:15.515583 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="b151453e-b13d-4bbe-b156-c7d264aefd8a" containerName="mariadb-account-create-update" Jan 28 16:06:15 crc kubenswrapper[4811]: I0128 16:06:15.515597 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="69c315f0-b684-4e32-b680-ca6bc1823beb" containerName="mariadb-database-create" Jan 28 16:06:15 crc kubenswrapper[4811]: I0128 16:06:15.515609 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f54b0ba-3a97-41d2-a048-01d6dd6daeca" containerName="mariadb-database-create" Jan 28 16:06:15 crc kubenswrapper[4811]: I0128 16:06:15.515618 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c1b1a49-b786-45bb-8f17-d29c10d099bc" containerName="mariadb-account-create-update" Jan 28 16:06:15 crc kubenswrapper[4811]: I0128 16:06:15.515626 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="99a30150-2d2e-420e-aba9-d7d99b62a612" containerName="mariadb-account-create-update" Jan 28 16:06:15 crc kubenswrapper[4811]: I0128 16:06:15.516144 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-t9s25" Jan 28 16:06:15 crc kubenswrapper[4811]: I0128 16:06:15.519293 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-mariadb-root-db-secret" Jan 28 16:06:15 crc kubenswrapper[4811]: I0128 16:06:15.527811 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-t9s25"] Jan 28 16:06:15 crc kubenswrapper[4811]: I0128 16:06:15.646726 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/505639ba-81cf-4cae-b494-e88d944457ea-operator-scripts\") pod \"root-account-create-update-t9s25\" (UID: \"505639ba-81cf-4cae-b494-e88d944457ea\") " pod="openstack/root-account-create-update-t9s25" Jan 28 16:06:15 crc kubenswrapper[4811]: I0128 16:06:15.646872 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-msfz4\" (UniqueName: \"kubernetes.io/projected/505639ba-81cf-4cae-b494-e88d944457ea-kube-api-access-msfz4\") pod \"root-account-create-update-t9s25\" (UID: \"505639ba-81cf-4cae-b494-e88d944457ea\") " pod="openstack/root-account-create-update-t9s25" Jan 28 16:06:15 crc kubenswrapper[4811]: I0128 16:06:15.748905 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/505639ba-81cf-4cae-b494-e88d944457ea-operator-scripts\") pod \"root-account-create-update-t9s25\" (UID: \"505639ba-81cf-4cae-b494-e88d944457ea\") " pod="openstack/root-account-create-update-t9s25" Jan 28 16:06:15 crc kubenswrapper[4811]: I0128 16:06:15.749503 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-msfz4\" (UniqueName: \"kubernetes.io/projected/505639ba-81cf-4cae-b494-e88d944457ea-kube-api-access-msfz4\") pod \"root-account-create-update-t9s25\" (UID: \"505639ba-81cf-4cae-b494-e88d944457ea\") " pod="openstack/root-account-create-update-t9s25" Jan 28 16:06:15 crc kubenswrapper[4811]: I0128 16:06:15.750058 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/505639ba-81cf-4cae-b494-e88d944457ea-operator-scripts\") pod \"root-account-create-update-t9s25\" (UID: \"505639ba-81cf-4cae-b494-e88d944457ea\") " pod="openstack/root-account-create-update-t9s25" Jan 28 16:06:15 crc kubenswrapper[4811]: I0128 16:06:15.779532 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-msfz4\" (UniqueName: \"kubernetes.io/projected/505639ba-81cf-4cae-b494-e88d944457ea-kube-api-access-msfz4\") pod \"root-account-create-update-t9s25\" (UID: \"505639ba-81cf-4cae-b494-e88d944457ea\") " pod="openstack/root-account-create-update-t9s25" Jan 28 16:06:15 crc kubenswrapper[4811]: I0128 16:06:15.849231 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-t9s25" Jan 28 16:06:19 crc kubenswrapper[4811]: I0128 16:06:19.100939 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Jan 28 16:06:25 crc kubenswrapper[4811]: E0128 16:06:25.189081 4811 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod99a30150_2d2e_420e_aba9_d7d99b62a612.slice\": RecentStats: unable to find data in memory cache]" Jan 28 16:06:25 crc kubenswrapper[4811]: E0128 16:06:25.400234 4811 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api@sha256:e4aa4ebbb1e581a12040e9ad2ae2709ac31b5d965bb64fc4252d1028b05c565f" Jan 28 16:06:25 crc kubenswrapper[4811]: E0128 16:06:25.400532 4811 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api@sha256:e4aa4ebbb1e581a12040e9ad2ae2709ac31b5d965bb64fc4252d1028b05c565f,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-l9ljs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-bsvjv_openstack(91de0bcd-1c43-42d8-8582-ba86e33b81c2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 28 16:06:25 crc kubenswrapper[4811]: E0128 16:06:25.401725 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" 
pod="openstack/glance-db-sync-bsvjv" podUID="91de0bcd-1c43-42d8-8582-ba86e33b81c2" Jan 28 16:06:25 crc kubenswrapper[4811]: I0128 16:06:25.899237 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-t9s25"] Jan 28 16:06:25 crc kubenswrapper[4811]: W0128 16:06:25.906022 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod505639ba_81cf_4cae_b494_e88d944457ea.slice/crio-18659e849644afd924e0a0e6cba2bcac6d106a4a7f542ba958552ce4ceb28cf9 WatchSource:0}: Error finding container 18659e849644afd924e0a0e6cba2bcac6d106a4a7f542ba958552ce4ceb28cf9: Status 404 returned error can't find the container with id 18659e849644afd924e0a0e6cba2bcac6d106a4a7f542ba958552ce4ceb28cf9 Jan 28 16:06:25 crc kubenswrapper[4811]: I0128 16:06:25.933483 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-ssv4k" event={"ID":"1fbf8a43-325c-4b8b-bdfa-8909e88b6b31","Type":"ContainerStarted","Data":"6bab905966a0769cc19f4f9e9080a14cf74fc575091dd1fb4eefb542e81f131c"} Jan 28 16:06:25 crc kubenswrapper[4811]: I0128 16:06:25.938266 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b","Type":"ContainerStarted","Data":"037d8c9004a401b696bf1926d9cfc0d899e6a6f211a5a2b6dab9e90a49238a12"} Jan 28 16:06:25 crc kubenswrapper[4811]: I0128 16:06:25.940412 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-t9s25" event={"ID":"505639ba-81cf-4cae-b494-e88d944457ea","Type":"ContainerStarted","Data":"18659e849644afd924e0a0e6cba2bcac6d106a4a7f542ba958552ce4ceb28cf9"} Jan 28 16:06:25 crc kubenswrapper[4811]: E0128 16:06:25.940998 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api@sha256:e4aa4ebbb1e581a12040e9ad2ae2709ac31b5d965bb64fc4252d1028b05c565f\\\"\"" pod="openstack/glance-db-sync-bsvjv" podUID="91de0bcd-1c43-42d8-8582-ba86e33b81c2" Jan 28 16:06:26 crc kubenswrapper[4811]: I0128 16:06:26.952718 4811 generic.go:334] "Generic (PLEG): container finished" podID="505639ba-81cf-4cae-b494-e88d944457ea" containerID="1cacc3d8ee6e5ba0678d2018830f89cb975dc25132dd501c515a3eb1e95fb565" exitCode=0 Jan 28 16:06:26 crc kubenswrapper[4811]: I0128 16:06:26.952900 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-t9s25" event={"ID":"505639ba-81cf-4cae-b494-e88d944457ea","Type":"ContainerDied","Data":"1cacc3d8ee6e5ba0678d2018830f89cb975dc25132dd501c515a3eb1e95fb565"} Jan 28 16:06:26 crc kubenswrapper[4811]: I0128 16:06:26.978705 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b","Type":"ContainerStarted","Data":"2ff7e313dd106d84ce405602fc4e33fb5da9f55f4e92c636152ab812ff49a6b8"} Jan 28 16:06:26 crc kubenswrapper[4811]: I0128 16:06:26.978742 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b","Type":"ContainerStarted","Data":"a455d95149982d555c46bea1485d4f01300e6c7c316898ea0120eeec661bed44"} Jan 28 16:06:26 crc kubenswrapper[4811]: I0128 16:06:26.978751 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b","Type":"ContainerStarted","Data":"df884a847f6dadbb8aca00b6654043b6c0784215f4e1b939b394aa4eb681e365"} Jan 28 16:06:26 crc kubenswrapper[4811]: I0128 16:06:26.978767 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b","Type":"ContainerStarted","Data":"b2aab4d8e8aa65a46a59daae5fc92084fdb3171e37ce4f82301d54bc92d83bac"} Jan 28 16:06:26 crc kubenswrapper[4811]: I0128 16:06:26.994809 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-ssv4k" podStartSLOduration=3.22610845 podStartE2EDuration="19.994785914s" podCreationTimestamp="2026-01-28 16:06:07 +0000 UTC" firstStartedPulling="2026-01-28 16:06:08.705976206 +0000 UTC m=+1261.460339789" lastFinishedPulling="2026-01-28 16:06:25.47465367 +0000 UTC m=+1278.229017253" observedRunningTime="2026-01-28 16:06:26.993784907 +0000 UTC m=+1279.748148490" watchObservedRunningTime="2026-01-28 16:06:26.994785914 +0000 UTC m=+1279.749149497" Jan 28 16:06:27 crc kubenswrapper[4811]: I0128 16:06:27.992418 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b","Type":"ContainerStarted","Data":"4816fdef8ee72288ed6f44ab40270249e2109c09ec8ad6ae0094d790712afe47"} Jan 28 16:06:27 crc kubenswrapper[4811]: I0128 16:06:27.992498 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b","Type":"ContainerStarted","Data":"07584449cf1e8492fdd30850781850c18a6beb2ad58ae4c5d57dde647dfa08c3"} Jan 28 16:06:28 crc kubenswrapper[4811]: I0128 16:06:28.049227 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=36.861979804 podStartE2EDuration="59.049208574s" podCreationTimestamp="2026-01-28 16:05:29 +0000 UTC" firstStartedPulling="2026-01-28 16:06:03.307491151 +0000 UTC m=+1256.061854734" lastFinishedPulling="2026-01-28 16:06:25.494719921 +0000 UTC m=+1278.249083504" observedRunningTime="2026-01-28 16:06:28.043323114 +0000 UTC m=+1280.797686717" watchObservedRunningTime="2026-01-28 16:06:28.049208574 +0000 UTC m=+1280.803572167" Jan 28 16:06:28 crc kubenswrapper[4811]: I0128 16:06:28.337480 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-t9s25" Jan 28 16:06:28 crc kubenswrapper[4811]: I0128 16:06:28.355730 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-75bdffd66f-dd4dv"] Jan 28 16:06:28 crc kubenswrapper[4811]: E0128 16:06:28.356322 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="505639ba-81cf-4cae-b494-e88d944457ea" containerName="mariadb-account-create-update" Jan 28 16:06:28 crc kubenswrapper[4811]: I0128 16:06:28.356351 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="505639ba-81cf-4cae-b494-e88d944457ea" containerName="mariadb-account-create-update" Jan 28 16:06:28 crc kubenswrapper[4811]: I0128 16:06:28.356627 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="505639ba-81cf-4cae-b494-e88d944457ea" containerName="mariadb-account-create-update" Jan 28 16:06:28 crc kubenswrapper[4811]: I0128 16:06:28.357848 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75bdffd66f-dd4dv" Jan 28 16:06:28 crc kubenswrapper[4811]: I0128 16:06:28.362913 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Jan 28 16:06:28 crc kubenswrapper[4811]: I0128 16:06:28.432709 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75bdffd66f-dd4dv"] Jan 28 16:06:28 crc kubenswrapper[4811]: I0128 16:06:28.464840 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/505639ba-81cf-4cae-b494-e88d944457ea-operator-scripts\") pod \"505639ba-81cf-4cae-b494-e88d944457ea\" (UID: \"505639ba-81cf-4cae-b494-e88d944457ea\") " Jan 28 16:06:28 crc kubenswrapper[4811]: I0128 16:06:28.465118 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-msfz4\" (UniqueName: \"kubernetes.io/projected/505639ba-81cf-4cae-b494-e88d944457ea-kube-api-access-msfz4\") pod \"505639ba-81cf-4cae-b494-e88d944457ea\" (UID: \"505639ba-81cf-4cae-b494-e88d944457ea\") " Jan 28 16:06:28 crc kubenswrapper[4811]: I0128 16:06:28.466169 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/505639ba-81cf-4cae-b494-e88d944457ea-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "505639ba-81cf-4cae-b494-e88d944457ea" (UID: "505639ba-81cf-4cae-b494-e88d944457ea"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:06:28 crc kubenswrapper[4811]: I0128 16:06:28.466197 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/76dc5dd7-5559-47a6-9d0d-c576172cf695-dns-swift-storage-0\") pod \"dnsmasq-dns-75bdffd66f-dd4dv\" (UID: \"76dc5dd7-5559-47a6-9d0d-c576172cf695\") " pod="openstack/dnsmasq-dns-75bdffd66f-dd4dv" Jan 28 16:06:28 crc kubenswrapper[4811]: I0128 16:06:28.466260 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76dc5dd7-5559-47a6-9d0d-c576172cf695-ovsdbserver-nb\") pod \"dnsmasq-dns-75bdffd66f-dd4dv\" (UID: \"76dc5dd7-5559-47a6-9d0d-c576172cf695\") " pod="openstack/dnsmasq-dns-75bdffd66f-dd4dv" Jan 28 16:06:28 crc kubenswrapper[4811]: I0128 16:06:28.466282 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76dc5dd7-5559-47a6-9d0d-c576172cf695-config\") pod \"dnsmasq-dns-75bdffd66f-dd4dv\" (UID: \"76dc5dd7-5559-47a6-9d0d-c576172cf695\") " pod="openstack/dnsmasq-dns-75bdffd66f-dd4dv" Jan 28 16:06:28 crc kubenswrapper[4811]: I0128 16:06:28.469912 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76dc5dd7-5559-47a6-9d0d-c576172cf695-dns-svc\") pod \"dnsmasq-dns-75bdffd66f-dd4dv\" (UID: \"76dc5dd7-5559-47a6-9d0d-c576172cf695\") " pod="openstack/dnsmasq-dns-75bdffd66f-dd4dv" Jan 28 16:06:28 crc kubenswrapper[4811]: I0128 16:06:28.470191 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76dc5dd7-5559-47a6-9d0d-c576172cf695-ovsdbserver-sb\") pod \"dnsmasq-dns-75bdffd66f-dd4dv\" (UID: \"76dc5dd7-5559-47a6-9d0d-c576172cf695\") " 
pod="openstack/dnsmasq-dns-75bdffd66f-dd4dv" Jan 28 16:06:28 crc kubenswrapper[4811]: I0128 16:06:28.470373 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scn78\" (UniqueName: \"kubernetes.io/projected/76dc5dd7-5559-47a6-9d0d-c576172cf695-kube-api-access-scn78\") pod \"dnsmasq-dns-75bdffd66f-dd4dv\" (UID: \"76dc5dd7-5559-47a6-9d0d-c576172cf695\") " pod="openstack/dnsmasq-dns-75bdffd66f-dd4dv" Jan 28 16:06:28 crc kubenswrapper[4811]: I0128 16:06:28.470547 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/505639ba-81cf-4cae-b494-e88d944457ea-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:28 crc kubenswrapper[4811]: I0128 16:06:28.477661 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/505639ba-81cf-4cae-b494-e88d944457ea-kube-api-access-msfz4" (OuterVolumeSpecName: "kube-api-access-msfz4") pod "505639ba-81cf-4cae-b494-e88d944457ea" (UID: "505639ba-81cf-4cae-b494-e88d944457ea"). InnerVolumeSpecName "kube-api-access-msfz4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:06:28 crc kubenswrapper[4811]: I0128 16:06:28.571852 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/76dc5dd7-5559-47a6-9d0d-c576172cf695-dns-swift-storage-0\") pod \"dnsmasq-dns-75bdffd66f-dd4dv\" (UID: \"76dc5dd7-5559-47a6-9d0d-c576172cf695\") " pod="openstack/dnsmasq-dns-75bdffd66f-dd4dv" Jan 28 16:06:28 crc kubenswrapper[4811]: I0128 16:06:28.571920 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76dc5dd7-5559-47a6-9d0d-c576172cf695-ovsdbserver-nb\") pod \"dnsmasq-dns-75bdffd66f-dd4dv\" (UID: \"76dc5dd7-5559-47a6-9d0d-c576172cf695\") " pod="openstack/dnsmasq-dns-75bdffd66f-dd4dv" Jan 28 16:06:28 crc kubenswrapper[4811]: I0128 16:06:28.571947 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76dc5dd7-5559-47a6-9d0d-c576172cf695-config\") pod \"dnsmasq-dns-75bdffd66f-dd4dv\" (UID: \"76dc5dd7-5559-47a6-9d0d-c576172cf695\") " pod="openstack/dnsmasq-dns-75bdffd66f-dd4dv" Jan 28 16:06:28 crc kubenswrapper[4811]: I0128 16:06:28.572032 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76dc5dd7-5559-47a6-9d0d-c576172cf695-dns-svc\") pod \"dnsmasq-dns-75bdffd66f-dd4dv\" (UID: \"76dc5dd7-5559-47a6-9d0d-c576172cf695\") " pod="openstack/dnsmasq-dns-75bdffd66f-dd4dv" Jan 28 16:06:28 crc kubenswrapper[4811]: I0128 16:06:28.572067 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76dc5dd7-5559-47a6-9d0d-c576172cf695-ovsdbserver-sb\") pod \"dnsmasq-dns-75bdffd66f-dd4dv\" (UID: \"76dc5dd7-5559-47a6-9d0d-c576172cf695\") " pod="openstack/dnsmasq-dns-75bdffd66f-dd4dv" Jan 28 16:06:28 crc kubenswrapper[4811]: I0128 16:06:28.572103 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scn78\" (UniqueName: \"kubernetes.io/projected/76dc5dd7-5559-47a6-9d0d-c576172cf695-kube-api-access-scn78\") pod \"dnsmasq-dns-75bdffd66f-dd4dv\" (UID: \"76dc5dd7-5559-47a6-9d0d-c576172cf695\") " pod="openstack/dnsmasq-dns-75bdffd66f-dd4dv" Jan 28 16:06:28 crc 
kubenswrapper[4811]: I0128 16:06:28.572159 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-msfz4\" (UniqueName: \"kubernetes.io/projected/505639ba-81cf-4cae-b494-e88d944457ea-kube-api-access-msfz4\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:28 crc kubenswrapper[4811]: I0128 16:06:28.574019 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76dc5dd7-5559-47a6-9d0d-c576172cf695-dns-svc\") pod \"dnsmasq-dns-75bdffd66f-dd4dv\" (UID: \"76dc5dd7-5559-47a6-9d0d-c576172cf695\") " pod="openstack/dnsmasq-dns-75bdffd66f-dd4dv" Jan 28 16:06:28 crc kubenswrapper[4811]: I0128 16:06:28.574020 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76dc5dd7-5559-47a6-9d0d-c576172cf695-config\") pod \"dnsmasq-dns-75bdffd66f-dd4dv\" (UID: \"76dc5dd7-5559-47a6-9d0d-c576172cf695\") " pod="openstack/dnsmasq-dns-75bdffd66f-dd4dv" Jan 28 16:06:28 crc kubenswrapper[4811]: I0128 16:06:28.574091 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76dc5dd7-5559-47a6-9d0d-c576172cf695-ovsdbserver-sb\") pod \"dnsmasq-dns-75bdffd66f-dd4dv\" (UID: \"76dc5dd7-5559-47a6-9d0d-c576172cf695\") " pod="openstack/dnsmasq-dns-75bdffd66f-dd4dv" Jan 28 16:06:28 crc kubenswrapper[4811]: I0128 16:06:28.574397 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/76dc5dd7-5559-47a6-9d0d-c576172cf695-dns-swift-storage-0\") pod \"dnsmasq-dns-75bdffd66f-dd4dv\" (UID: \"76dc5dd7-5559-47a6-9d0d-c576172cf695\") " pod="openstack/dnsmasq-dns-75bdffd66f-dd4dv" Jan 28 16:06:28 crc kubenswrapper[4811]: I0128 16:06:28.574877 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76dc5dd7-5559-47a6-9d0d-c576172cf695-ovsdbserver-nb\") pod \"dnsmasq-dns-75bdffd66f-dd4dv\" (UID: \"76dc5dd7-5559-47a6-9d0d-c576172cf695\") " pod="openstack/dnsmasq-dns-75bdffd66f-dd4dv" Jan 28 16:06:28 crc kubenswrapper[4811]: I0128 16:06:28.591094 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scn78\" (UniqueName: \"kubernetes.io/projected/76dc5dd7-5559-47a6-9d0d-c576172cf695-kube-api-access-scn78\") pod \"dnsmasq-dns-75bdffd66f-dd4dv\" (UID: \"76dc5dd7-5559-47a6-9d0d-c576172cf695\") " pod="openstack/dnsmasq-dns-75bdffd66f-dd4dv" Jan 28 16:06:28 crc kubenswrapper[4811]: I0128 16:06:28.674369 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75bdffd66f-dd4dv" Jan 28 16:06:29 crc kubenswrapper[4811]: I0128 16:06:29.005228 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-t9s25" Jan 28 16:06:29 crc kubenswrapper[4811]: I0128 16:06:29.005488 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-t9s25" event={"ID":"505639ba-81cf-4cae-b494-e88d944457ea","Type":"ContainerDied","Data":"18659e849644afd924e0a0e6cba2bcac6d106a4a7f542ba958552ce4ceb28cf9"} Jan 28 16:06:29 crc kubenswrapper[4811]: I0128 16:06:29.006808 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="18659e849644afd924e0a0e6cba2bcac6d106a4a7f542ba958552ce4ceb28cf9" Jan 28 16:06:29 crc kubenswrapper[4811]: I0128 16:06:29.007573 4811 generic.go:334] "Generic (PLEG): container finished" podID="1fbf8a43-325c-4b8b-bdfa-8909e88b6b31" containerID="6bab905966a0769cc19f4f9e9080a14cf74fc575091dd1fb4eefb542e81f131c" exitCode=0 Jan 28 16:06:29 crc kubenswrapper[4811]: I0128 16:06:29.007951 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-ssv4k" event={"ID":"1fbf8a43-325c-4b8b-bdfa-8909e88b6b31","Type":"ContainerDied","Data":"6bab905966a0769cc19f4f9e9080a14cf74fc575091dd1fb4eefb542e81f131c"} Jan 28 16:06:29 crc kubenswrapper[4811]: I0128 16:06:29.107980 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75bdffd66f-dd4dv"] Jan 28 16:06:29 crc kubenswrapper[4811]: W0128 16:06:29.116800 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod76dc5dd7_5559_47a6_9d0d_c576172cf695.slice/crio-846ac74e131afacb3aa8e8987620ba8115bd2d63082b50ce8a82cc23898f50ad WatchSource:0}: Error finding container 846ac74e131afacb3aa8e8987620ba8115bd2d63082b50ce8a82cc23898f50ad: Status 404 returned error can't find the container with id 846ac74e131afacb3aa8e8987620ba8115bd2d63082b50ce8a82cc23898f50ad Jan 28 16:06:30 crc kubenswrapper[4811]: I0128 16:06:30.017886 4811 generic.go:334] "Generic (PLEG): container finished" podID="76dc5dd7-5559-47a6-9d0d-c576172cf695" containerID="1394e9bb9c8a5692fd17d57c698e9aa69c5ea54103771a0309eebda86500fa00" exitCode=0 Jan 28 16:06:30 crc kubenswrapper[4811]: I0128 16:06:30.017942 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75bdffd66f-dd4dv" event={"ID":"76dc5dd7-5559-47a6-9d0d-c576172cf695","Type":"ContainerDied","Data":"1394e9bb9c8a5692fd17d57c698e9aa69c5ea54103771a0309eebda86500fa00"} Jan 28 16:06:30 crc kubenswrapper[4811]: I0128 16:06:30.018271 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75bdffd66f-dd4dv" event={"ID":"76dc5dd7-5559-47a6-9d0d-c576172cf695","Type":"ContainerStarted","Data":"846ac74e131afacb3aa8e8987620ba8115bd2d63082b50ce8a82cc23898f50ad"} Jan 28 16:06:30 crc kubenswrapper[4811]: I0128 16:06:30.309621 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-ssv4k" Jan 28 16:06:30 crc kubenswrapper[4811]: I0128 16:06:30.425054 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fbf8a43-325c-4b8b-bdfa-8909e88b6b31-config-data\") pod \"1fbf8a43-325c-4b8b-bdfa-8909e88b6b31\" (UID: \"1fbf8a43-325c-4b8b-bdfa-8909e88b6b31\") " Jan 28 16:06:30 crc kubenswrapper[4811]: I0128 16:06:30.425207 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fbf8a43-325c-4b8b-bdfa-8909e88b6b31-combined-ca-bundle\") pod \"1fbf8a43-325c-4b8b-bdfa-8909e88b6b31\" (UID: \"1fbf8a43-325c-4b8b-bdfa-8909e88b6b31\") " Jan 28 16:06:30 crc kubenswrapper[4811]: I0128 16:06:30.425265 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5kq48\" (UniqueName: \"kubernetes.io/projected/1fbf8a43-325c-4b8b-bdfa-8909e88b6b31-kube-api-access-5kq48\") pod \"1fbf8a43-325c-4b8b-bdfa-8909e88b6b31\" (UID: \"1fbf8a43-325c-4b8b-bdfa-8909e88b6b31\") " Jan 28 16:06:30 crc kubenswrapper[4811]: I0128 16:06:30.429987 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fbf8a43-325c-4b8b-bdfa-8909e88b6b31-kube-api-access-5kq48" (OuterVolumeSpecName: "kube-api-access-5kq48") pod "1fbf8a43-325c-4b8b-bdfa-8909e88b6b31" (UID: "1fbf8a43-325c-4b8b-bdfa-8909e88b6b31"). InnerVolumeSpecName "kube-api-access-5kq48". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:06:30 crc kubenswrapper[4811]: I0128 16:06:30.449665 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fbf8a43-325c-4b8b-bdfa-8909e88b6b31-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1fbf8a43-325c-4b8b-bdfa-8909e88b6b31" (UID: "1fbf8a43-325c-4b8b-bdfa-8909e88b6b31"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:06:30 crc kubenswrapper[4811]: I0128 16:06:30.474928 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fbf8a43-325c-4b8b-bdfa-8909e88b6b31-config-data" (OuterVolumeSpecName: "config-data") pod "1fbf8a43-325c-4b8b-bdfa-8909e88b6b31" (UID: "1fbf8a43-325c-4b8b-bdfa-8909e88b6b31"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:06:30 crc kubenswrapper[4811]: I0128 16:06:30.527521 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fbf8a43-325c-4b8b-bdfa-8909e88b6b31-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:30 crc kubenswrapper[4811]: I0128 16:06:30.527558 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5kq48\" (UniqueName: \"kubernetes.io/projected/1fbf8a43-325c-4b8b-bdfa-8909e88b6b31-kube-api-access-5kq48\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:30 crc kubenswrapper[4811]: I0128 16:06:30.527573 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fbf8a43-325c-4b8b-bdfa-8909e88b6b31-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.028793 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75bdffd66f-dd4dv" event={"ID":"76dc5dd7-5559-47a6-9d0d-c576172cf695","Type":"ContainerStarted","Data":"c6e19834bdefd40ed23094b1023158e8ea14108c2402ba9c600920253b52af56"} Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.028920 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-75bdffd66f-dd4dv" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.030607 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-ssv4k" event={"ID":"1fbf8a43-325c-4b8b-bdfa-8909e88b6b31","Type":"ContainerDied","Data":"6e549ebb3f407510065adee1edd389292642968f40b1695f2423a7893ea12c96"} Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.030643 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6e549ebb3f407510065adee1edd389292642968f40b1695f2423a7893ea12c96" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.030735 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-ssv4k" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.066969 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-75bdffd66f-dd4dv" podStartSLOduration=3.066949215 podStartE2EDuration="3.066949215s" podCreationTimestamp="2026-01-28 16:06:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:06:31.052066963 +0000 UTC m=+1283.806430566" watchObservedRunningTime="2026-01-28 16:06:31.066949215 +0000 UTC m=+1283.821312798" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.306298 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-b5lhm"] Jan 28 16:06:31 crc kubenswrapper[4811]: E0128 16:06:31.306786 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fbf8a43-325c-4b8b-bdfa-8909e88b6b31" containerName="keystone-db-sync" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.306812 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fbf8a43-325c-4b8b-bdfa-8909e88b6b31" containerName="keystone-db-sync" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.307040 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fbf8a43-325c-4b8b-bdfa-8909e88b6b31" containerName="keystone-db-sync" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.307726 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-b5lhm" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.314230 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-ns2b7" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.314613 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.314794 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.314450 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.320987 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75bdffd66f-dd4dv"] Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.321786 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.340096 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-b5lhm"] Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.386282 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77bbd879b9-d5jv2"] Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.394755 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77bbd879b9-d5jv2" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.437884 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77bbd879b9-d5jv2"] Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.449564 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ccw45\" (UniqueName: \"kubernetes.io/projected/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-kube-api-access-ccw45\") pod \"keystone-bootstrap-b5lhm\" (UID: \"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad\") " pod="openstack/keystone-bootstrap-b5lhm" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.449641 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-scripts\") pod \"keystone-bootstrap-b5lhm\" (UID: \"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad\") " pod="openstack/keystone-bootstrap-b5lhm" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.449823 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-fernet-keys\") pod \"keystone-bootstrap-b5lhm\" (UID: \"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad\") " pod="openstack/keystone-bootstrap-b5lhm" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.449853 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-credential-keys\") pod \"keystone-bootstrap-b5lhm\" (UID: \"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad\") " pod="openstack/keystone-bootstrap-b5lhm" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.450007 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-combined-ca-bundle\") pod \"keystone-bootstrap-b5lhm\" (UID: \"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad\") " pod="openstack/keystone-bootstrap-b5lhm" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.450035 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-config-data\") pod \"keystone-bootstrap-b5lhm\" (UID: \"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad\") " pod="openstack/keystone-bootstrap-b5lhm" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.522413 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-hdczs"] Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.523789 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-hdczs" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.535798 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.535990 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-qd4pr" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.549624 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.551275 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-combined-ca-bundle\") pod \"keystone-bootstrap-b5lhm\" (UID: \"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad\") " pod="openstack/keystone-bootstrap-b5lhm" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.551325 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-config-data\") pod \"keystone-bootstrap-b5lhm\" (UID: \"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad\") " pod="openstack/keystone-bootstrap-b5lhm" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.551356 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a5ff06a-d87b-4f48-b51f-328830acba2b-dns-swift-storage-0\") pod \"dnsmasq-dns-77bbd879b9-d5jv2\" (UID: \"1a5ff06a-d87b-4f48-b51f-328830acba2b\") " pod="openstack/dnsmasq-dns-77bbd879b9-d5jv2" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.551385 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a5ff06a-d87b-4f48-b51f-328830acba2b-dns-svc\") pod \"dnsmasq-dns-77bbd879b9-d5jv2\" (UID: \"1a5ff06a-d87b-4f48-b51f-328830acba2b\") " pod="openstack/dnsmasq-dns-77bbd879b9-d5jv2" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.551417 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a5ff06a-d87b-4f48-b51f-328830acba2b-ovsdbserver-nb\") pod \"dnsmasq-dns-77bbd879b9-d5jv2\" (UID: \"1a5ff06a-d87b-4f48-b51f-328830acba2b\") " pod="openstack/dnsmasq-dns-77bbd879b9-d5jv2" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.551471 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a5ff06a-d87b-4f48-b51f-328830acba2b-config\") pod \"dnsmasq-dns-77bbd879b9-d5jv2\" (UID: \"1a5ff06a-d87b-4f48-b51f-328830acba2b\") " pod="openstack/dnsmasq-dns-77bbd879b9-d5jv2" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.551501 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ccw45\" (UniqueName: \"kubernetes.io/projected/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-kube-api-access-ccw45\") pod \"keystone-bootstrap-b5lhm\" (UID: \"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad\") " pod="openstack/keystone-bootstrap-b5lhm" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.551529 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-scripts\") pod \"keystone-bootstrap-b5lhm\" (UID: \"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad\") " pod="openstack/keystone-bootstrap-b5lhm" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.551574 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgkxj\" (UniqueName: \"kubernetes.io/projected/1a5ff06a-d87b-4f48-b51f-328830acba2b-kube-api-access-hgkxj\") pod \"dnsmasq-dns-77bbd879b9-d5jv2\" (UID: \"1a5ff06a-d87b-4f48-b51f-328830acba2b\") " pod="openstack/dnsmasq-dns-77bbd879b9-d5jv2" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.551634 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a5ff06a-d87b-4f48-b51f-328830acba2b-ovsdbserver-sb\") pod \"dnsmasq-dns-77bbd879b9-d5jv2\" (UID: \"1a5ff06a-d87b-4f48-b51f-328830acba2b\") " pod="openstack/dnsmasq-dns-77bbd879b9-d5jv2" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.551670 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-fernet-keys\") pod \"keystone-bootstrap-b5lhm\" (UID: \"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad\") " pod="openstack/keystone-bootstrap-b5lhm" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.551704 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-credential-keys\") pod \"keystone-bootstrap-b5lhm\" (UID: \"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad\") " pod="openstack/keystone-bootstrap-b5lhm" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.555966 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-hdczs"] Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.572971 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-fernet-keys\") pod \"keystone-bootstrap-b5lhm\" (UID: \"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad\") " pod="openstack/keystone-bootstrap-b5lhm" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.573203 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-scripts\") pod \"keystone-bootstrap-b5lhm\" (UID: \"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad\") " pod="openstack/keystone-bootstrap-b5lhm" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.583165 4811 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-config-data\") pod \"keystone-bootstrap-b5lhm\" (UID: \"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad\") " pod="openstack/keystone-bootstrap-b5lhm" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.586927 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-combined-ca-bundle\") pod \"keystone-bootstrap-b5lhm\" (UID: \"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad\") " pod="openstack/keystone-bootstrap-b5lhm" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.593772 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-credential-keys\") pod \"keystone-bootstrap-b5lhm\" (UID: \"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad\") " pod="openstack/keystone-bootstrap-b5lhm" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.606177 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ccw45\" (UniqueName: \"kubernetes.io/projected/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-kube-api-access-ccw45\") pod \"keystone-bootstrap-b5lhm\" (UID: \"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad\") " pod="openstack/keystone-bootstrap-b5lhm" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.629456 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-b5lhm" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.638256 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-vhxrm"] Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.652470 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-4bw6q"] Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.653352 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7acf4fe9-fb7e-4260-9976-04deecc1264d-config-data\") pod \"cinder-db-sync-hdczs\" (UID: \"7acf4fe9-fb7e-4260-9976-04deecc1264d\") " pod="openstack/cinder-db-sync-hdczs" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.653414 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a5ff06a-d87b-4f48-b51f-328830acba2b-ovsdbserver-sb\") pod \"dnsmasq-dns-77bbd879b9-d5jv2\" (UID: \"1a5ff06a-d87b-4f48-b51f-328830acba2b\") " pod="openstack/dnsmasq-dns-77bbd879b9-d5jv2" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.653493 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j47qm\" (UniqueName: \"kubernetes.io/projected/7acf4fe9-fb7e-4260-9976-04deecc1264d-kube-api-access-j47qm\") pod \"cinder-db-sync-hdczs\" (UID: \"7acf4fe9-fb7e-4260-9976-04deecc1264d\") " pod="openstack/cinder-db-sync-hdczs" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.653514 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a5ff06a-d87b-4f48-b51f-328830acba2b-dns-swift-storage-0\") pod \"dnsmasq-dns-77bbd879b9-d5jv2\" (UID: \"1a5ff06a-d87b-4f48-b51f-328830acba2b\") " pod="openstack/dnsmasq-dns-77bbd879b9-d5jv2" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.653535 4811 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a5ff06a-d87b-4f48-b51f-328830acba2b-dns-svc\") pod \"dnsmasq-dns-77bbd879b9-d5jv2\" (UID: \"1a5ff06a-d87b-4f48-b51f-328830acba2b\") " pod="openstack/dnsmasq-dns-77bbd879b9-d5jv2" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.653550 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7acf4fe9-fb7e-4260-9976-04deecc1264d-etc-machine-id\") pod \"cinder-db-sync-hdczs\" (UID: \"7acf4fe9-fb7e-4260-9976-04deecc1264d\") " pod="openstack/cinder-db-sync-hdczs" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.653570 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7acf4fe9-fb7e-4260-9976-04deecc1264d-db-sync-config-data\") pod \"cinder-db-sync-hdczs\" (UID: \"7acf4fe9-fb7e-4260-9976-04deecc1264d\") " pod="openstack/cinder-db-sync-hdczs" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.653588 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a5ff06a-d87b-4f48-b51f-328830acba2b-ovsdbserver-nb\") pod \"dnsmasq-dns-77bbd879b9-d5jv2\" (UID: \"1a5ff06a-d87b-4f48-b51f-328830acba2b\") " pod="openstack/dnsmasq-dns-77bbd879b9-d5jv2" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.653615 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a5ff06a-d87b-4f48-b51f-328830acba2b-config\") pod \"dnsmasq-dns-77bbd879b9-d5jv2\" (UID: \"1a5ff06a-d87b-4f48-b51f-328830acba2b\") " pod="openstack/dnsmasq-dns-77bbd879b9-d5jv2" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.653648 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7acf4fe9-fb7e-4260-9976-04deecc1264d-combined-ca-bundle\") pod \"cinder-db-sync-hdczs\" (UID: \"7acf4fe9-fb7e-4260-9976-04deecc1264d\") " pod="openstack/cinder-db-sync-hdczs" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.653666 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7acf4fe9-fb7e-4260-9976-04deecc1264d-scripts\") pod \"cinder-db-sync-hdczs\" (UID: \"7acf4fe9-fb7e-4260-9976-04deecc1264d\") " pod="openstack/cinder-db-sync-hdczs" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.653688 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgkxj\" (UniqueName: \"kubernetes.io/projected/1a5ff06a-d87b-4f48-b51f-328830acba2b-kube-api-access-hgkxj\") pod \"dnsmasq-dns-77bbd879b9-d5jv2\" (UID: \"1a5ff06a-d87b-4f48-b51f-328830acba2b\") " pod="openstack/dnsmasq-dns-77bbd879b9-d5jv2" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.654770 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a5ff06a-d87b-4f48-b51f-328830acba2b-ovsdbserver-sb\") pod \"dnsmasq-dns-77bbd879b9-d5jv2\" (UID: \"1a5ff06a-d87b-4f48-b51f-328830acba2b\") " pod="openstack/dnsmasq-dns-77bbd879b9-d5jv2" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.655280 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a5ff06a-d87b-4f48-b51f-328830acba2b-dns-swift-storage-0\") pod \"dnsmasq-dns-77bbd879b9-d5jv2\" (UID: \"1a5ff06a-d87b-4f48-b51f-328830acba2b\") " pod="openstack/dnsmasq-dns-77bbd879b9-d5jv2" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.655778 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a5ff06a-d87b-4f48-b51f-328830acba2b-dns-svc\") pod \"dnsmasq-dns-77bbd879b9-d5jv2\" (UID: \"1a5ff06a-d87b-4f48-b51f-328830acba2b\") " pod="openstack/dnsmasq-dns-77bbd879b9-d5jv2" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.656286 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a5ff06a-d87b-4f48-b51f-328830acba2b-ovsdbserver-nb\") pod \"dnsmasq-dns-77bbd879b9-d5jv2\" (UID: \"1a5ff06a-d87b-4f48-b51f-328830acba2b\") " pod="openstack/dnsmasq-dns-77bbd879b9-d5jv2" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.657227 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-vhxrm" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.657303 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-4bw6q" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.663409 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a5ff06a-d87b-4f48-b51f-328830acba2b-config\") pod \"dnsmasq-dns-77bbd879b9-d5jv2\" (UID: \"1a5ff06a-d87b-4f48-b51f-328830acba2b\") " pod="openstack/dnsmasq-dns-77bbd879b9-d5jv2" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.665174 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.665567 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-nfgxd" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.666666 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.667392 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-ld8qm" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.669369 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.670121 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.671574 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-vhxrm"] Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.685488 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgkxj\" (UniqueName: \"kubernetes.io/projected/1a5ff06a-d87b-4f48-b51f-328830acba2b-kube-api-access-hgkxj\") pod \"dnsmasq-dns-77bbd879b9-d5jv2\" (UID: \"1a5ff06a-d87b-4f48-b51f-328830acba2b\") " pod="openstack/dnsmasq-dns-77bbd879b9-d5jv2" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.722465 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77bbd879b9-d5jv2"] Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.723067 4811 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/dnsmasq-dns-77bbd879b9-d5jv2" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.752046 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-4bw6q"] Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.762389 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1-logs\") pod \"placement-db-sync-4bw6q\" (UID: \"b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1\") " pod="openstack/placement-db-sync-4bw6q" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.762481 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnvv8\" (UniqueName: \"kubernetes.io/projected/55b5b721-9672-447d-b4d8-b53b7de5ce2f-kube-api-access-qnvv8\") pod \"neutron-db-sync-vhxrm\" (UID: \"55b5b721-9672-447d-b4d8-b53b7de5ce2f\") " pod="openstack/neutron-db-sync-vhxrm" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.762521 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j47qm\" (UniqueName: \"kubernetes.io/projected/7acf4fe9-fb7e-4260-9976-04deecc1264d-kube-api-access-j47qm\") pod \"cinder-db-sync-hdczs\" (UID: \"7acf4fe9-fb7e-4260-9976-04deecc1264d\") " pod="openstack/cinder-db-sync-hdczs" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.762546 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1-combined-ca-bundle\") pod \"placement-db-sync-4bw6q\" (UID: \"b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1\") " pod="openstack/placement-db-sync-4bw6q" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.762580 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7acf4fe9-fb7e-4260-9976-04deecc1264d-etc-machine-id\") pod \"cinder-db-sync-hdczs\" (UID: \"7acf4fe9-fb7e-4260-9976-04deecc1264d\") " pod="openstack/cinder-db-sync-hdczs" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.762603 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55b5b721-9672-447d-b4d8-b53b7de5ce2f-combined-ca-bundle\") pod \"neutron-db-sync-vhxrm\" (UID: \"55b5b721-9672-447d-b4d8-b53b7de5ce2f\") " pod="openstack/neutron-db-sync-vhxrm" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.762634 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7acf4fe9-fb7e-4260-9976-04deecc1264d-db-sync-config-data\") pod \"cinder-db-sync-hdczs\" (UID: \"7acf4fe9-fb7e-4260-9976-04deecc1264d\") " pod="openstack/cinder-db-sync-hdczs" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.762694 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7acf4fe9-fb7e-4260-9976-04deecc1264d-combined-ca-bundle\") pod \"cinder-db-sync-hdczs\" (UID: \"7acf4fe9-fb7e-4260-9976-04deecc1264d\") " pod="openstack/cinder-db-sync-hdczs" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.762720 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/7acf4fe9-fb7e-4260-9976-04deecc1264d-scripts\") pod \"cinder-db-sync-hdczs\" (UID: \"7acf4fe9-fb7e-4260-9976-04deecc1264d\") " pod="openstack/cinder-db-sync-hdczs" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.762744 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1-scripts\") pod \"placement-db-sync-4bw6q\" (UID: \"b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1\") " pod="openstack/placement-db-sync-4bw6q" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.762798 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rn2z\" (UniqueName: \"kubernetes.io/projected/b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1-kube-api-access-8rn2z\") pod \"placement-db-sync-4bw6q\" (UID: \"b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1\") " pod="openstack/placement-db-sync-4bw6q" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.762828 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7acf4fe9-fb7e-4260-9976-04deecc1264d-config-data\") pod \"cinder-db-sync-hdczs\" (UID: \"7acf4fe9-fb7e-4260-9976-04deecc1264d\") " pod="openstack/cinder-db-sync-hdczs" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.762862 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1-config-data\") pod \"placement-db-sync-4bw6q\" (UID: \"b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1\") " pod="openstack/placement-db-sync-4bw6q" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.762900 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/55b5b721-9672-447d-b4d8-b53b7de5ce2f-config\") pod \"neutron-db-sync-vhxrm\" (UID: \"55b5b721-9672-447d-b4d8-b53b7de5ce2f\") " pod="openstack/neutron-db-sync-vhxrm" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.763314 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7acf4fe9-fb7e-4260-9976-04deecc1264d-etc-machine-id\") pod \"cinder-db-sync-hdczs\" (UID: \"7acf4fe9-fb7e-4260-9976-04deecc1264d\") " pod="openstack/cinder-db-sync-hdczs" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.774242 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7acf4fe9-fb7e-4260-9976-04deecc1264d-db-sync-config-data\") pod \"cinder-db-sync-hdczs\" (UID: \"7acf4fe9-fb7e-4260-9976-04deecc1264d\") " pod="openstack/cinder-db-sync-hdczs" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.775141 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7acf4fe9-fb7e-4260-9976-04deecc1264d-combined-ca-bundle\") pod \"cinder-db-sync-hdczs\" (UID: \"7acf4fe9-fb7e-4260-9976-04deecc1264d\") " pod="openstack/cinder-db-sync-hdczs" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.786632 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7acf4fe9-fb7e-4260-9976-04deecc1264d-scripts\") pod \"cinder-db-sync-hdczs\" (UID: \"7acf4fe9-fb7e-4260-9976-04deecc1264d\") " 
pod="openstack/cinder-db-sync-hdczs" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.794806 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7acf4fe9-fb7e-4260-9976-04deecc1264d-config-data\") pod \"cinder-db-sync-hdczs\" (UID: \"7acf4fe9-fb7e-4260-9976-04deecc1264d\") " pod="openstack/cinder-db-sync-hdczs" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.804174 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j47qm\" (UniqueName: \"kubernetes.io/projected/7acf4fe9-fb7e-4260-9976-04deecc1264d-kube-api-access-j47qm\") pod \"cinder-db-sync-hdczs\" (UID: \"7acf4fe9-fb7e-4260-9976-04deecc1264d\") " pod="openstack/cinder-db-sync-hdczs" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.821580 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8495b76777-md4rx"] Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.823405 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8495b76777-md4rx" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.835403 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8495b76777-md4rx"] Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.856267 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-hdczs" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.862373 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-nh6rs"] Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.864412 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1-scripts\") pod \"placement-db-sync-4bw6q\" (UID: \"b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1\") " pod="openstack/placement-db-sync-4bw6q" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.864480 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rn2z\" (UniqueName: \"kubernetes.io/projected/b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1-kube-api-access-8rn2z\") pod \"placement-db-sync-4bw6q\" (UID: \"b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1\") " pod="openstack/placement-db-sync-4bw6q" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.864508 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1-config-data\") pod \"placement-db-sync-4bw6q\" (UID: \"b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1\") " pod="openstack/placement-db-sync-4bw6q" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.864534 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/55b5b721-9672-447d-b4d8-b53b7de5ce2f-config\") pod \"neutron-db-sync-vhxrm\" (UID: \"55b5b721-9672-447d-b4d8-b53b7de5ce2f\") " pod="openstack/neutron-db-sync-vhxrm" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.864571 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1-logs\") pod \"placement-db-sync-4bw6q\" (UID: \"b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1\") " pod="openstack/placement-db-sync-4bw6q" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.864604 4811 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-qnvv8\" (UniqueName: \"kubernetes.io/projected/55b5b721-9672-447d-b4d8-b53b7de5ce2f-kube-api-access-qnvv8\") pod \"neutron-db-sync-vhxrm\" (UID: \"55b5b721-9672-447d-b4d8-b53b7de5ce2f\") " pod="openstack/neutron-db-sync-vhxrm" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.864630 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1-combined-ca-bundle\") pod \"placement-db-sync-4bw6q\" (UID: \"b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1\") " pod="openstack/placement-db-sync-4bw6q" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.864649 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55b5b721-9672-447d-b4d8-b53b7de5ce2f-combined-ca-bundle\") pod \"neutron-db-sync-vhxrm\" (UID: \"55b5b721-9672-447d-b4d8-b53b7de5ce2f\") " pod="openstack/neutron-db-sync-vhxrm" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.865662 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-nh6rs" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.866629 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1-logs\") pod \"placement-db-sync-4bw6q\" (UID: \"b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1\") " pod="openstack/placement-db-sync-4bw6q" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.867956 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-nh6rs"] Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.869373 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/55b5b721-9672-447d-b4d8-b53b7de5ce2f-config\") pod \"neutron-db-sync-vhxrm\" (UID: \"55b5b721-9672-447d-b4d8-b53b7de5ce2f\") " pod="openstack/neutron-db-sync-vhxrm" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.869555 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.878218 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55b5b721-9672-447d-b4d8-b53b7de5ce2f-combined-ca-bundle\") pod \"neutron-db-sync-vhxrm\" (UID: \"55b5b721-9672-447d-b4d8-b53b7de5ce2f\") " pod="openstack/neutron-db-sync-vhxrm" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.878638 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-jrwm4" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.885250 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.885979 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1-config-data\") pod \"placement-db-sync-4bw6q\" (UID: \"b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1\") " pod="openstack/placement-db-sync-4bw6q" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.886031 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1-combined-ca-bundle\") pod \"placement-db-sync-4bw6q\" (UID: \"b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1\") " pod="openstack/placement-db-sync-4bw6q" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.888788 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.893786 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1-scripts\") pod \"placement-db-sync-4bw6q\" (UID: \"b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1\") " pod="openstack/placement-db-sync-4bw6q" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.894264 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnvv8\" (UniqueName: \"kubernetes.io/projected/55b5b721-9672-447d-b4d8-b53b7de5ce2f-kube-api-access-qnvv8\") pod \"neutron-db-sync-vhxrm\" (UID: \"55b5b721-9672-447d-b4d8-b53b7de5ce2f\") " pod="openstack/neutron-db-sync-vhxrm" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.894845 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.895029 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.897416 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.899153 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rn2z\" (UniqueName: \"kubernetes.io/projected/b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1-kube-api-access-8rn2z\") pod \"placement-db-sync-4bw6q\" (UID: \"b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1\") " pod="openstack/placement-db-sync-4bw6q" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.966366 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\") " pod="openstack/ceilometer-0" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.966450 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4be24dd0-394a-4835-9cb9-2684c2492079-dns-svc\") pod \"dnsmasq-dns-8495b76777-md4rx\" (UID: \"4be24dd0-394a-4835-9cb9-2684c2492079\") " pod="openstack/dnsmasq-dns-8495b76777-md4rx" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.966484 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/677fa641-ca48-40fe-8b91-40147f1376e5-db-sync-config-data\") pod \"barbican-db-sync-nh6rs\" (UID: \"677fa641-ca48-40fe-8b91-40147f1376e5\") " pod="openstack/barbican-db-sync-nh6rs" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.966626 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4be24dd0-394a-4835-9cb9-2684c2492079-ovsdbserver-nb\") pod \"dnsmasq-dns-8495b76777-md4rx\" (UID: \"4be24dd0-394a-4835-9cb9-2684c2492079\") " 
pod="openstack/dnsmasq-dns-8495b76777-md4rx" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.966743 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4be24dd0-394a-4835-9cb9-2684c2492079-ovsdbserver-sb\") pod \"dnsmasq-dns-8495b76777-md4rx\" (UID: \"4be24dd0-394a-4835-9cb9-2684c2492079\") " pod="openstack/dnsmasq-dns-8495b76777-md4rx" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.966784 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-scripts\") pod \"ceilometer-0\" (UID: \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\") " pod="openstack/ceilometer-0" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.966809 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lc8rd\" (UniqueName: \"kubernetes.io/projected/677fa641-ca48-40fe-8b91-40147f1376e5-kube-api-access-lc8rd\") pod \"barbican-db-sync-nh6rs\" (UID: \"677fa641-ca48-40fe-8b91-40147f1376e5\") " pod="openstack/barbican-db-sync-nh6rs" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.966873 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-config-data\") pod \"ceilometer-0\" (UID: \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\") " pod="openstack/ceilometer-0" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.967317 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4be24dd0-394a-4835-9cb9-2684c2492079-config\") pod \"dnsmasq-dns-8495b76777-md4rx\" (UID: \"4be24dd0-394a-4835-9cb9-2684c2492079\") " pod="openstack/dnsmasq-dns-8495b76777-md4rx" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.967397 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qm8fn\" (UniqueName: \"kubernetes.io/projected/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-kube-api-access-qm8fn\") pod \"ceilometer-0\" (UID: \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\") " pod="openstack/ceilometer-0" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.967424 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6gzt\" (UniqueName: \"kubernetes.io/projected/4be24dd0-394a-4835-9cb9-2684c2492079-kube-api-access-l6gzt\") pod \"dnsmasq-dns-8495b76777-md4rx\" (UID: \"4be24dd0-394a-4835-9cb9-2684c2492079\") " pod="openstack/dnsmasq-dns-8495b76777-md4rx" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.967473 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/677fa641-ca48-40fe-8b91-40147f1376e5-combined-ca-bundle\") pod \"barbican-db-sync-nh6rs\" (UID: \"677fa641-ca48-40fe-8b91-40147f1376e5\") " pod="openstack/barbican-db-sync-nh6rs" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.967497 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\") " 
pod="openstack/ceilometer-0" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.967522 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-log-httpd\") pod \"ceilometer-0\" (UID: \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\") " pod="openstack/ceilometer-0" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.967542 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4be24dd0-394a-4835-9cb9-2684c2492079-dns-swift-storage-0\") pod \"dnsmasq-dns-8495b76777-md4rx\" (UID: \"4be24dd0-394a-4835-9cb9-2684c2492079\") " pod="openstack/dnsmasq-dns-8495b76777-md4rx" Jan 28 16:06:31 crc kubenswrapper[4811]: I0128 16:06:31.967590 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-run-httpd\") pod \"ceilometer-0\" (UID: \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\") " pod="openstack/ceilometer-0" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.070108 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-config-data\") pod \"ceilometer-0\" (UID: \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\") " pod="openstack/ceilometer-0" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.070187 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4be24dd0-394a-4835-9cb9-2684c2492079-config\") pod \"dnsmasq-dns-8495b76777-md4rx\" (UID: \"4be24dd0-394a-4835-9cb9-2684c2492079\") " pod="openstack/dnsmasq-dns-8495b76777-md4rx" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.070239 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qm8fn\" (UniqueName: \"kubernetes.io/projected/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-kube-api-access-qm8fn\") pod \"ceilometer-0\" (UID: \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\") " pod="openstack/ceilometer-0" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.070267 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6gzt\" (UniqueName: \"kubernetes.io/projected/4be24dd0-394a-4835-9cb9-2684c2492079-kube-api-access-l6gzt\") pod \"dnsmasq-dns-8495b76777-md4rx\" (UID: \"4be24dd0-394a-4835-9cb9-2684c2492079\") " pod="openstack/dnsmasq-dns-8495b76777-md4rx" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.070293 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/677fa641-ca48-40fe-8b91-40147f1376e5-combined-ca-bundle\") pod \"barbican-db-sync-nh6rs\" (UID: \"677fa641-ca48-40fe-8b91-40147f1376e5\") " pod="openstack/barbican-db-sync-nh6rs" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.070313 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\") " pod="openstack/ceilometer-0" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.070336 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-log-httpd\") pod \"ceilometer-0\" (UID: \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\") " pod="openstack/ceilometer-0" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.070357 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4be24dd0-394a-4835-9cb9-2684c2492079-dns-swift-storage-0\") pod \"dnsmasq-dns-8495b76777-md4rx\" (UID: \"4be24dd0-394a-4835-9cb9-2684c2492079\") " pod="openstack/dnsmasq-dns-8495b76777-md4rx" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.070390 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-run-httpd\") pod \"ceilometer-0\" (UID: \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\") " pod="openstack/ceilometer-0" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.070541 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\") " pod="openstack/ceilometer-0" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.070571 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4be24dd0-394a-4835-9cb9-2684c2492079-dns-svc\") pod \"dnsmasq-dns-8495b76777-md4rx\" (UID: \"4be24dd0-394a-4835-9cb9-2684c2492079\") " pod="openstack/dnsmasq-dns-8495b76777-md4rx" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.070598 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/677fa641-ca48-40fe-8b91-40147f1376e5-db-sync-config-data\") pod \"barbican-db-sync-nh6rs\" (UID: \"677fa641-ca48-40fe-8b91-40147f1376e5\") " pod="openstack/barbican-db-sync-nh6rs" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.070629 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4be24dd0-394a-4835-9cb9-2684c2492079-ovsdbserver-sb\") pod \"dnsmasq-dns-8495b76777-md4rx\" (UID: \"4be24dd0-394a-4835-9cb9-2684c2492079\") " pod="openstack/dnsmasq-dns-8495b76777-md4rx" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.070650 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4be24dd0-394a-4835-9cb9-2684c2492079-ovsdbserver-nb\") pod \"dnsmasq-dns-8495b76777-md4rx\" (UID: \"4be24dd0-394a-4835-9cb9-2684c2492079\") " pod="openstack/dnsmasq-dns-8495b76777-md4rx" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.070672 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-scripts\") pod \"ceilometer-0\" (UID: \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\") " pod="openstack/ceilometer-0" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.070693 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lc8rd\" (UniqueName: \"kubernetes.io/projected/677fa641-ca48-40fe-8b91-40147f1376e5-kube-api-access-lc8rd\") pod \"barbican-db-sync-nh6rs\" (UID: \"677fa641-ca48-40fe-8b91-40147f1376e5\") " 
pod="openstack/barbican-db-sync-nh6rs" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.071224 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4be24dd0-394a-4835-9cb9-2684c2492079-config\") pod \"dnsmasq-dns-8495b76777-md4rx\" (UID: \"4be24dd0-394a-4835-9cb9-2684c2492079\") " pod="openstack/dnsmasq-dns-8495b76777-md4rx" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.075814 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4be24dd0-394a-4835-9cb9-2684c2492079-dns-svc\") pod \"dnsmasq-dns-8495b76777-md4rx\" (UID: \"4be24dd0-394a-4835-9cb9-2684c2492079\") " pod="openstack/dnsmasq-dns-8495b76777-md4rx" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.076536 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4be24dd0-394a-4835-9cb9-2684c2492079-ovsdbserver-sb\") pod \"dnsmasq-dns-8495b76777-md4rx\" (UID: \"4be24dd0-394a-4835-9cb9-2684c2492079\") " pod="openstack/dnsmasq-dns-8495b76777-md4rx" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.076733 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4be24dd0-394a-4835-9cb9-2684c2492079-ovsdbserver-nb\") pod \"dnsmasq-dns-8495b76777-md4rx\" (UID: \"4be24dd0-394a-4835-9cb9-2684c2492079\") " pod="openstack/dnsmasq-dns-8495b76777-md4rx" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.077689 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4be24dd0-394a-4835-9cb9-2684c2492079-dns-swift-storage-0\") pod \"dnsmasq-dns-8495b76777-md4rx\" (UID: \"4be24dd0-394a-4835-9cb9-2684c2492079\") " pod="openstack/dnsmasq-dns-8495b76777-md4rx" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.081261 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-log-httpd\") pod \"ceilometer-0\" (UID: \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\") " pod="openstack/ceilometer-0" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.082135 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-scripts\") pod \"ceilometer-0\" (UID: \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\") " pod="openstack/ceilometer-0" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.082480 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-config-data\") pod \"ceilometer-0\" (UID: \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\") " pod="openstack/ceilometer-0" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.083486 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\") " pod="openstack/ceilometer-0" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.083641 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/677fa641-ca48-40fe-8b91-40147f1376e5-db-sync-config-data\") pod 
\"barbican-db-sync-nh6rs\" (UID: \"677fa641-ca48-40fe-8b91-40147f1376e5\") " pod="openstack/barbican-db-sync-nh6rs" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.083807 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-run-httpd\") pod \"ceilometer-0\" (UID: \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\") " pod="openstack/ceilometer-0" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.084518 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\") " pod="openstack/ceilometer-0" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.093513 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6gzt\" (UniqueName: \"kubernetes.io/projected/4be24dd0-394a-4835-9cb9-2684c2492079-kube-api-access-l6gzt\") pod \"dnsmasq-dns-8495b76777-md4rx\" (UID: \"4be24dd0-394a-4835-9cb9-2684c2492079\") " pod="openstack/dnsmasq-dns-8495b76777-md4rx" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.093866 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-vhxrm" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.103058 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lc8rd\" (UniqueName: \"kubernetes.io/projected/677fa641-ca48-40fe-8b91-40147f1376e5-kube-api-access-lc8rd\") pod \"barbican-db-sync-nh6rs\" (UID: \"677fa641-ca48-40fe-8b91-40147f1376e5\") " pod="openstack/barbican-db-sync-nh6rs" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.103525 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/677fa641-ca48-40fe-8b91-40147f1376e5-combined-ca-bundle\") pod \"barbican-db-sync-nh6rs\" (UID: \"677fa641-ca48-40fe-8b91-40147f1376e5\") " pod="openstack/barbican-db-sync-nh6rs" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.106856 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qm8fn\" (UniqueName: \"kubernetes.io/projected/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-kube-api-access-qm8fn\") pod \"ceilometer-0\" (UID: \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\") " pod="openstack/ceilometer-0" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.129863 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-4bw6q" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.153803 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8495b76777-md4rx" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.227059 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-nh6rs" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.261568 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.272458 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-b5lhm"] Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.420355 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77bbd879b9-d5jv2"] Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.478056 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-hdczs"] Jan 28 16:06:32 crc kubenswrapper[4811]: W0128 16:06:32.494303 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7acf4fe9_fb7e_4260_9976_04deecc1264d.slice/crio-b410cd4b2d52947c3a819e8421138cf44d3e13b74012377d94031bf894b6ec20 WatchSource:0}: Error finding container b410cd4b2d52947c3a819e8421138cf44d3e13b74012377d94031bf894b6ec20: Status 404 returned error can't find the container with id b410cd4b2d52947c3a819e8421138cf44d3e13b74012377d94031bf894b6ec20 Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.703411 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-vhxrm"] Jan 28 16:06:32 crc kubenswrapper[4811]: W0128 16:06:32.715661 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod55b5b721_9672_447d_b4d8_b53b7de5ce2f.slice/crio-8713df3aaa5d14c4f39a8ad7a10591044fb086b3c3dc20dca00a98ed0d2fd962 WatchSource:0}: Error finding container 8713df3aaa5d14c4f39a8ad7a10591044fb086b3c3dc20dca00a98ed0d2fd962: Status 404 returned error can't find the container with id 8713df3aaa5d14c4f39a8ad7a10591044fb086b3c3dc20dca00a98ed0d2fd962 Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.835794 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8495b76777-md4rx"] Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.853384 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-4bw6q"] Jan 28 16:06:32 crc kubenswrapper[4811]: W0128 16:06:32.878035 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4be24dd0_394a_4835_9cb9_2684c2492079.slice/crio-dc06a1ba9a7f2d61a5c9e81e18cbff2158e49d3195a8bc19e2e12cf91e1a3661 WatchSource:0}: Error finding container dc06a1ba9a7f2d61a5c9e81e18cbff2158e49d3195a8bc19e2e12cf91e1a3661: Status 404 returned error can't find the container with id dc06a1ba9a7f2d61a5c9e81e18cbff2158e49d3195a8bc19e2e12cf91e1a3661 Jan 28 16:06:32 crc kubenswrapper[4811]: I0128 16:06:32.965454 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-nh6rs"] Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.065872 4811 generic.go:334] "Generic (PLEG): container finished" podID="1a5ff06a-d87b-4f48-b51f-328830acba2b" containerID="7da81a3f5ffcca77a15804cf67c1253b7ce5e11ab9cd37662ca43a76dd5d6dcc" exitCode=0 Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.065951 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77bbd879b9-d5jv2" event={"ID":"1a5ff06a-d87b-4f48-b51f-328830acba2b","Type":"ContainerDied","Data":"7da81a3f5ffcca77a15804cf67c1253b7ce5e11ab9cd37662ca43a76dd5d6dcc"} Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.065984 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77bbd879b9-d5jv2" 
event={"ID":"1a5ff06a-d87b-4f48-b51f-328830acba2b","Type":"ContainerStarted","Data":"3b88b650d80c0a5bceff64d81cb3349ea9d7583de6e592d34dc48b49002e6041"} Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.069380 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-b5lhm" event={"ID":"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad","Type":"ContainerStarted","Data":"b0ddc832af1456e23eda70e2a290a6a34b84838cf3f77696c6d4ac858b8f50eb"} Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.069424 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-b5lhm" event={"ID":"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad","Type":"ContainerStarted","Data":"6db4819d4e51173d77f9f93efc6e2939df8fa1cddb225eb73a62b7fa7f6101fc"} Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.074314 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8495b76777-md4rx" event={"ID":"4be24dd0-394a-4835-9cb9-2684c2492079","Type":"ContainerStarted","Data":"dc06a1ba9a7f2d61a5c9e81e18cbff2158e49d3195a8bc19e2e12cf91e1a3661"} Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.076011 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-vhxrm" event={"ID":"55b5b721-9672-447d-b4d8-b53b7de5ce2f","Type":"ContainerStarted","Data":"9163fd29bb4d295b9f0469a6d83cf56f2bde17afe8a70e37c7ef65ad1e87b52d"} Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.076057 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-vhxrm" event={"ID":"55b5b721-9672-447d-b4d8-b53b7de5ce2f","Type":"ContainerStarted","Data":"8713df3aaa5d14c4f39a8ad7a10591044fb086b3c3dc20dca00a98ed0d2fd962"} Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.086271 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-hdczs" event={"ID":"7acf4fe9-fb7e-4260-9976-04deecc1264d","Type":"ContainerStarted","Data":"b410cd4b2d52947c3a819e8421138cf44d3e13b74012377d94031bf894b6ec20"} Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.087512 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.087545 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.092628 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-nh6rs" event={"ID":"677fa641-ca48-40fe-8b91-40147f1376e5","Type":"ContainerStarted","Data":"fa4f5e876aa27c3102b8369872e598c2a55b9bca6d184304a850bfaebfc2434d"} Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.097806 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-75bdffd66f-dd4dv" podUID="76dc5dd7-5559-47a6-9d0d-c576172cf695" containerName="dnsmasq-dns" containerID="cri-o://c6e19834bdefd40ed23094b1023158e8ea14108c2402ba9c600920253b52af56" gracePeriod=10 Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.098129 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/placement-db-sync-4bw6q" event={"ID":"b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1","Type":"ContainerStarted","Data":"7326795b24f1232d411422ad0f3d3b776ea81c6ba3dd9d21035934215b64cc63"} Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.106649 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.130365 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-vhxrm" podStartSLOduration=2.130347947 podStartE2EDuration="2.130347947s" podCreationTimestamp="2026-01-28 16:06:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:06:33.128843107 +0000 UTC m=+1285.883206690" watchObservedRunningTime="2026-01-28 16:06:33.130347947 +0000 UTC m=+1285.884711530" Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.174915 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-b5lhm" podStartSLOduration=2.174900099 podStartE2EDuration="2.174900099s" podCreationTimestamp="2026-01-28 16:06:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:06:33.170415118 +0000 UTC m=+1285.924778701" watchObservedRunningTime="2026-01-28 16:06:33.174900099 +0000 UTC m=+1285.929263682" Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.303251 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.550345 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77bbd879b9-d5jv2" Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.624294 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hgkxj\" (UniqueName: \"kubernetes.io/projected/1a5ff06a-d87b-4f48-b51f-328830acba2b-kube-api-access-hgkxj\") pod \"1a5ff06a-d87b-4f48-b51f-328830acba2b\" (UID: \"1a5ff06a-d87b-4f48-b51f-328830acba2b\") " Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.624504 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a5ff06a-d87b-4f48-b51f-328830acba2b-ovsdbserver-nb\") pod \"1a5ff06a-d87b-4f48-b51f-328830acba2b\" (UID: \"1a5ff06a-d87b-4f48-b51f-328830acba2b\") " Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.624566 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a5ff06a-d87b-4f48-b51f-328830acba2b-dns-svc\") pod \"1a5ff06a-d87b-4f48-b51f-328830acba2b\" (UID: \"1a5ff06a-d87b-4f48-b51f-328830acba2b\") " Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.624600 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a5ff06a-d87b-4f48-b51f-328830acba2b-ovsdbserver-sb\") pod \"1a5ff06a-d87b-4f48-b51f-328830acba2b\" (UID: \"1a5ff06a-d87b-4f48-b51f-328830acba2b\") " Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.624646 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a5ff06a-d87b-4f48-b51f-328830acba2b-config\") pod \"1a5ff06a-d87b-4f48-b51f-328830acba2b\" (UID: 
\"1a5ff06a-d87b-4f48-b51f-328830acba2b\") " Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.624669 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a5ff06a-d87b-4f48-b51f-328830acba2b-dns-swift-storage-0\") pod \"1a5ff06a-d87b-4f48-b51f-328830acba2b\" (UID: \"1a5ff06a-d87b-4f48-b51f-328830acba2b\") " Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.643096 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a5ff06a-d87b-4f48-b51f-328830acba2b-kube-api-access-hgkxj" (OuterVolumeSpecName: "kube-api-access-hgkxj") pod "1a5ff06a-d87b-4f48-b51f-328830acba2b" (UID: "1a5ff06a-d87b-4f48-b51f-328830acba2b"). InnerVolumeSpecName "kube-api-access-hgkxj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.669262 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a5ff06a-d87b-4f48-b51f-328830acba2b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1a5ff06a-d87b-4f48-b51f-328830acba2b" (UID: "1a5ff06a-d87b-4f48-b51f-328830acba2b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.676349 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a5ff06a-d87b-4f48-b51f-328830acba2b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1a5ff06a-d87b-4f48-b51f-328830acba2b" (UID: "1a5ff06a-d87b-4f48-b51f-328830acba2b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.676672 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a5ff06a-d87b-4f48-b51f-328830acba2b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1a5ff06a-d87b-4f48-b51f-328830acba2b" (UID: "1a5ff06a-d87b-4f48-b51f-328830acba2b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.684587 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a5ff06a-d87b-4f48-b51f-328830acba2b-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "1a5ff06a-d87b-4f48-b51f-328830acba2b" (UID: "1a5ff06a-d87b-4f48-b51f-328830acba2b"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.694278 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a5ff06a-d87b-4f48-b51f-328830acba2b-config" (OuterVolumeSpecName: "config") pod "1a5ff06a-d87b-4f48-b51f-328830acba2b" (UID: "1a5ff06a-d87b-4f48-b51f-328830acba2b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.726192 4811 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a5ff06a-d87b-4f48-b51f-328830acba2b-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.726219 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hgkxj\" (UniqueName: \"kubernetes.io/projected/1a5ff06a-d87b-4f48-b51f-328830acba2b-kube-api-access-hgkxj\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.726230 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a5ff06a-d87b-4f48-b51f-328830acba2b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.726240 4811 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a5ff06a-d87b-4f48-b51f-328830acba2b-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.726249 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a5ff06a-d87b-4f48-b51f-328830acba2b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.726259 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a5ff06a-d87b-4f48-b51f-328830acba2b-config\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.820314 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75bdffd66f-dd4dv" Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.928932 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-scn78\" (UniqueName: \"kubernetes.io/projected/76dc5dd7-5559-47a6-9d0d-c576172cf695-kube-api-access-scn78\") pod \"76dc5dd7-5559-47a6-9d0d-c576172cf695\" (UID: \"76dc5dd7-5559-47a6-9d0d-c576172cf695\") " Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.929016 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76dc5dd7-5559-47a6-9d0d-c576172cf695-ovsdbserver-nb\") pod \"76dc5dd7-5559-47a6-9d0d-c576172cf695\" (UID: \"76dc5dd7-5559-47a6-9d0d-c576172cf695\") " Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.929148 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76dc5dd7-5559-47a6-9d0d-c576172cf695-ovsdbserver-sb\") pod \"76dc5dd7-5559-47a6-9d0d-c576172cf695\" (UID: \"76dc5dd7-5559-47a6-9d0d-c576172cf695\") " Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.929265 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/76dc5dd7-5559-47a6-9d0d-c576172cf695-dns-swift-storage-0\") pod \"76dc5dd7-5559-47a6-9d0d-c576172cf695\" (UID: \"76dc5dd7-5559-47a6-9d0d-c576172cf695\") " Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.929320 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76dc5dd7-5559-47a6-9d0d-c576172cf695-dns-svc\") pod 
\"76dc5dd7-5559-47a6-9d0d-c576172cf695\" (UID: \"76dc5dd7-5559-47a6-9d0d-c576172cf695\") " Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.929407 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76dc5dd7-5559-47a6-9d0d-c576172cf695-config\") pod \"76dc5dd7-5559-47a6-9d0d-c576172cf695\" (UID: \"76dc5dd7-5559-47a6-9d0d-c576172cf695\") " Jan 28 16:06:33 crc kubenswrapper[4811]: I0128 16:06:33.955394 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76dc5dd7-5559-47a6-9d0d-c576172cf695-kube-api-access-scn78" (OuterVolumeSpecName: "kube-api-access-scn78") pod "76dc5dd7-5559-47a6-9d0d-c576172cf695" (UID: "76dc5dd7-5559-47a6-9d0d-c576172cf695"). InnerVolumeSpecName "kube-api-access-scn78". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.032559 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-scn78\" (UniqueName: \"kubernetes.io/projected/76dc5dd7-5559-47a6-9d0d-c576172cf695-kube-api-access-scn78\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.035112 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76dc5dd7-5559-47a6-9d0d-c576172cf695-config" (OuterVolumeSpecName: "config") pod "76dc5dd7-5559-47a6-9d0d-c576172cf695" (UID: "76dc5dd7-5559-47a6-9d0d-c576172cf695"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.041157 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76dc5dd7-5559-47a6-9d0d-c576172cf695-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "76dc5dd7-5559-47a6-9d0d-c576172cf695" (UID: "76dc5dd7-5559-47a6-9d0d-c576172cf695"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.052798 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76dc5dd7-5559-47a6-9d0d-c576172cf695-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "76dc5dd7-5559-47a6-9d0d-c576172cf695" (UID: "76dc5dd7-5559-47a6-9d0d-c576172cf695"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.076093 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76dc5dd7-5559-47a6-9d0d-c576172cf695-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "76dc5dd7-5559-47a6-9d0d-c576172cf695" (UID: "76dc5dd7-5559-47a6-9d0d-c576172cf695"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.078561 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76dc5dd7-5559-47a6-9d0d-c576172cf695-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "76dc5dd7-5559-47a6-9d0d-c576172cf695" (UID: "76dc5dd7-5559-47a6-9d0d-c576172cf695"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.110993 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"77a96295-92e1-4ed4-b6dc-352fda7dd5a5","Type":"ContainerStarted","Data":"ea55ed8c92f9871e3e2ed362bf3a0136b29999ee7028849d3c4c091d29fcaa20"} Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.118946 4811 generic.go:334] "Generic (PLEG): container finished" podID="76dc5dd7-5559-47a6-9d0d-c576172cf695" containerID="c6e19834bdefd40ed23094b1023158e8ea14108c2402ba9c600920253b52af56" exitCode=0 Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.119043 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75bdffd66f-dd4dv" Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.119052 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75bdffd66f-dd4dv" event={"ID":"76dc5dd7-5559-47a6-9d0d-c576172cf695","Type":"ContainerDied","Data":"c6e19834bdefd40ed23094b1023158e8ea14108c2402ba9c600920253b52af56"} Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.119167 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75bdffd66f-dd4dv" event={"ID":"76dc5dd7-5559-47a6-9d0d-c576172cf695","Type":"ContainerDied","Data":"846ac74e131afacb3aa8e8987620ba8115bd2d63082b50ce8a82cc23898f50ad"} Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.119186 4811 scope.go:117] "RemoveContainer" containerID="c6e19834bdefd40ed23094b1023158e8ea14108c2402ba9c600920253b52af56" Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.136908 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76dc5dd7-5559-47a6-9d0d-c576172cf695-config\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.136955 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76dc5dd7-5559-47a6-9d0d-c576172cf695-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.136973 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76dc5dd7-5559-47a6-9d0d-c576172cf695-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.136985 4811 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/76dc5dd7-5559-47a6-9d0d-c576172cf695-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.137002 4811 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76dc5dd7-5559-47a6-9d0d-c576172cf695-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.137268 4811 generic.go:334] "Generic (PLEG): container finished" podID="4be24dd0-394a-4835-9cb9-2684c2492079" containerID="1f6c627b7301448100cbd519abc5845b3e7b0e44a608610675a09d0300128e2d" exitCode=0 Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.137476 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8495b76777-md4rx" event={"ID":"4be24dd0-394a-4835-9cb9-2684c2492079","Type":"ContainerDied","Data":"1f6c627b7301448100cbd519abc5845b3e7b0e44a608610675a09d0300128e2d"} Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.163879 
4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77bbd879b9-d5jv2" Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.164615 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77bbd879b9-d5jv2" event={"ID":"1a5ff06a-d87b-4f48-b51f-328830acba2b","Type":"ContainerDied","Data":"3b88b650d80c0a5bceff64d81cb3349ea9d7583de6e592d34dc48b49002e6041"} Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.192779 4811 scope.go:117] "RemoveContainer" containerID="1394e9bb9c8a5692fd17d57c698e9aa69c5ea54103771a0309eebda86500fa00" Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.212634 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75bdffd66f-dd4dv"] Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.225958 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-75bdffd66f-dd4dv"] Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.274344 4811 scope.go:117] "RemoveContainer" containerID="c6e19834bdefd40ed23094b1023158e8ea14108c2402ba9c600920253b52af56" Jan 28 16:06:34 crc kubenswrapper[4811]: E0128 16:06:34.275087 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6e19834bdefd40ed23094b1023158e8ea14108c2402ba9c600920253b52af56\": container with ID starting with c6e19834bdefd40ed23094b1023158e8ea14108c2402ba9c600920253b52af56 not found: ID does not exist" containerID="c6e19834bdefd40ed23094b1023158e8ea14108c2402ba9c600920253b52af56" Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.275134 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6e19834bdefd40ed23094b1023158e8ea14108c2402ba9c600920253b52af56"} err="failed to get container status \"c6e19834bdefd40ed23094b1023158e8ea14108c2402ba9c600920253b52af56\": rpc error: code = NotFound desc = could not find container \"c6e19834bdefd40ed23094b1023158e8ea14108c2402ba9c600920253b52af56\": container with ID starting with c6e19834bdefd40ed23094b1023158e8ea14108c2402ba9c600920253b52af56 not found: ID does not exist" Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.275161 4811 scope.go:117] "RemoveContainer" containerID="1394e9bb9c8a5692fd17d57c698e9aa69c5ea54103771a0309eebda86500fa00" Jan 28 16:06:34 crc kubenswrapper[4811]: E0128 16:06:34.275497 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1394e9bb9c8a5692fd17d57c698e9aa69c5ea54103771a0309eebda86500fa00\": container with ID starting with 1394e9bb9c8a5692fd17d57c698e9aa69c5ea54103771a0309eebda86500fa00 not found: ID does not exist" containerID="1394e9bb9c8a5692fd17d57c698e9aa69c5ea54103771a0309eebda86500fa00" Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.275550 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1394e9bb9c8a5692fd17d57c698e9aa69c5ea54103771a0309eebda86500fa00"} err="failed to get container status \"1394e9bb9c8a5692fd17d57c698e9aa69c5ea54103771a0309eebda86500fa00\": rpc error: code = NotFound desc = could not find container \"1394e9bb9c8a5692fd17d57c698e9aa69c5ea54103771a0309eebda86500fa00\": container with ID starting with 1394e9bb9c8a5692fd17d57c698e9aa69c5ea54103771a0309eebda86500fa00 not found: ID does not exist" Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.275586 4811 scope.go:117] "RemoveContainer" 
containerID="7da81a3f5ffcca77a15804cf67c1253b7ce5e11ab9cd37662ca43a76dd5d6dcc" Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.297258 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77bbd879b9-d5jv2"] Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.335823 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77bbd879b9-d5jv2"] Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.369521 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a5ff06a-d87b-4f48-b51f-328830acba2b" path="/var/lib/kubelet/pods/1a5ff06a-d87b-4f48-b51f-328830acba2b/volumes" Jan 28 16:06:34 crc kubenswrapper[4811]: I0128 16:06:34.371351 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76dc5dd7-5559-47a6-9d0d-c576172cf695" path="/var/lib/kubelet/pods/76dc5dd7-5559-47a6-9d0d-c576172cf695/volumes" Jan 28 16:06:35 crc kubenswrapper[4811]: I0128 16:06:35.186962 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8495b76777-md4rx" event={"ID":"4be24dd0-394a-4835-9cb9-2684c2492079","Type":"ContainerStarted","Data":"1857aece1c28a54547dcfcd42cea132c99e0a01aebb8d17bd1c22c2f7eca4e0a"} Jan 28 16:06:35 crc kubenswrapper[4811]: I0128 16:06:35.210734 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8495b76777-md4rx" podStartSLOduration=4.210706906 podStartE2EDuration="4.210706906s" podCreationTimestamp="2026-01-28 16:06:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:06:35.206371289 +0000 UTC m=+1287.960734872" watchObservedRunningTime="2026-01-28 16:06:35.210706906 +0000 UTC m=+1287.965070509" Jan 28 16:06:35 crc kubenswrapper[4811]: E0128 16:06:35.516708 4811 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod99a30150_2d2e_420e_aba9_d7d99b62a612.slice\": RecentStats: unable to find data in memory cache]" Jan 28 16:06:36 crc kubenswrapper[4811]: I0128 16:06:36.196615 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8495b76777-md4rx" Jan 28 16:06:38 crc kubenswrapper[4811]: I0128 16:06:38.221554 4811 generic.go:334] "Generic (PLEG): container finished" podID="7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad" containerID="b0ddc832af1456e23eda70e2a290a6a34b84838cf3f77696c6d4ac858b8f50eb" exitCode=0 Jan 28 16:06:38 crc kubenswrapper[4811]: I0128 16:06:38.221959 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-b5lhm" event={"ID":"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad","Type":"ContainerDied","Data":"b0ddc832af1456e23eda70e2a290a6a34b84838cf3f77696c6d4ac858b8f50eb"} Jan 28 16:06:42 crc kubenswrapper[4811]: I0128 16:06:42.156676 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8495b76777-md4rx" Jan 28 16:06:42 crc kubenswrapper[4811]: I0128 16:06:42.215730 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67fdf7998c-kds67"] Jan 28 16:06:42 crc kubenswrapper[4811]: I0128 16:06:42.215948 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-67fdf7998c-kds67" podUID="a3730224-7295-4fea-bd2e-67cad0392474" containerName="dnsmasq-dns" 
containerID="cri-o://723c3c1f133020bbd2e12c0cc5bd06cc9712218d2f07e7e49921785e8eb9e001" gracePeriod=10 Jan 28 16:06:43 crc kubenswrapper[4811]: I0128 16:06:43.275245 4811 generic.go:334] "Generic (PLEG): container finished" podID="a3730224-7295-4fea-bd2e-67cad0392474" containerID="723c3c1f133020bbd2e12c0cc5bd06cc9712218d2f07e7e49921785e8eb9e001" exitCode=0 Jan 28 16:06:43 crc kubenswrapper[4811]: I0128 16:06:43.275309 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67fdf7998c-kds67" event={"ID":"a3730224-7295-4fea-bd2e-67cad0392474","Type":"ContainerDied","Data":"723c3c1f133020bbd2e12c0cc5bd06cc9712218d2f07e7e49921785e8eb9e001"} Jan 28 16:06:44 crc kubenswrapper[4811]: I0128 16:06:44.875608 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-67fdf7998c-kds67" podUID="a3730224-7295-4fea-bd2e-67cad0392474" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.116:5353: connect: connection refused" Jan 28 16:06:45 crc kubenswrapper[4811]: I0128 16:06:45.409131 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-b5lhm" Jan 28 16:06:45 crc kubenswrapper[4811]: I0128 16:06:45.512332 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-credential-keys\") pod \"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad\" (UID: \"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad\") " Jan 28 16:06:45 crc kubenswrapper[4811]: I0128 16:06:45.512447 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-config-data\") pod \"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad\" (UID: \"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad\") " Jan 28 16:06:45 crc kubenswrapper[4811]: I0128 16:06:45.512526 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ccw45\" (UniqueName: \"kubernetes.io/projected/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-kube-api-access-ccw45\") pod \"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad\" (UID: \"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad\") " Jan 28 16:06:45 crc kubenswrapper[4811]: I0128 16:06:45.512660 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-combined-ca-bundle\") pod \"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad\" (UID: \"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad\") " Jan 28 16:06:45 crc kubenswrapper[4811]: I0128 16:06:45.512738 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-scripts\") pod \"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad\" (UID: \"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad\") " Jan 28 16:06:45 crc kubenswrapper[4811]: I0128 16:06:45.512766 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-fernet-keys\") pod \"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad\" (UID: \"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad\") " Jan 28 16:06:45 crc kubenswrapper[4811]: I0128 16:06:45.536603 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-kube-api-access-ccw45" (OuterVolumeSpecName: 
"kube-api-access-ccw45") pod "7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad" (UID: "7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad"). InnerVolumeSpecName "kube-api-access-ccw45". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:06:45 crc kubenswrapper[4811]: I0128 16:06:45.575002 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad" (UID: "7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:06:45 crc kubenswrapper[4811]: I0128 16:06:45.575454 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad" (UID: "7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:06:45 crc kubenswrapper[4811]: I0128 16:06:45.588641 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-config-data" (OuterVolumeSpecName: "config-data") pod "7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad" (UID: "7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:06:45 crc kubenswrapper[4811]: I0128 16:06:45.600623 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-scripts" (OuterVolumeSpecName: "scripts") pod "7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad" (UID: "7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:06:45 crc kubenswrapper[4811]: I0128 16:06:45.600674 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad" (UID: "7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:06:45 crc kubenswrapper[4811]: I0128 16:06:45.614257 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:45 crc kubenswrapper[4811]: I0128 16:06:45.614296 4811 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:45 crc kubenswrapper[4811]: I0128 16:06:45.614305 4811 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:45 crc kubenswrapper[4811]: I0128 16:06:45.614316 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:45 crc kubenswrapper[4811]: I0128 16:06:45.614324 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ccw45\" (UniqueName: \"kubernetes.io/projected/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-kube-api-access-ccw45\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:45 crc kubenswrapper[4811]: I0128 16:06:45.614332 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:45 crc kubenswrapper[4811]: E0128 16:06:45.725615 4811 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod99a30150_2d2e_420e_aba9_d7d99b62a612.slice\": RecentStats: unable to find data in memory cache]" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.310780 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-b5lhm" event={"ID":"7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad","Type":"ContainerDied","Data":"6db4819d4e51173d77f9f93efc6e2939df8fa1cddb225eb73a62b7fa7f6101fc"} Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.310826 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6db4819d4e51173d77f9f93efc6e2939df8fa1cddb225eb73a62b7fa7f6101fc" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.310850 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-b5lhm" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.573961 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-b5lhm"] Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.582901 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-b5lhm"] Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.669518 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-gff7z"] Jan 28 16:06:46 crc kubenswrapper[4811]: E0128 16:06:46.669882 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76dc5dd7-5559-47a6-9d0d-c576172cf695" containerName="init" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.669897 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="76dc5dd7-5559-47a6-9d0d-c576172cf695" containerName="init" Jan 28 16:06:46 crc kubenswrapper[4811]: E0128 16:06:46.669913 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a5ff06a-d87b-4f48-b51f-328830acba2b" containerName="init" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.669919 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a5ff06a-d87b-4f48-b51f-328830acba2b" containerName="init" Jan 28 16:06:46 crc kubenswrapper[4811]: E0128 16:06:46.669931 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad" containerName="keystone-bootstrap" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.669937 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad" containerName="keystone-bootstrap" Jan 28 16:06:46 crc kubenswrapper[4811]: E0128 16:06:46.669958 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76dc5dd7-5559-47a6-9d0d-c576172cf695" containerName="dnsmasq-dns" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.669964 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="76dc5dd7-5559-47a6-9d0d-c576172cf695" containerName="dnsmasq-dns" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.670111 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a5ff06a-d87b-4f48-b51f-328830acba2b" containerName="init" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.670130 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="76dc5dd7-5559-47a6-9d0d-c576172cf695" containerName="dnsmasq-dns" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.670142 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad" containerName="keystone-bootstrap" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.670669 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-gff7z" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.672503 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-ns2b7" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.672928 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.674129 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.674549 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.675364 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.682919 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-gff7z"] Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.737896 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-credential-keys\") pod \"keystone-bootstrap-gff7z\" (UID: \"6c7795b3-4e53-4e49-b296-8ed8fe0ab751\") " pod="openstack/keystone-bootstrap-gff7z" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.737940 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-combined-ca-bundle\") pod \"keystone-bootstrap-gff7z\" (UID: \"6c7795b3-4e53-4e49-b296-8ed8fe0ab751\") " pod="openstack/keystone-bootstrap-gff7z" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.738018 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-fernet-keys\") pod \"keystone-bootstrap-gff7z\" (UID: \"6c7795b3-4e53-4e49-b296-8ed8fe0ab751\") " pod="openstack/keystone-bootstrap-gff7z" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.738143 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-np9xg\" (UniqueName: \"kubernetes.io/projected/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-kube-api-access-np9xg\") pod \"keystone-bootstrap-gff7z\" (UID: \"6c7795b3-4e53-4e49-b296-8ed8fe0ab751\") " pod="openstack/keystone-bootstrap-gff7z" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.738220 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-scripts\") pod \"keystone-bootstrap-gff7z\" (UID: \"6c7795b3-4e53-4e49-b296-8ed8fe0ab751\") " pod="openstack/keystone-bootstrap-gff7z" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.738396 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-config-data\") pod \"keystone-bootstrap-gff7z\" (UID: \"6c7795b3-4e53-4e49-b296-8ed8fe0ab751\") " pod="openstack/keystone-bootstrap-gff7z" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.839701 4811 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-np9xg\" (UniqueName: \"kubernetes.io/projected/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-kube-api-access-np9xg\") pod \"keystone-bootstrap-gff7z\" (UID: \"6c7795b3-4e53-4e49-b296-8ed8fe0ab751\") " pod="openstack/keystone-bootstrap-gff7z" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.839774 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-scripts\") pod \"keystone-bootstrap-gff7z\" (UID: \"6c7795b3-4e53-4e49-b296-8ed8fe0ab751\") " pod="openstack/keystone-bootstrap-gff7z" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.839860 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-config-data\") pod \"keystone-bootstrap-gff7z\" (UID: \"6c7795b3-4e53-4e49-b296-8ed8fe0ab751\") " pod="openstack/keystone-bootstrap-gff7z" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.839953 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-credential-keys\") pod \"keystone-bootstrap-gff7z\" (UID: \"6c7795b3-4e53-4e49-b296-8ed8fe0ab751\") " pod="openstack/keystone-bootstrap-gff7z" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.839993 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-combined-ca-bundle\") pod \"keystone-bootstrap-gff7z\" (UID: \"6c7795b3-4e53-4e49-b296-8ed8fe0ab751\") " pod="openstack/keystone-bootstrap-gff7z" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.840034 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-fernet-keys\") pod \"keystone-bootstrap-gff7z\" (UID: \"6c7795b3-4e53-4e49-b296-8ed8fe0ab751\") " pod="openstack/keystone-bootstrap-gff7z" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.844172 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-scripts\") pod \"keystone-bootstrap-gff7z\" (UID: \"6c7795b3-4e53-4e49-b296-8ed8fe0ab751\") " pod="openstack/keystone-bootstrap-gff7z" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.845135 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-credential-keys\") pod \"keystone-bootstrap-gff7z\" (UID: \"6c7795b3-4e53-4e49-b296-8ed8fe0ab751\") " pod="openstack/keystone-bootstrap-gff7z" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.848982 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-config-data\") pod \"keystone-bootstrap-gff7z\" (UID: \"6c7795b3-4e53-4e49-b296-8ed8fe0ab751\") " pod="openstack/keystone-bootstrap-gff7z" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.849026 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-fernet-keys\") pod \"keystone-bootstrap-gff7z\" (UID: \"6c7795b3-4e53-4e49-b296-8ed8fe0ab751\") " 
pod="openstack/keystone-bootstrap-gff7z" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.854560 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-combined-ca-bundle\") pod \"keystone-bootstrap-gff7z\" (UID: \"6c7795b3-4e53-4e49-b296-8ed8fe0ab751\") " pod="openstack/keystone-bootstrap-gff7z" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.857538 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-np9xg\" (UniqueName: \"kubernetes.io/projected/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-kube-api-access-np9xg\") pod \"keystone-bootstrap-gff7z\" (UID: \"6c7795b3-4e53-4e49-b296-8ed8fe0ab751\") " pod="openstack/keystone-bootstrap-gff7z" Jan 28 16:06:46 crc kubenswrapper[4811]: I0128 16:06:46.988940 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-gff7z" Jan 28 16:06:47 crc kubenswrapper[4811]: E0128 16:06:47.899782 4811 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central@sha256:5a548c25fe3d02f7a042cb0a6d28fc8039a34c4a3b3d07aadda4aba3a926e777" Jan 28 16:06:47 crc kubenswrapper[4811]: E0128 16:06:47.900083 4811 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central@sha256:5a548c25fe3d02f7a042cb0a6d28fc8039a34c4a3b3d07aadda4aba3a926e777,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n76hf8h578h565hf5h56fh6fhc7h664h686h578h5d9h5ddhd9hbch5c4h66h55bh55dh6h698hfdh5fh66h76h96h66ch7dhd8h5b9h56bhdbq,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qm8fn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 
/var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(77a96295-92e1-4ed4-b6dc-352fda7dd5a5): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 28 16:06:48 crc kubenswrapper[4811]: I0128 16:06:48.355233 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad" path="/var/lib/kubelet/pods/7570ddf4-9d9e-48d8-a6ae-cf4fdcade2ad/volumes" Jan 28 16:06:54 crc kubenswrapper[4811]: I0128 16:06:54.873657 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-67fdf7998c-kds67" podUID="a3730224-7295-4fea-bd2e-67cad0392474" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.116:5353: i/o timeout" Jan 28 16:06:55 crc kubenswrapper[4811]: E0128 16:06:55.921999 4811 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod99a30150_2d2e_420e_aba9_d7d99b62a612.slice\": RecentStats: unable to find data in memory cache]" Jan 28 16:06:59 crc kubenswrapper[4811]: I0128 16:06:59.601057 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67fdf7998c-kds67" Jan 28 16:06:59 crc kubenswrapper[4811]: I0128 16:06:59.661187 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3730224-7295-4fea-bd2e-67cad0392474-config\") pod \"a3730224-7295-4fea-bd2e-67cad0392474\" (UID: \"a3730224-7295-4fea-bd2e-67cad0392474\") " Jan 28 16:06:59 crc kubenswrapper[4811]: I0128 16:06:59.661260 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a3730224-7295-4fea-bd2e-67cad0392474-ovsdbserver-nb\") pod \"a3730224-7295-4fea-bd2e-67cad0392474\" (UID: \"a3730224-7295-4fea-bd2e-67cad0392474\") " Jan 28 16:06:59 crc kubenswrapper[4811]: I0128 16:06:59.661336 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a3730224-7295-4fea-bd2e-67cad0392474-ovsdbserver-sb\") pod \"a3730224-7295-4fea-bd2e-67cad0392474\" (UID: \"a3730224-7295-4fea-bd2e-67cad0392474\") " Jan 28 16:06:59 crc kubenswrapper[4811]: I0128 16:06:59.661383 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3730224-7295-4fea-bd2e-67cad0392474-dns-svc\") pod \"a3730224-7295-4fea-bd2e-67cad0392474\" (UID: \"a3730224-7295-4fea-bd2e-67cad0392474\") " Jan 28 16:06:59 crc kubenswrapper[4811]: I0128 16:06:59.661525 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thgqs\" (UniqueName: \"kubernetes.io/projected/a3730224-7295-4fea-bd2e-67cad0392474-kube-api-access-thgqs\") pod \"a3730224-7295-4fea-bd2e-67cad0392474\" (UID: \"a3730224-7295-4fea-bd2e-67cad0392474\") " Jan 28 16:06:59 crc kubenswrapper[4811]: I0128 16:06:59.668694 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3730224-7295-4fea-bd2e-67cad0392474-kube-api-access-thgqs" (OuterVolumeSpecName: "kube-api-access-thgqs") pod "a3730224-7295-4fea-bd2e-67cad0392474" (UID: "a3730224-7295-4fea-bd2e-67cad0392474"). InnerVolumeSpecName "kube-api-access-thgqs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:06:59 crc kubenswrapper[4811]: I0128 16:06:59.711705 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3730224-7295-4fea-bd2e-67cad0392474-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a3730224-7295-4fea-bd2e-67cad0392474" (UID: "a3730224-7295-4fea-bd2e-67cad0392474"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:06:59 crc kubenswrapper[4811]: I0128 16:06:59.714361 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3730224-7295-4fea-bd2e-67cad0392474-config" (OuterVolumeSpecName: "config") pod "a3730224-7295-4fea-bd2e-67cad0392474" (UID: "a3730224-7295-4fea-bd2e-67cad0392474"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:06:59 crc kubenswrapper[4811]: E0128 16:06:59.721542 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a3730224-7295-4fea-bd2e-67cad0392474-dns-svc podName:a3730224-7295-4fea-bd2e-67cad0392474 nodeName:}" failed. No retries permitted until 2026-01-28 16:07:00.221486445 +0000 UTC m=+1312.975850048 (durationBeforeRetry 500ms). 
Error: error cleaning subPath mounts for volume "dns-svc" (UniqueName: "kubernetes.io/configmap/a3730224-7295-4fea-bd2e-67cad0392474-dns-svc") pod "a3730224-7295-4fea-bd2e-67cad0392474" (UID: "a3730224-7295-4fea-bd2e-67cad0392474") : error deleting /var/lib/kubelet/pods/a3730224-7295-4fea-bd2e-67cad0392474/volume-subpaths: remove /var/lib/kubelet/pods/a3730224-7295-4fea-bd2e-67cad0392474/volume-subpaths: no such file or directory Jan 28 16:06:59 crc kubenswrapper[4811]: I0128 16:06:59.721905 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3730224-7295-4fea-bd2e-67cad0392474-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a3730224-7295-4fea-bd2e-67cad0392474" (UID: "a3730224-7295-4fea-bd2e-67cad0392474"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:06:59 crc kubenswrapper[4811]: I0128 16:06:59.762901 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3730224-7295-4fea-bd2e-67cad0392474-config\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:59 crc kubenswrapper[4811]: I0128 16:06:59.762935 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a3730224-7295-4fea-bd2e-67cad0392474-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:59 crc kubenswrapper[4811]: I0128 16:06:59.762947 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a3730224-7295-4fea-bd2e-67cad0392474-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:59 crc kubenswrapper[4811]: I0128 16:06:59.762956 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-thgqs\" (UniqueName: \"kubernetes.io/projected/a3730224-7295-4fea-bd2e-67cad0392474-kube-api-access-thgqs\") on node \"crc\" DevicePath \"\"" Jan 28 16:06:59 crc kubenswrapper[4811]: I0128 16:06:59.875508 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-67fdf7998c-kds67" podUID="a3730224-7295-4fea-bd2e-67cad0392474" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.116:5353: i/o timeout" Jan 28 16:06:59 crc kubenswrapper[4811]: I0128 16:06:59.876014 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-67fdf7998c-kds67" Jan 28 16:07:00 crc kubenswrapper[4811]: I0128 16:07:00.272078 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3730224-7295-4fea-bd2e-67cad0392474-dns-svc\") pod \"a3730224-7295-4fea-bd2e-67cad0392474\" (UID: \"a3730224-7295-4fea-bd2e-67cad0392474\") " Jan 28 16:07:00 crc kubenswrapper[4811]: I0128 16:07:00.272769 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3730224-7295-4fea-bd2e-67cad0392474-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a3730224-7295-4fea-bd2e-67cad0392474" (UID: "a3730224-7295-4fea-bd2e-67cad0392474"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:07:00 crc kubenswrapper[4811]: I0128 16:07:00.375008 4811 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3730224-7295-4fea-bd2e-67cad0392474-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:00 crc kubenswrapper[4811]: I0128 16:07:00.464930 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67fdf7998c-kds67" event={"ID":"a3730224-7295-4fea-bd2e-67cad0392474","Type":"ContainerDied","Data":"1feff88647b5793f4c6f4d8fdec21ae74f6824239500c5af16031bfd1b40a24c"} Jan 28 16:07:00 crc kubenswrapper[4811]: I0128 16:07:00.465106 4811 scope.go:117] "RemoveContainer" containerID="723c3c1f133020bbd2e12c0cc5bd06cc9712218d2f07e7e49921785e8eb9e001" Jan 28 16:07:00 crc kubenswrapper[4811]: I0128 16:07:00.465347 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67fdf7998c-kds67" Jan 28 16:07:00 crc kubenswrapper[4811]: I0128 16:07:00.494310 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67fdf7998c-kds67"] Jan 28 16:07:00 crc kubenswrapper[4811]: I0128 16:07:00.502042 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-67fdf7998c-kds67"] Jan 28 16:07:00 crc kubenswrapper[4811]: E0128 16:07:00.884802 4811 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api@sha256:e4aa4ebbb1e581a12040e9ad2ae2709ac31b5d965bb64fc4252d1028b05c565f" Jan 28 16:07:00 crc kubenswrapper[4811]: E0128 16:07:00.884979 4811 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api@sha256:e4aa4ebbb1e581a12040e9ad2ae2709ac31b5d965bb64fc4252d1028b05c565f,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-l9ljs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-bsvjv_openstack(91de0bcd-1c43-42d8-8582-ba86e33b81c2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 28 16:07:00 crc kubenswrapper[4811]: E0128 16:07:00.886925 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-bsvjv" podUID="91de0bcd-1c43-42d8-8582-ba86e33b81c2" Jan 28 16:07:00 crc kubenswrapper[4811]: I0128 16:07:00.897409 4811 scope.go:117] "RemoveContainer" containerID="a99e2ad139d7f0c192083433e961eb9268a4901c34a54ff538306acea713ce1b" Jan 28 16:07:00 crc kubenswrapper[4811]: E0128 16:07:00.942588 4811 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b59b7445e581cc720038107e421371c86c5765b2967e77d884ef29b1d9fd0f49" Jan 28 16:07:00 crc kubenswrapper[4811]: E0128 16:07:00.942772 4811 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b59b7445e581cc720038107e421371c86c5765b2967e77d884ef29b1d9fd0f49,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-j47qm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-hdczs_openstack(7acf4fe9-fb7e-4260-9976-04deecc1264d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 28 16:07:00 crc kubenswrapper[4811]: E0128 16:07:00.944214 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-hdczs" podUID="7acf4fe9-fb7e-4260-9976-04deecc1264d" Jan 28 16:07:01 crc kubenswrapper[4811]: I0128 16:07:01.411656 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-gff7z"] Jan 28 16:07:01 crc kubenswrapper[4811]: I0128 16:07:01.477608 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-gff7z" event={"ID":"6c7795b3-4e53-4e49-b296-8ed8fe0ab751","Type":"ContainerStarted","Data":"a0e67c2ddbd4c7672abae61063155136bee7973376e3621af4d0aa4bf606f534"} Jan 28 16:07:01 crc kubenswrapper[4811]: E0128 16:07:01.481724 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b59b7445e581cc720038107e421371c86c5765b2967e77d884ef29b1d9fd0f49\\\"\"" pod="openstack/cinder-db-sync-hdczs" podUID="7acf4fe9-fb7e-4260-9976-04deecc1264d" Jan 28 16:07:02 crc kubenswrapper[4811]: I0128 16:07:02.351611 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3730224-7295-4fea-bd2e-67cad0392474" path="/var/lib/kubelet/pods/a3730224-7295-4fea-bd2e-67cad0392474/volumes" Jan 28 16:07:02 crc kubenswrapper[4811]: I0128 16:07:02.489619 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"77a96295-92e1-4ed4-b6dc-352fda7dd5a5","Type":"ContainerStarted","Data":"fbd27eb090cbe712546265aaf2668b722b8738fe1528b40f273affe761f7bd58"} Jan 28 16:07:02 crc kubenswrapper[4811]: I0128 16:07:02.496105 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-gff7z" event={"ID":"6c7795b3-4e53-4e49-b296-8ed8fe0ab751","Type":"ContainerStarted","Data":"29ffefe393b5a77bb0a1a8dbf8b34fffc5338c9b9cf20ce0629162a5692975f5"} Jan 28 16:07:02 crc kubenswrapper[4811]: I0128 16:07:02.501965 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-nh6rs" event={"ID":"677fa641-ca48-40fe-8b91-40147f1376e5","Type":"ContainerStarted","Data":"f19e5ad3c28bace5d654226d41313f51baa5ae3b141172e290e57574882f79e5"} Jan 28 16:07:02 crc kubenswrapper[4811]: I0128 16:07:02.505586 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-4bw6q" event={"ID":"b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1","Type":"ContainerStarted","Data":"ad829404888e86ff13f629c270205ae6a1f39869e59a1b682907fd81c45dd990"} Jan 28 16:07:02 crc kubenswrapper[4811]: I0128 16:07:02.529680 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-gff7z" podStartSLOduration=16.529657804 podStartE2EDuration="16.529657804s" podCreationTimestamp="2026-01-28 16:06:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:07:02.523658362 +0000 UTC m=+1315.278021945" watchObservedRunningTime="2026-01-28 16:07:02.529657804 +0000 UTC m=+1315.284021387" Jan 28 16:07:02 crc kubenswrapper[4811]: I0128 16:07:02.553936 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-4bw6q" podStartSLOduration=3.509451467 podStartE2EDuration="31.55391233s" podCreationTimestamp="2026-01-28 16:06:31 +0000 UTC" firstStartedPulling="2026-01-28 16:06:32.864447343 +0000 UTC m=+1285.618810926" lastFinishedPulling="2026-01-28 16:07:00.908908206 +0000 UTC m=+1313.663271789" observedRunningTime="2026-01-28 16:07:02.540996671 +0000 UTC m=+1315.295360264" watchObservedRunningTime="2026-01-28 16:07:02.55391233 +0000 UTC m=+1315.308275913" Jan 28 16:07:02 crc kubenswrapper[4811]: I0128 16:07:02.567243 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-nh6rs" podStartSLOduration=3.6047921990000003 podStartE2EDuration="31.567223191s" podCreationTimestamp="2026-01-28 16:06:31 +0000 UTC" firstStartedPulling="2026-01-28 16:06:32.964263806 +0000 UTC m=+1285.718627389" lastFinishedPulling="2026-01-28 16:07:00.926694798 +0000 UTC m=+1313.681058381" observedRunningTime="2026-01-28 16:07:02.559898633 +0000 UTC m=+1315.314262246" watchObservedRunningTime="2026-01-28 16:07:02.567223191 +0000 UTC m=+1315.321586774" Jan 28 16:07:03 crc 
kubenswrapper[4811]: I0128 16:07:03.088152 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 16:07:03 crc kubenswrapper[4811]: I0128 16:07:03.088814 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 16:07:03 crc kubenswrapper[4811]: I0128 16:07:03.088903 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" Jan 28 16:07:03 crc kubenswrapper[4811]: I0128 16:07:03.093499 4811 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b949c0e59097384d76683740d569c1b6a4440ebd12914648594ff426daaad130"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 28 16:07:03 crc kubenswrapper[4811]: I0128 16:07:03.093585 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://b949c0e59097384d76683740d569c1b6a4440ebd12914648594ff426daaad130" gracePeriod=600 Jan 28 16:07:03 crc kubenswrapper[4811]: I0128 16:07:03.519363 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="b949c0e59097384d76683740d569c1b6a4440ebd12914648594ff426daaad130" exitCode=0 Jan 28 16:07:03 crc kubenswrapper[4811]: I0128 16:07:03.520543 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"b949c0e59097384d76683740d569c1b6a4440ebd12914648594ff426daaad130"} Jan 28 16:07:03 crc kubenswrapper[4811]: I0128 16:07:03.520570 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"8a7a828294dde0bd8dbd211cdc8b81d51144a40905535ed75ebac45ca7ba49c2"} Jan 28 16:07:03 crc kubenswrapper[4811]: I0128 16:07:03.520586 4811 scope.go:117] "RemoveContainer" containerID="ced9aabb8b9dead9319c14576d709e760cf8c817715a608c130685f02a1c32b6" Jan 28 16:07:06 crc kubenswrapper[4811]: E0128 16:07:06.148395 4811 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod99a30150_2d2e_420e_aba9_d7d99b62a612.slice\": RecentStats: unable to find data in memory cache]" Jan 28 16:07:08 crc kubenswrapper[4811]: I0128 16:07:08.590248 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"77a96295-92e1-4ed4-b6dc-352fda7dd5a5","Type":"ContainerStarted","Data":"81c518a6fb20f39b657f76792d18ba4cb97001b3ed3cc3dabf2f5abd8077f507"} Jan 28 16:07:09 crc kubenswrapper[4811]: I0128 
16:07:09.600975 4811 generic.go:334] "Generic (PLEG): container finished" podID="6c7795b3-4e53-4e49-b296-8ed8fe0ab751" containerID="29ffefe393b5a77bb0a1a8dbf8b34fffc5338c9b9cf20ce0629162a5692975f5" exitCode=0 Jan 28 16:07:09 crc kubenswrapper[4811]: I0128 16:07:09.601304 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-gff7z" event={"ID":"6c7795b3-4e53-4e49-b296-8ed8fe0ab751","Type":"ContainerDied","Data":"29ffefe393b5a77bb0a1a8dbf8b34fffc5338c9b9cf20ce0629162a5692975f5"} Jan 28 16:07:10 crc kubenswrapper[4811]: I0128 16:07:10.614636 4811 generic.go:334] "Generic (PLEG): container finished" podID="b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1" containerID="ad829404888e86ff13f629c270205ae6a1f39869e59a1b682907fd81c45dd990" exitCode=0 Jan 28 16:07:10 crc kubenswrapper[4811]: I0128 16:07:10.614692 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-4bw6q" event={"ID":"b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1","Type":"ContainerDied","Data":"ad829404888e86ff13f629c270205ae6a1f39869e59a1b682907fd81c45dd990"} Jan 28 16:07:10 crc kubenswrapper[4811]: I0128 16:07:10.933585 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-gff7z" Jan 28 16:07:10 crc kubenswrapper[4811]: I0128 16:07:10.988126 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-config-data\") pod \"6c7795b3-4e53-4e49-b296-8ed8fe0ab751\" (UID: \"6c7795b3-4e53-4e49-b296-8ed8fe0ab751\") " Jan 28 16:07:10 crc kubenswrapper[4811]: I0128 16:07:10.988200 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-fernet-keys\") pod \"6c7795b3-4e53-4e49-b296-8ed8fe0ab751\" (UID: \"6c7795b3-4e53-4e49-b296-8ed8fe0ab751\") " Jan 28 16:07:10 crc kubenswrapper[4811]: I0128 16:07:10.988253 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-scripts\") pod \"6c7795b3-4e53-4e49-b296-8ed8fe0ab751\" (UID: \"6c7795b3-4e53-4e49-b296-8ed8fe0ab751\") " Jan 28 16:07:10 crc kubenswrapper[4811]: I0128 16:07:10.988280 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-credential-keys\") pod \"6c7795b3-4e53-4e49-b296-8ed8fe0ab751\" (UID: \"6c7795b3-4e53-4e49-b296-8ed8fe0ab751\") " Jan 28 16:07:10 crc kubenswrapper[4811]: I0128 16:07:10.988345 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-combined-ca-bundle\") pod \"6c7795b3-4e53-4e49-b296-8ed8fe0ab751\" (UID: \"6c7795b3-4e53-4e49-b296-8ed8fe0ab751\") " Jan 28 16:07:10 crc kubenswrapper[4811]: I0128 16:07:10.988378 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-np9xg\" (UniqueName: \"kubernetes.io/projected/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-kube-api-access-np9xg\") pod \"6c7795b3-4e53-4e49-b296-8ed8fe0ab751\" (UID: \"6c7795b3-4e53-4e49-b296-8ed8fe0ab751\") " Jan 28 16:07:10 crc kubenswrapper[4811]: I0128 16:07:10.994686 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-scripts" (OuterVolumeSpecName: "scripts") pod "6c7795b3-4e53-4e49-b296-8ed8fe0ab751" (UID: "6c7795b3-4e53-4e49-b296-8ed8fe0ab751"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:07:10 crc kubenswrapper[4811]: I0128 16:07:10.995645 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "6c7795b3-4e53-4e49-b296-8ed8fe0ab751" (UID: "6c7795b3-4e53-4e49-b296-8ed8fe0ab751"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:07:10 crc kubenswrapper[4811]: I0128 16:07:10.996122 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "6c7795b3-4e53-4e49-b296-8ed8fe0ab751" (UID: "6c7795b3-4e53-4e49-b296-8ed8fe0ab751"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:07:10 crc kubenswrapper[4811]: I0128 16:07:10.997113 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-kube-api-access-np9xg" (OuterVolumeSpecName: "kube-api-access-np9xg") pod "6c7795b3-4e53-4e49-b296-8ed8fe0ab751" (UID: "6c7795b3-4e53-4e49-b296-8ed8fe0ab751"). InnerVolumeSpecName "kube-api-access-np9xg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.014470 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-config-data" (OuterVolumeSpecName: "config-data") pod "6c7795b3-4e53-4e49-b296-8ed8fe0ab751" (UID: "6c7795b3-4e53-4e49-b296-8ed8fe0ab751"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.020262 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6c7795b3-4e53-4e49-b296-8ed8fe0ab751" (UID: "6c7795b3-4e53-4e49-b296-8ed8fe0ab751"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.090313 4811 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.090340 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.090349 4811 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.090358 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.090367 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-np9xg\" (UniqueName: \"kubernetes.io/projected/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-kube-api-access-np9xg\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.090377 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c7795b3-4e53-4e49-b296-8ed8fe0ab751-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.624301 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-gff7z" event={"ID":"6c7795b3-4e53-4e49-b296-8ed8fe0ab751","Type":"ContainerDied","Data":"a0e67c2ddbd4c7672abae61063155136bee7973376e3621af4d0aa4bf606f534"} Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.624342 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a0e67c2ddbd4c7672abae61063155136bee7973376e3621af4d0aa4bf606f534" Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.624346 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-gff7z" Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.825556 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-7b7bd9558c-kbczp"] Jan 28 16:07:11 crc kubenswrapper[4811]: E0128 16:07:11.826220 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c7795b3-4e53-4e49-b296-8ed8fe0ab751" containerName="keystone-bootstrap" Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.826236 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c7795b3-4e53-4e49-b296-8ed8fe0ab751" containerName="keystone-bootstrap" Jan 28 16:07:11 crc kubenswrapper[4811]: E0128 16:07:11.826256 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3730224-7295-4fea-bd2e-67cad0392474" containerName="dnsmasq-dns" Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.826264 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3730224-7295-4fea-bd2e-67cad0392474" containerName="dnsmasq-dns" Jan 28 16:07:11 crc kubenswrapper[4811]: E0128 16:07:11.826276 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3730224-7295-4fea-bd2e-67cad0392474" containerName="init" Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.826283 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3730224-7295-4fea-bd2e-67cad0392474" containerName="init" Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.826475 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3730224-7295-4fea-bd2e-67cad0392474" containerName="dnsmasq-dns" Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.826494 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c7795b3-4e53-4e49-b296-8ed8fe0ab751" containerName="keystone-bootstrap" Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.827111 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-7b7bd9558c-kbczp" Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.836528 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.836565 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.836528 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.836877 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.836991 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-ns2b7" Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.837457 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.843717 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7b7bd9558c-kbczp"] Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.908581 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-credential-keys\") pod \"keystone-7b7bd9558c-kbczp\" (UID: \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\") " pod="openstack/keystone-7b7bd9558c-kbczp" Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.908664 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-fernet-keys\") pod \"keystone-7b7bd9558c-kbczp\" (UID: \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\") " pod="openstack/keystone-7b7bd9558c-kbczp" Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.908700 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-public-tls-certs\") pod \"keystone-7b7bd9558c-kbczp\" (UID: \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\") " pod="openstack/keystone-7b7bd9558c-kbczp" Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.908742 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-scripts\") pod \"keystone-7b7bd9558c-kbczp\" (UID: \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\") " pod="openstack/keystone-7b7bd9558c-kbczp" Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.908844 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwsxl\" (UniqueName: \"kubernetes.io/projected/e10c1c14-4c1a-445e-9c98-1f0b6b334802-kube-api-access-vwsxl\") pod \"keystone-7b7bd9558c-kbczp\" (UID: \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\") " pod="openstack/keystone-7b7bd9558c-kbczp" Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.908874 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-combined-ca-bundle\") pod \"keystone-7b7bd9558c-kbczp\" (UID: 
\"e10c1c14-4c1a-445e-9c98-1f0b6b334802\") " pod="openstack/keystone-7b7bd9558c-kbczp" Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.908936 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-config-data\") pod \"keystone-7b7bd9558c-kbczp\" (UID: \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\") " pod="openstack/keystone-7b7bd9558c-kbczp" Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.908981 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-internal-tls-certs\") pod \"keystone-7b7bd9558c-kbczp\" (UID: \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\") " pod="openstack/keystone-7b7bd9558c-kbczp" Jan 28 16:07:11 crc kubenswrapper[4811]: I0128 16:07:11.987888 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-4bw6q" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.010411 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1-scripts\") pod \"b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1\" (UID: \"b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1\") " Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.010532 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8rn2z\" (UniqueName: \"kubernetes.io/projected/b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1-kube-api-access-8rn2z\") pod \"b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1\" (UID: \"b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1\") " Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.010572 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1-config-data\") pod \"b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1\" (UID: \"b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1\") " Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.010668 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1-logs\") pod \"b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1\" (UID: \"b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1\") " Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.010700 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1-combined-ca-bundle\") pod \"b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1\" (UID: \"b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1\") " Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.010867 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-scripts\") pod \"keystone-7b7bd9558c-kbczp\" (UID: \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\") " pod="openstack/keystone-7b7bd9558c-kbczp" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.010940 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwsxl\" (UniqueName: \"kubernetes.io/projected/e10c1c14-4c1a-445e-9c98-1f0b6b334802-kube-api-access-vwsxl\") pod \"keystone-7b7bd9558c-kbczp\" (UID: \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\") " 
pod="openstack/keystone-7b7bd9558c-kbczp" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.010970 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-combined-ca-bundle\") pod \"keystone-7b7bd9558c-kbczp\" (UID: \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\") " pod="openstack/keystone-7b7bd9558c-kbczp" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.011024 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-config-data\") pod \"keystone-7b7bd9558c-kbczp\" (UID: \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\") " pod="openstack/keystone-7b7bd9558c-kbczp" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.011067 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-internal-tls-certs\") pod \"keystone-7b7bd9558c-kbczp\" (UID: \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\") " pod="openstack/keystone-7b7bd9558c-kbczp" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.011109 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-credential-keys\") pod \"keystone-7b7bd9558c-kbczp\" (UID: \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\") " pod="openstack/keystone-7b7bd9558c-kbczp" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.011151 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-fernet-keys\") pod \"keystone-7b7bd9558c-kbczp\" (UID: \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\") " pod="openstack/keystone-7b7bd9558c-kbczp" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.011178 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-public-tls-certs\") pod \"keystone-7b7bd9558c-kbczp\" (UID: \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\") " pod="openstack/keystone-7b7bd9558c-kbczp" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.013808 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1-logs" (OuterVolumeSpecName: "logs") pod "b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1" (UID: "b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.018250 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-credential-keys\") pod \"keystone-7b7bd9558c-kbczp\" (UID: \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\") " pod="openstack/keystone-7b7bd9558c-kbczp" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.019627 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1-kube-api-access-8rn2z" (OuterVolumeSpecName: "kube-api-access-8rn2z") pod "b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1" (UID: "b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1"). InnerVolumeSpecName "kube-api-access-8rn2z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.022880 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-fernet-keys\") pod \"keystone-7b7bd9558c-kbczp\" (UID: \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\") " pod="openstack/keystone-7b7bd9558c-kbczp" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.023633 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-combined-ca-bundle\") pod \"keystone-7b7bd9558c-kbczp\" (UID: \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\") " pod="openstack/keystone-7b7bd9558c-kbczp" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.029909 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-scripts\") pod \"keystone-7b7bd9558c-kbczp\" (UID: \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\") " pod="openstack/keystone-7b7bd9558c-kbczp" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.030009 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-internal-tls-certs\") pod \"keystone-7b7bd9558c-kbczp\" (UID: \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\") " pod="openstack/keystone-7b7bd9558c-kbczp" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.030326 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1-scripts" (OuterVolumeSpecName: "scripts") pod "b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1" (UID: "b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.032640 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-config-data\") pod \"keystone-7b7bd9558c-kbczp\" (UID: \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\") " pod="openstack/keystone-7b7bd9558c-kbczp" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.032813 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-public-tls-certs\") pod \"keystone-7b7bd9558c-kbczp\" (UID: \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\") " pod="openstack/keystone-7b7bd9558c-kbczp" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.036488 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwsxl\" (UniqueName: \"kubernetes.io/projected/e10c1c14-4c1a-445e-9c98-1f0b6b334802-kube-api-access-vwsxl\") pod \"keystone-7b7bd9558c-kbczp\" (UID: \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\") " pod="openstack/keystone-7b7bd9558c-kbczp" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.055407 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1" (UID: "b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.055676 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1-config-data" (OuterVolumeSpecName: "config-data") pod "b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1" (UID: "b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.111933 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.111965 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8rn2z\" (UniqueName: \"kubernetes.io/projected/b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1-kube-api-access-8rn2z\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.111981 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.111993 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1-logs\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.112006 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.155189 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7b7bd9558c-kbczp" Jan 28 16:07:12 crc kubenswrapper[4811]: E0128 16:07:12.342462 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api@sha256:e4aa4ebbb1e581a12040e9ad2ae2709ac31b5d965bb64fc4252d1028b05c565f\\\"\"" pod="openstack/glance-db-sync-bsvjv" podUID="91de0bcd-1c43-42d8-8582-ba86e33b81c2" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.607156 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7b7bd9558c-kbczp"] Jan 28 16:07:12 crc kubenswrapper[4811]: W0128 16:07:12.611284 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode10c1c14_4c1a_445e_9c98_1f0b6b334802.slice/crio-e90dd6d3975db175f7c87a09a881db70b66a4faad043f22b529970f28a479de6 WatchSource:0}: Error finding container e90dd6d3975db175f7c87a09a881db70b66a4faad043f22b529970f28a479de6: Status 404 returned error can't find the container with id e90dd6d3975db175f7c87a09a881db70b66a4faad043f22b529970f28a479de6 Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.636270 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-4bw6q" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.636309 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-4bw6q" event={"ID":"b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1","Type":"ContainerDied","Data":"7326795b24f1232d411422ad0f3d3b776ea81c6ba3dd9d21035934215b64cc63"} Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.637907 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7326795b24f1232d411422ad0f3d3b776ea81c6ba3dd9d21035934215b64cc63" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.637930 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7b7bd9558c-kbczp" event={"ID":"e10c1c14-4c1a-445e-9c98-1f0b6b334802","Type":"ContainerStarted","Data":"e90dd6d3975db175f7c87a09a881db70b66a4faad043f22b529970f28a479de6"} Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.784324 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-7b7c5df494-2hz6m"] Jan 28 16:07:12 crc kubenswrapper[4811]: E0128 16:07:12.785276 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1" containerName="placement-db-sync" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.785307 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1" containerName="placement-db-sync" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.785571 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1" containerName="placement-db-sync" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.786624 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-7b7c5df494-2hz6m" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.788597 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-ld8qm" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.789379 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.790281 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.790574 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.790813 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.807176 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-7b7c5df494-2hz6m"] Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.825488 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1a4fd4e-29a2-464f-aca7-3f856ed15221-internal-tls-certs\") pod \"placement-7b7c5df494-2hz6m\" (UID: \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\") " pod="openstack/placement-7b7c5df494-2hz6m" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.825538 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1a4fd4e-29a2-464f-aca7-3f856ed15221-combined-ca-bundle\") pod \"placement-7b7c5df494-2hz6m\" (UID: \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\") " pod="openstack/placement-7b7c5df494-2hz6m" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.825569 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1a4fd4e-29a2-464f-aca7-3f856ed15221-logs\") pod \"placement-7b7c5df494-2hz6m\" (UID: \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\") " pod="openstack/placement-7b7c5df494-2hz6m" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.825595 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1a4fd4e-29a2-464f-aca7-3f856ed15221-public-tls-certs\") pod \"placement-7b7c5df494-2hz6m\" (UID: \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\") " pod="openstack/placement-7b7c5df494-2hz6m" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.825650 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1a4fd4e-29a2-464f-aca7-3f856ed15221-config-data\") pod \"placement-7b7c5df494-2hz6m\" (UID: \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\") " pod="openstack/placement-7b7c5df494-2hz6m" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.825672 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45rmw\" (UniqueName: \"kubernetes.io/projected/a1a4fd4e-29a2-464f-aca7-3f856ed15221-kube-api-access-45rmw\") pod \"placement-7b7c5df494-2hz6m\" (UID: \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\") " pod="openstack/placement-7b7c5df494-2hz6m" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 
16:07:12.825702 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1a4fd4e-29a2-464f-aca7-3f856ed15221-scripts\") pod \"placement-7b7c5df494-2hz6m\" (UID: \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\") " pod="openstack/placement-7b7c5df494-2hz6m" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.926817 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1a4fd4e-29a2-464f-aca7-3f856ed15221-config-data\") pod \"placement-7b7c5df494-2hz6m\" (UID: \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\") " pod="openstack/placement-7b7c5df494-2hz6m" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.926886 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45rmw\" (UniqueName: \"kubernetes.io/projected/a1a4fd4e-29a2-464f-aca7-3f856ed15221-kube-api-access-45rmw\") pod \"placement-7b7c5df494-2hz6m\" (UID: \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\") " pod="openstack/placement-7b7c5df494-2hz6m" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.926935 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1a4fd4e-29a2-464f-aca7-3f856ed15221-scripts\") pod \"placement-7b7c5df494-2hz6m\" (UID: \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\") " pod="openstack/placement-7b7c5df494-2hz6m" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.927005 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1a4fd4e-29a2-464f-aca7-3f856ed15221-internal-tls-certs\") pod \"placement-7b7c5df494-2hz6m\" (UID: \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\") " pod="openstack/placement-7b7c5df494-2hz6m" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.927035 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1a4fd4e-29a2-464f-aca7-3f856ed15221-combined-ca-bundle\") pod \"placement-7b7c5df494-2hz6m\" (UID: \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\") " pod="openstack/placement-7b7c5df494-2hz6m" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.927063 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1a4fd4e-29a2-464f-aca7-3f856ed15221-logs\") pod \"placement-7b7c5df494-2hz6m\" (UID: \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\") " pod="openstack/placement-7b7c5df494-2hz6m" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.927098 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1a4fd4e-29a2-464f-aca7-3f856ed15221-public-tls-certs\") pod \"placement-7b7c5df494-2hz6m\" (UID: \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\") " pod="openstack/placement-7b7c5df494-2hz6m" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.929300 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1a4fd4e-29a2-464f-aca7-3f856ed15221-logs\") pod \"placement-7b7c5df494-2hz6m\" (UID: \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\") " pod="openstack/placement-7b7c5df494-2hz6m" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.932300 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/a1a4fd4e-29a2-464f-aca7-3f856ed15221-public-tls-certs\") pod \"placement-7b7c5df494-2hz6m\" (UID: \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\") " pod="openstack/placement-7b7c5df494-2hz6m" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.932555 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1a4fd4e-29a2-464f-aca7-3f856ed15221-internal-tls-certs\") pod \"placement-7b7c5df494-2hz6m\" (UID: \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\") " pod="openstack/placement-7b7c5df494-2hz6m" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.934248 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1a4fd4e-29a2-464f-aca7-3f856ed15221-scripts\") pod \"placement-7b7c5df494-2hz6m\" (UID: \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\") " pod="openstack/placement-7b7c5df494-2hz6m" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.934413 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1a4fd4e-29a2-464f-aca7-3f856ed15221-combined-ca-bundle\") pod \"placement-7b7c5df494-2hz6m\" (UID: \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\") " pod="openstack/placement-7b7c5df494-2hz6m" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.941161 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1a4fd4e-29a2-464f-aca7-3f856ed15221-config-data\") pod \"placement-7b7c5df494-2hz6m\" (UID: \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\") " pod="openstack/placement-7b7c5df494-2hz6m" Jan 28 16:07:12 crc kubenswrapper[4811]: I0128 16:07:12.948770 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45rmw\" (UniqueName: \"kubernetes.io/projected/a1a4fd4e-29a2-464f-aca7-3f856ed15221-kube-api-access-45rmw\") pod \"placement-7b7c5df494-2hz6m\" (UID: \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\") " pod="openstack/placement-7b7c5df494-2hz6m" Jan 28 16:07:13 crc kubenswrapper[4811]: I0128 16:07:13.115532 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-7b7c5df494-2hz6m" Jan 28 16:07:13 crc kubenswrapper[4811]: I0128 16:07:13.651857 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7b7bd9558c-kbczp" event={"ID":"e10c1c14-4c1a-445e-9c98-1f0b6b334802","Type":"ContainerStarted","Data":"1408d806929ba9f320ef606ac871fcb79473917f47486e47e26772ffb153aa37"} Jan 28 16:07:13 crc kubenswrapper[4811]: I0128 16:07:13.653294 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-7b7bd9558c-kbczp" Jan 28 16:07:13 crc kubenswrapper[4811]: I0128 16:07:13.679633 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-7b7bd9558c-kbczp" podStartSLOduration=2.679607239 podStartE2EDuration="2.679607239s" podCreationTimestamp="2026-01-28 16:07:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:07:13.670572765 +0000 UTC m=+1326.424936358" watchObservedRunningTime="2026-01-28 16:07:13.679607239 +0000 UTC m=+1326.433970822" Jan 28 16:07:20 crc kubenswrapper[4811]: I0128 16:07:20.674777 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-7b7c5df494-2hz6m"] Jan 28 16:07:20 crc kubenswrapper[4811]: W0128 16:07:20.952816 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda1a4fd4e_29a2_464f_aca7_3f856ed15221.slice/crio-4838faa0b0365355d7340151c0f804d22098b22990de0db69cf5432869a05908 WatchSource:0}: Error finding container 4838faa0b0365355d7340151c0f804d22098b22990de0db69cf5432869a05908: Status 404 returned error can't find the container with id 4838faa0b0365355d7340151c0f804d22098b22990de0db69cf5432869a05908 Jan 28 16:07:21 crc kubenswrapper[4811]: I0128 16:07:21.723655 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7b7c5df494-2hz6m" event={"ID":"a1a4fd4e-29a2-464f-aca7-3f856ed15221","Type":"ContainerStarted","Data":"4838faa0b0365355d7340151c0f804d22098b22990de0db69cf5432869a05908"} Jan 28 16:07:22 crc kubenswrapper[4811]: E0128 16:07:22.399212 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="77a96295-92e1-4ed4-b6dc-352fda7dd5a5" Jan 28 16:07:22 crc kubenswrapper[4811]: I0128 16:07:22.739947 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"77a96295-92e1-4ed4-b6dc-352fda7dd5a5","Type":"ContainerStarted","Data":"7312fb62f6a38031d41ded229c60796fac95e605b264b4abf3919c8424d0414b"} Jan 28 16:07:22 crc kubenswrapper[4811]: I0128 16:07:22.741138 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 28 16:07:22 crc kubenswrapper[4811]: I0128 16:07:22.740475 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="77a96295-92e1-4ed4-b6dc-352fda7dd5a5" containerName="sg-core" containerID="cri-o://81c518a6fb20f39b657f76792d18ba4cb97001b3ed3cc3dabf2f5abd8077f507" gracePeriod=30 Jan 28 16:07:22 crc kubenswrapper[4811]: I0128 16:07:22.740510 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="77a96295-92e1-4ed4-b6dc-352fda7dd5a5" containerName="proxy-httpd" 
containerID="cri-o://7312fb62f6a38031d41ded229c60796fac95e605b264b4abf3919c8424d0414b" gracePeriod=30 Jan 28 16:07:22 crc kubenswrapper[4811]: I0128 16:07:22.740192 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="77a96295-92e1-4ed4-b6dc-352fda7dd5a5" containerName="ceilometer-notification-agent" containerID="cri-o://fbd27eb090cbe712546265aaf2668b722b8738fe1528b40f273affe761f7bd58" gracePeriod=30 Jan 28 16:07:22 crc kubenswrapper[4811]: I0128 16:07:22.741999 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7b7c5df494-2hz6m" event={"ID":"a1a4fd4e-29a2-464f-aca7-3f856ed15221","Type":"ContainerStarted","Data":"f494267049de0afa9aadacc83552d047aaa6b559a44a9bc8ab504e3eb508fb84"} Jan 28 16:07:22 crc kubenswrapper[4811]: I0128 16:07:22.753322 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-hdczs" event={"ID":"7acf4fe9-fb7e-4260-9976-04deecc1264d","Type":"ContainerStarted","Data":"f78d6d49ea579cf735dcc8dce1604031cf24c3e9ba1882bbee2bb70613b01d91"} Jan 28 16:07:22 crc kubenswrapper[4811]: I0128 16:07:22.808323 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-hdczs" podStartSLOduration=3.021139332 podStartE2EDuration="51.80829893s" podCreationTimestamp="2026-01-28 16:06:31 +0000 UTC" firstStartedPulling="2026-01-28 16:06:32.49777155 +0000 UTC m=+1285.252135133" lastFinishedPulling="2026-01-28 16:07:21.284931148 +0000 UTC m=+1334.039294731" observedRunningTime="2026-01-28 16:07:22.797025995 +0000 UTC m=+1335.551389598" watchObservedRunningTime="2026-01-28 16:07:22.80829893 +0000 UTC m=+1335.562662523" Jan 28 16:07:23 crc kubenswrapper[4811]: I0128 16:07:23.780388 4811 generic.go:334] "Generic (PLEG): container finished" podID="77a96295-92e1-4ed4-b6dc-352fda7dd5a5" containerID="7312fb62f6a38031d41ded229c60796fac95e605b264b4abf3919c8424d0414b" exitCode=0 Jan 28 16:07:23 crc kubenswrapper[4811]: I0128 16:07:23.780889 4811 generic.go:334] "Generic (PLEG): container finished" podID="77a96295-92e1-4ed4-b6dc-352fda7dd5a5" containerID="81c518a6fb20f39b657f76792d18ba4cb97001b3ed3cc3dabf2f5abd8077f507" exitCode=2 Jan 28 16:07:23 crc kubenswrapper[4811]: I0128 16:07:23.780455 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"77a96295-92e1-4ed4-b6dc-352fda7dd5a5","Type":"ContainerDied","Data":"7312fb62f6a38031d41ded229c60796fac95e605b264b4abf3919c8424d0414b"} Jan 28 16:07:23 crc kubenswrapper[4811]: I0128 16:07:23.781172 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"77a96295-92e1-4ed4-b6dc-352fda7dd5a5","Type":"ContainerDied","Data":"81c518a6fb20f39b657f76792d18ba4cb97001b3ed3cc3dabf2f5abd8077f507"} Jan 28 16:07:23 crc kubenswrapper[4811]: I0128 16:07:23.785056 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7b7c5df494-2hz6m" event={"ID":"a1a4fd4e-29a2-464f-aca7-3f856ed15221","Type":"ContainerStarted","Data":"6cba40ddbfcb2754d34d28f3d64877131d7be93e51d4f11947d0c71c0b815b3f"} Jan 28 16:07:23 crc kubenswrapper[4811]: I0128 16:07:23.785458 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-7b7c5df494-2hz6m" Jan 28 16:07:23 crc kubenswrapper[4811]: I0128 16:07:23.785562 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-7b7c5df494-2hz6m" Jan 28 16:07:23 crc kubenswrapper[4811]: I0128 16:07:23.823102 4811 
Jan 28 16:07:23 crc kubenswrapper[4811]: I0128 16:07:23.780388 4811 generic.go:334] "Generic (PLEG): container finished" podID="77a96295-92e1-4ed4-b6dc-352fda7dd5a5" containerID="7312fb62f6a38031d41ded229c60796fac95e605b264b4abf3919c8424d0414b" exitCode=0
Jan 28 16:07:23 crc kubenswrapper[4811]: I0128 16:07:23.780889 4811 generic.go:334] "Generic (PLEG): container finished" podID="77a96295-92e1-4ed4-b6dc-352fda7dd5a5" containerID="81c518a6fb20f39b657f76792d18ba4cb97001b3ed3cc3dabf2f5abd8077f507" exitCode=2
Jan 28 16:07:23 crc kubenswrapper[4811]: I0128 16:07:23.780455 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"77a96295-92e1-4ed4-b6dc-352fda7dd5a5","Type":"ContainerDied","Data":"7312fb62f6a38031d41ded229c60796fac95e605b264b4abf3919c8424d0414b"}
Jan 28 16:07:23 crc kubenswrapper[4811]: I0128 16:07:23.781172 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"77a96295-92e1-4ed4-b6dc-352fda7dd5a5","Type":"ContainerDied","Data":"81c518a6fb20f39b657f76792d18ba4cb97001b3ed3cc3dabf2f5abd8077f507"}
Jan 28 16:07:23 crc kubenswrapper[4811]: I0128 16:07:23.785056 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7b7c5df494-2hz6m" event={"ID":"a1a4fd4e-29a2-464f-aca7-3f856ed15221","Type":"ContainerStarted","Data":"6cba40ddbfcb2754d34d28f3d64877131d7be93e51d4f11947d0c71c0b815b3f"}
Jan 28 16:07:23 crc kubenswrapper[4811]: I0128 16:07:23.785458 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-7b7c5df494-2hz6m"
Jan 28 16:07:23 crc kubenswrapper[4811]: I0128 16:07:23.785562 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-7b7c5df494-2hz6m"
Jan 28 16:07:23 crc kubenswrapper[4811]: I0128 16:07:23.823102 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-7b7c5df494-2hz6m" podStartSLOduration=11.82307245 podStartE2EDuration="11.82307245s" podCreationTimestamp="2026-01-28 16:07:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:07:23.815266078 +0000 UTC m=+1336.569629661" watchObservedRunningTime="2026-01-28 16:07:23.82307245 +0000 UTC m=+1336.577436073"
Jan 28 16:07:27 crc kubenswrapper[4811]: I0128 16:07:27.828844 4811 generic.go:334] "Generic (PLEG): container finished" podID="77a96295-92e1-4ed4-b6dc-352fda7dd5a5" containerID="fbd27eb090cbe712546265aaf2668b722b8738fe1528b40f273affe761f7bd58" exitCode=0
Jan 28 16:07:27 crc kubenswrapper[4811]: I0128 16:07:27.829462 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"77a96295-92e1-4ed4-b6dc-352fda7dd5a5","Type":"ContainerDied","Data":"fbd27eb090cbe712546265aaf2668b722b8738fe1528b40f273affe761f7bd58"}
Jan 28 16:07:32 crc kubenswrapper[4811]: I0128 16:07:32.264054 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="77a96295-92e1-4ed4-b6dc-352fda7dd5a5" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.146:3000/\": dial tcp 10.217.0.146:3000: connect: connection refused"
Jan 28 16:07:34 crc kubenswrapper[4811]: I0128 16:07:34.063659 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 28 16:07:34 crc kubenswrapper[4811]: I0128 16:07:34.206569 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-sg-core-conf-yaml\") pod \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\" (UID: \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\") "
Jan 28 16:07:34 crc kubenswrapper[4811]: I0128 16:07:34.206681 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-config-data\") pod \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\" (UID: \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\") "
Jan 28 16:07:34 crc kubenswrapper[4811]: I0128 16:07:34.206754 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-log-httpd\") pod \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\" (UID: \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\") "
Jan 28 16:07:34 crc kubenswrapper[4811]: I0128 16:07:34.206814 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qm8fn\" (UniqueName: \"kubernetes.io/projected/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-kube-api-access-qm8fn\") pod \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\" (UID: \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\") "
Jan 28 16:07:34 crc kubenswrapper[4811]: I0128 16:07:34.206835 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-combined-ca-bundle\") pod \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\" (UID: \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\") "
Jan 28 16:07:34 crc kubenswrapper[4811]: I0128 16:07:34.206859 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName:
\"kubernetes.io/empty-dir/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-run-httpd\") pod \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\" (UID: \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\") " Jan 28 16:07:34 crc kubenswrapper[4811]: I0128 16:07:34.206878 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-scripts\") pod \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\" (UID: \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\") " Jan 28 16:07:34 crc kubenswrapper[4811]: I0128 16:07:34.207673 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "77a96295-92e1-4ed4-b6dc-352fda7dd5a5" (UID: "77a96295-92e1-4ed4-b6dc-352fda7dd5a5"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:07:34 crc kubenswrapper[4811]: I0128 16:07:34.207841 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "77a96295-92e1-4ed4-b6dc-352fda7dd5a5" (UID: "77a96295-92e1-4ed4-b6dc-352fda7dd5a5"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:07:34 crc kubenswrapper[4811]: I0128 16:07:34.213590 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-kube-api-access-qm8fn" (OuterVolumeSpecName: "kube-api-access-qm8fn") pod "77a96295-92e1-4ed4-b6dc-352fda7dd5a5" (UID: "77a96295-92e1-4ed4-b6dc-352fda7dd5a5"). InnerVolumeSpecName "kube-api-access-qm8fn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:07:34 crc kubenswrapper[4811]: I0128 16:07:34.214009 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-scripts" (OuterVolumeSpecName: "scripts") pod "77a96295-92e1-4ed4-b6dc-352fda7dd5a5" (UID: "77a96295-92e1-4ed4-b6dc-352fda7dd5a5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:07:34 crc kubenswrapper[4811]: I0128 16:07:34.303365 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "77a96295-92e1-4ed4-b6dc-352fda7dd5a5" (UID: "77a96295-92e1-4ed4-b6dc-352fda7dd5a5"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:07:34 crc kubenswrapper[4811]: I0128 16:07:34.307506 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "77a96295-92e1-4ed4-b6dc-352fda7dd5a5" (UID: "77a96295-92e1-4ed4-b6dc-352fda7dd5a5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:07:34 crc kubenswrapper[4811]: I0128 16:07:34.308417 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-combined-ca-bundle\") pod \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\" (UID: \"77a96295-92e1-4ed4-b6dc-352fda7dd5a5\") " Jan 28 16:07:34 crc kubenswrapper[4811]: W0128 16:07:34.309584 4811 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/77a96295-92e1-4ed4-b6dc-352fda7dd5a5/volumes/kubernetes.io~secret/combined-ca-bundle Jan 28 16:07:34 crc kubenswrapper[4811]: I0128 16:07:34.309630 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "77a96295-92e1-4ed4-b6dc-352fda7dd5a5" (UID: "77a96295-92e1-4ed4-b6dc-352fda7dd5a5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:07:34 crc kubenswrapper[4811]: I0128 16:07:34.310462 4811 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:34 crc kubenswrapper[4811]: I0128 16:07:34.310492 4811 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:34 crc kubenswrapper[4811]: I0128 16:07:34.310505 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qm8fn\" (UniqueName: \"kubernetes.io/projected/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-kube-api-access-qm8fn\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:34 crc kubenswrapper[4811]: I0128 16:07:34.310520 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:34 crc kubenswrapper[4811]: I0128 16:07:34.310531 4811 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:34 crc kubenswrapper[4811]: I0128 16:07:34.310542 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:34 crc kubenswrapper[4811]: I0128 16:07:34.337518 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-config-data" (OuterVolumeSpecName: "config-data") pod "77a96295-92e1-4ed4-b6dc-352fda7dd5a5" (UID: "77a96295-92e1-4ed4-b6dc-352fda7dd5a5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:07:34 crc kubenswrapper[4811]: I0128 16:07:34.412945 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77a96295-92e1-4ed4-b6dc-352fda7dd5a5-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.528706 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-bsvjv" event={"ID":"91de0bcd-1c43-42d8-8582-ba86e33b81c2","Type":"ContainerStarted","Data":"c766c501d7fef70ac33d495992a7f932c795028bb1028a15f2a477629ccc37be"} Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.531405 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.531360 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"77a96295-92e1-4ed4-b6dc-352fda7dd5a5","Type":"ContainerDied","Data":"ea55ed8c92f9871e3e2ed362bf3a0136b29999ee7028849d3c4c091d29fcaa20"} Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.531608 4811 scope.go:117] "RemoveContainer" containerID="7312fb62f6a38031d41ded229c60796fac95e605b264b4abf3919c8424d0414b" Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.560844 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-bsvjv" podStartSLOduration=3.539018359 podStartE2EDuration="1m28.560820431s" podCreationTimestamp="2026-01-28 16:06:07 +0000 UTC" firstStartedPulling="2026-01-28 16:06:08.815255145 +0000 UTC m=+1261.569618728" lastFinishedPulling="2026-01-28 16:07:33.837057207 +0000 UTC m=+1346.591420800" observedRunningTime="2026-01-28 16:07:35.557511892 +0000 UTC m=+1348.311875485" watchObservedRunningTime="2026-01-28 16:07:35.560820431 +0000 UTC m=+1348.315184014" Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.564497 4811 scope.go:117] "RemoveContainer" containerID="81c518a6fb20f39b657f76792d18ba4cb97001b3ed3cc3dabf2f5abd8077f507" Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.634938 4811 scope.go:117] "RemoveContainer" containerID="fbd27eb090cbe712546265aaf2668b722b8738fe1528b40f273affe761f7bd58" Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.660980 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.686504 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.703841 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:07:35 crc kubenswrapper[4811]: E0128 16:07:35.704344 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77a96295-92e1-4ed4-b6dc-352fda7dd5a5" containerName="sg-core" Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.704368 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="77a96295-92e1-4ed4-b6dc-352fda7dd5a5" containerName="sg-core" Jan 28 16:07:35 crc kubenswrapper[4811]: E0128 16:07:35.704390 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77a96295-92e1-4ed4-b6dc-352fda7dd5a5" containerName="proxy-httpd" Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.704397 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="77a96295-92e1-4ed4-b6dc-352fda7dd5a5" containerName="proxy-httpd" Jan 28 16:07:35 crc kubenswrapper[4811]: E0128 16:07:35.704462 4811 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="77a96295-92e1-4ed4-b6dc-352fda7dd5a5" containerName="ceilometer-notification-agent" Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.704475 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="77a96295-92e1-4ed4-b6dc-352fda7dd5a5" containerName="ceilometer-notification-agent" Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.704663 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="77a96295-92e1-4ed4-b6dc-352fda7dd5a5" containerName="sg-core" Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.704703 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="77a96295-92e1-4ed4-b6dc-352fda7dd5a5" containerName="proxy-httpd" Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.704716 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="77a96295-92e1-4ed4-b6dc-352fda7dd5a5" containerName="ceilometer-notification-agent" Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.706385 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.710083 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.715503 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.721738 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.730779 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdqch\" (UniqueName: \"kubernetes.io/projected/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-kube-api-access-gdqch\") pod \"ceilometer-0\" (UID: \"6f349cc2-fb0d-4cf9-8280-f4007c57ebd2\") " pod="openstack/ceilometer-0" Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.730851 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-scripts\") pod \"ceilometer-0\" (UID: \"6f349cc2-fb0d-4cf9-8280-f4007c57ebd2\") " pod="openstack/ceilometer-0" Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.730882 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-config-data\") pod \"ceilometer-0\" (UID: \"6f349cc2-fb0d-4cf9-8280-f4007c57ebd2\") " pod="openstack/ceilometer-0" Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.730917 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6f349cc2-fb0d-4cf9-8280-f4007c57ebd2\") " pod="openstack/ceilometer-0" Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.730933 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6f349cc2-fb0d-4cf9-8280-f4007c57ebd2\") " pod="openstack/ceilometer-0" Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.730969 4811 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-run-httpd\") pod \"ceilometer-0\" (UID: \"6f349cc2-fb0d-4cf9-8280-f4007c57ebd2\") " pod="openstack/ceilometer-0"
Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.730987 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-log-httpd\") pod \"ceilometer-0\" (UID: \"6f349cc2-fb0d-4cf9-8280-f4007c57ebd2\") " pod="openstack/ceilometer-0"
Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.797744 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Jan 28 16:07:35 crc kubenswrapper[4811]: E0128 16:07:35.810665 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle config-data kube-api-access-gdqch log-httpd run-httpd scripts sg-core-conf-yaml], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/ceilometer-0" podUID="6f349cc2-fb0d-4cf9-8280-f4007c57ebd2"
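The E0128 entry above is a transient race rather than a persistent failure: the replacement ceilometer-0 pod was synced while its volumes were still being set up, so the sync was canceled ("context canceled") and retried on a later pass. When skimming a log like this, tallying "Error syncing pod, skipping" lines by pod and reason helps separate such one-off races from repeating conditions like the ImagePullBackOff seen earlier for glance-db-sync-bsvjv. A minimal sketch, assuming Python 3 (the log path is hypothetical):

    # Illustrative sketch: count "Error syncing pod, skipping" entries by pod
    # and by a short prefix of the reported reason.
    import re
    from collections import Counter

    def tally_sync_errors(lines):
        counts = Counter()
        for line in lines:
            if '"Error syncing pod, skipping"' not in line:
                continue
            pod = re.search(r' pod="([^"]+)"', line)
            err = re.search(r'err="(.{0,40})', line)
            counts[(pod.group(1) if pod else "?",
                    err.group(1) if err else "?")] += 1
        return counts

    # e.g. tally_sync_errors(open("kubelet.log")).most_common()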
\"kubernetes.io/secret/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-config-data\") pod \"ceilometer-0\" (UID: \"6f349cc2-fb0d-4cf9-8280-f4007c57ebd2\") " pod="openstack/ceilometer-0" Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.833738 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-run-httpd\") pod \"ceilometer-0\" (UID: \"6f349cc2-fb0d-4cf9-8280-f4007c57ebd2\") " pod="openstack/ceilometer-0" Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.834452 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-log-httpd\") pod \"ceilometer-0\" (UID: \"6f349cc2-fb0d-4cf9-8280-f4007c57ebd2\") " pod="openstack/ceilometer-0" Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.839297 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6f349cc2-fb0d-4cf9-8280-f4007c57ebd2\") " pod="openstack/ceilometer-0" Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.839667 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-scripts\") pod \"ceilometer-0\" (UID: \"6f349cc2-fb0d-4cf9-8280-f4007c57ebd2\") " pod="openstack/ceilometer-0" Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.839842 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-config-data\") pod \"ceilometer-0\" (UID: \"6f349cc2-fb0d-4cf9-8280-f4007c57ebd2\") " pod="openstack/ceilometer-0" Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.842450 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6f349cc2-fb0d-4cf9-8280-f4007c57ebd2\") " pod="openstack/ceilometer-0" Jan 28 16:07:35 crc kubenswrapper[4811]: I0128 16:07:35.859139 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdqch\" (UniqueName: \"kubernetes.io/projected/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-kube-api-access-gdqch\") pod \"ceilometer-0\" (UID: \"6f349cc2-fb0d-4cf9-8280-f4007c57ebd2\") " pod="openstack/ceilometer-0" Jan 28 16:07:36 crc kubenswrapper[4811]: I0128 16:07:36.352403 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77a96295-92e1-4ed4-b6dc-352fda7dd5a5" path="/var/lib/kubelet/pods/77a96295-92e1-4ed4-b6dc-352fda7dd5a5/volumes" Jan 28 16:07:36 crc kubenswrapper[4811]: I0128 16:07:36.541782 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:07:36 crc kubenswrapper[4811]: I0128 16:07:36.558450 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:07:36 crc kubenswrapper[4811]: I0128 16:07:36.750902 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-run-httpd\") pod \"6f349cc2-fb0d-4cf9-8280-f4007c57ebd2\" (UID: \"6f349cc2-fb0d-4cf9-8280-f4007c57ebd2\") " Jan 28 16:07:36 crc kubenswrapper[4811]: I0128 16:07:36.750985 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-combined-ca-bundle\") pod \"6f349cc2-fb0d-4cf9-8280-f4007c57ebd2\" (UID: \"6f349cc2-fb0d-4cf9-8280-f4007c57ebd2\") " Jan 28 16:07:36 crc kubenswrapper[4811]: I0128 16:07:36.751043 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gdqch\" (UniqueName: \"kubernetes.io/projected/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-kube-api-access-gdqch\") pod \"6f349cc2-fb0d-4cf9-8280-f4007c57ebd2\" (UID: \"6f349cc2-fb0d-4cf9-8280-f4007c57ebd2\") " Jan 28 16:07:36 crc kubenswrapper[4811]: I0128 16:07:36.751098 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-sg-core-conf-yaml\") pod \"6f349cc2-fb0d-4cf9-8280-f4007c57ebd2\" (UID: \"6f349cc2-fb0d-4cf9-8280-f4007c57ebd2\") " Jan 28 16:07:36 crc kubenswrapper[4811]: I0128 16:07:36.751144 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-log-httpd\") pod \"6f349cc2-fb0d-4cf9-8280-f4007c57ebd2\" (UID: \"6f349cc2-fb0d-4cf9-8280-f4007c57ebd2\") " Jan 28 16:07:36 crc kubenswrapper[4811]: I0128 16:07:36.751172 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-scripts\") pod \"6f349cc2-fb0d-4cf9-8280-f4007c57ebd2\" (UID: \"6f349cc2-fb0d-4cf9-8280-f4007c57ebd2\") " Jan 28 16:07:36 crc kubenswrapper[4811]: I0128 16:07:36.751232 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-config-data\") pod \"6f349cc2-fb0d-4cf9-8280-f4007c57ebd2\" (UID: \"6f349cc2-fb0d-4cf9-8280-f4007c57ebd2\") " Jan 28 16:07:36 crc kubenswrapper[4811]: I0128 16:07:36.751260 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "6f349cc2-fb0d-4cf9-8280-f4007c57ebd2" (UID: "6f349cc2-fb0d-4cf9-8280-f4007c57ebd2"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:07:36 crc kubenswrapper[4811]: I0128 16:07:36.751544 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "6f349cc2-fb0d-4cf9-8280-f4007c57ebd2" (UID: "6f349cc2-fb0d-4cf9-8280-f4007c57ebd2"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:07:36 crc kubenswrapper[4811]: I0128 16:07:36.751758 4811 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:36 crc kubenswrapper[4811]: I0128 16:07:36.751786 4811 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:36 crc kubenswrapper[4811]: I0128 16:07:36.764818 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6f349cc2-fb0d-4cf9-8280-f4007c57ebd2" (UID: "6f349cc2-fb0d-4cf9-8280-f4007c57ebd2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:07:36 crc kubenswrapper[4811]: I0128 16:07:36.764865 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "6f349cc2-fb0d-4cf9-8280-f4007c57ebd2" (UID: "6f349cc2-fb0d-4cf9-8280-f4007c57ebd2"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:07:36 crc kubenswrapper[4811]: I0128 16:07:36.764934 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-kube-api-access-gdqch" (OuterVolumeSpecName: "kube-api-access-gdqch") pod "6f349cc2-fb0d-4cf9-8280-f4007c57ebd2" (UID: "6f349cc2-fb0d-4cf9-8280-f4007c57ebd2"). InnerVolumeSpecName "kube-api-access-gdqch". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:07:36 crc kubenswrapper[4811]: I0128 16:07:36.770777 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-config-data" (OuterVolumeSpecName: "config-data") pod "6f349cc2-fb0d-4cf9-8280-f4007c57ebd2" (UID: "6f349cc2-fb0d-4cf9-8280-f4007c57ebd2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:07:36 crc kubenswrapper[4811]: I0128 16:07:36.773223 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-scripts" (OuterVolumeSpecName: "scripts") pod "6f349cc2-fb0d-4cf9-8280-f4007c57ebd2" (UID: "6f349cc2-fb0d-4cf9-8280-f4007c57ebd2"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:07:36 crc kubenswrapper[4811]: I0128 16:07:36.853384 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:36 crc kubenswrapper[4811]: I0128 16:07:36.853914 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:36 crc kubenswrapper[4811]: I0128 16:07:36.853932 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:36 crc kubenswrapper[4811]: I0128 16:07:36.853946 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gdqch\" (UniqueName: \"kubernetes.io/projected/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-kube-api-access-gdqch\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:36 crc kubenswrapper[4811]: I0128 16:07:36.853961 4811 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:37 crc kubenswrapper[4811]: I0128 16:07:37.562094 4811 generic.go:334] "Generic (PLEG): container finished" podID="677fa641-ca48-40fe-8b91-40147f1376e5" containerID="f19e5ad3c28bace5d654226d41313f51baa5ae3b141172e290e57574882f79e5" exitCode=0 Jan 28 16:07:37 crc kubenswrapper[4811]: I0128 16:07:37.562188 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-nh6rs" event={"ID":"677fa641-ca48-40fe-8b91-40147f1376e5","Type":"ContainerDied","Data":"f19e5ad3c28bace5d654226d41313f51baa5ae3b141172e290e57574882f79e5"} Jan 28 16:07:37 crc kubenswrapper[4811]: I0128 16:07:37.562215 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:07:37 crc kubenswrapper[4811]: I0128 16:07:37.649143 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:07:37 crc kubenswrapper[4811]: I0128 16:07:37.669004 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:07:37 crc kubenswrapper[4811]: I0128 16:07:37.677027 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:07:37 crc kubenswrapper[4811]: I0128 16:07:37.678929 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:07:37 crc kubenswrapper[4811]: I0128 16:07:37.680909 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 28 16:07:37 crc kubenswrapper[4811]: I0128 16:07:37.682692 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 28 16:07:37 crc kubenswrapper[4811]: I0128 16:07:37.685940 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:07:37 crc kubenswrapper[4811]: I0128 16:07:37.871841 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28212031-6289-428a-bbed-eb898d9a07a4-scripts\") pod \"ceilometer-0\" (UID: \"28212031-6289-428a-bbed-eb898d9a07a4\") " pod="openstack/ceilometer-0" Jan 28 16:07:37 crc kubenswrapper[4811]: I0128 16:07:37.871886 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sr4s9\" (UniqueName: \"kubernetes.io/projected/28212031-6289-428a-bbed-eb898d9a07a4-kube-api-access-sr4s9\") pod \"ceilometer-0\" (UID: \"28212031-6289-428a-bbed-eb898d9a07a4\") " pod="openstack/ceilometer-0" Jan 28 16:07:37 crc kubenswrapper[4811]: I0128 16:07:37.871941 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28212031-6289-428a-bbed-eb898d9a07a4-config-data\") pod \"ceilometer-0\" (UID: \"28212031-6289-428a-bbed-eb898d9a07a4\") " pod="openstack/ceilometer-0" Jan 28 16:07:37 crc kubenswrapper[4811]: I0128 16:07:37.871975 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/28212031-6289-428a-bbed-eb898d9a07a4-log-httpd\") pod \"ceilometer-0\" (UID: \"28212031-6289-428a-bbed-eb898d9a07a4\") " pod="openstack/ceilometer-0" Jan 28 16:07:37 crc kubenswrapper[4811]: I0128 16:07:37.872288 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28212031-6289-428a-bbed-eb898d9a07a4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"28212031-6289-428a-bbed-eb898d9a07a4\") " pod="openstack/ceilometer-0" Jan 28 16:07:37 crc kubenswrapper[4811]: I0128 16:07:37.872313 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/28212031-6289-428a-bbed-eb898d9a07a4-run-httpd\") pod \"ceilometer-0\" (UID: \"28212031-6289-428a-bbed-eb898d9a07a4\") " pod="openstack/ceilometer-0" Jan 28 16:07:37 crc kubenswrapper[4811]: I0128 16:07:37.872330 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/28212031-6289-428a-bbed-eb898d9a07a4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"28212031-6289-428a-bbed-eb898d9a07a4\") " pod="openstack/ceilometer-0" Jan 28 16:07:37 crc kubenswrapper[4811]: I0128 16:07:37.974290 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28212031-6289-428a-bbed-eb898d9a07a4-scripts\") pod \"ceilometer-0\" (UID: \"28212031-6289-428a-bbed-eb898d9a07a4\") " pod="openstack/ceilometer-0" Jan 28 16:07:37 crc kubenswrapper[4811]: I0128 16:07:37.974357 4811 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sr4s9\" (UniqueName: \"kubernetes.io/projected/28212031-6289-428a-bbed-eb898d9a07a4-kube-api-access-sr4s9\") pod \"ceilometer-0\" (UID: \"28212031-6289-428a-bbed-eb898d9a07a4\") " pod="openstack/ceilometer-0" Jan 28 16:07:37 crc kubenswrapper[4811]: I0128 16:07:37.974474 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28212031-6289-428a-bbed-eb898d9a07a4-config-data\") pod \"ceilometer-0\" (UID: \"28212031-6289-428a-bbed-eb898d9a07a4\") " pod="openstack/ceilometer-0" Jan 28 16:07:37 crc kubenswrapper[4811]: I0128 16:07:37.974548 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/28212031-6289-428a-bbed-eb898d9a07a4-log-httpd\") pod \"ceilometer-0\" (UID: \"28212031-6289-428a-bbed-eb898d9a07a4\") " pod="openstack/ceilometer-0" Jan 28 16:07:37 crc kubenswrapper[4811]: I0128 16:07:37.974604 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28212031-6289-428a-bbed-eb898d9a07a4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"28212031-6289-428a-bbed-eb898d9a07a4\") " pod="openstack/ceilometer-0" Jan 28 16:07:37 crc kubenswrapper[4811]: I0128 16:07:37.974642 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/28212031-6289-428a-bbed-eb898d9a07a4-run-httpd\") pod \"ceilometer-0\" (UID: \"28212031-6289-428a-bbed-eb898d9a07a4\") " pod="openstack/ceilometer-0" Jan 28 16:07:37 crc kubenswrapper[4811]: I0128 16:07:37.974664 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/28212031-6289-428a-bbed-eb898d9a07a4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"28212031-6289-428a-bbed-eb898d9a07a4\") " pod="openstack/ceilometer-0" Jan 28 16:07:37 crc kubenswrapper[4811]: I0128 16:07:37.975206 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/28212031-6289-428a-bbed-eb898d9a07a4-run-httpd\") pod \"ceilometer-0\" (UID: \"28212031-6289-428a-bbed-eb898d9a07a4\") " pod="openstack/ceilometer-0" Jan 28 16:07:37 crc kubenswrapper[4811]: I0128 16:07:37.975515 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/28212031-6289-428a-bbed-eb898d9a07a4-log-httpd\") pod \"ceilometer-0\" (UID: \"28212031-6289-428a-bbed-eb898d9a07a4\") " pod="openstack/ceilometer-0" Jan 28 16:07:37 crc kubenswrapper[4811]: I0128 16:07:37.979724 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28212031-6289-428a-bbed-eb898d9a07a4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"28212031-6289-428a-bbed-eb898d9a07a4\") " pod="openstack/ceilometer-0" Jan 28 16:07:37 crc kubenswrapper[4811]: I0128 16:07:37.980020 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28212031-6289-428a-bbed-eb898d9a07a4-config-data\") pod \"ceilometer-0\" (UID: \"28212031-6289-428a-bbed-eb898d9a07a4\") " pod="openstack/ceilometer-0" Jan 28 16:07:37 crc kubenswrapper[4811]: I0128 16:07:37.980042 4811 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28212031-6289-428a-bbed-eb898d9a07a4-scripts\") pod \"ceilometer-0\" (UID: \"28212031-6289-428a-bbed-eb898d9a07a4\") " pod="openstack/ceilometer-0" Jan 28 16:07:37 crc kubenswrapper[4811]: I0128 16:07:37.982651 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/28212031-6289-428a-bbed-eb898d9a07a4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"28212031-6289-428a-bbed-eb898d9a07a4\") " pod="openstack/ceilometer-0" Jan 28 16:07:38 crc kubenswrapper[4811]: I0128 16:07:38.002939 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sr4s9\" (UniqueName: \"kubernetes.io/projected/28212031-6289-428a-bbed-eb898d9a07a4-kube-api-access-sr4s9\") pod \"ceilometer-0\" (UID: \"28212031-6289-428a-bbed-eb898d9a07a4\") " pod="openstack/ceilometer-0" Jan 28 16:07:38 crc kubenswrapper[4811]: I0128 16:07:38.297697 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:07:38 crc kubenswrapper[4811]: I0128 16:07:38.350379 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f349cc2-fb0d-4cf9-8280-f4007c57ebd2" path="/var/lib/kubelet/pods/6f349cc2-fb0d-4cf9-8280-f4007c57ebd2/volumes" Jan 28 16:07:38 crc kubenswrapper[4811]: I0128 16:07:38.737549 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:07:38 crc kubenswrapper[4811]: I0128 16:07:38.823036 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-nh6rs" Jan 28 16:07:38 crc kubenswrapper[4811]: I0128 16:07:38.891928 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lc8rd\" (UniqueName: \"kubernetes.io/projected/677fa641-ca48-40fe-8b91-40147f1376e5-kube-api-access-lc8rd\") pod \"677fa641-ca48-40fe-8b91-40147f1376e5\" (UID: \"677fa641-ca48-40fe-8b91-40147f1376e5\") " Jan 28 16:07:38 crc kubenswrapper[4811]: I0128 16:07:38.898506 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/677fa641-ca48-40fe-8b91-40147f1376e5-kube-api-access-lc8rd" (OuterVolumeSpecName: "kube-api-access-lc8rd") pod "677fa641-ca48-40fe-8b91-40147f1376e5" (UID: "677fa641-ca48-40fe-8b91-40147f1376e5"). InnerVolumeSpecName "kube-api-access-lc8rd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:07:38 crc kubenswrapper[4811]: I0128 16:07:38.993286 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/677fa641-ca48-40fe-8b91-40147f1376e5-db-sync-config-data\") pod \"677fa641-ca48-40fe-8b91-40147f1376e5\" (UID: \"677fa641-ca48-40fe-8b91-40147f1376e5\") " Jan 28 16:07:38 crc kubenswrapper[4811]: I0128 16:07:38.993368 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/677fa641-ca48-40fe-8b91-40147f1376e5-combined-ca-bundle\") pod \"677fa641-ca48-40fe-8b91-40147f1376e5\" (UID: \"677fa641-ca48-40fe-8b91-40147f1376e5\") " Jan 28 16:07:38 crc kubenswrapper[4811]: I0128 16:07:38.993818 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lc8rd\" (UniqueName: \"kubernetes.io/projected/677fa641-ca48-40fe-8b91-40147f1376e5-kube-api-access-lc8rd\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:38 crc kubenswrapper[4811]: I0128 16:07:38.997643 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/677fa641-ca48-40fe-8b91-40147f1376e5-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "677fa641-ca48-40fe-8b91-40147f1376e5" (UID: "677fa641-ca48-40fe-8b91-40147f1376e5"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:07:39 crc kubenswrapper[4811]: I0128 16:07:39.019251 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/677fa641-ca48-40fe-8b91-40147f1376e5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "677fa641-ca48-40fe-8b91-40147f1376e5" (UID: "677fa641-ca48-40fe-8b91-40147f1376e5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:07:39 crc kubenswrapper[4811]: I0128 16:07:39.095803 4811 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/677fa641-ca48-40fe-8b91-40147f1376e5-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:39 crc kubenswrapper[4811]: I0128 16:07:39.095846 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/677fa641-ca48-40fe-8b91-40147f1376e5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:39 crc kubenswrapper[4811]: I0128 16:07:39.579216 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"28212031-6289-428a-bbed-eb898d9a07a4","Type":"ContainerStarted","Data":"43aed9f889c4172baaaf63fb610f2a0beac8c34acd1d49f35a8fa71bed9f6d23"} Jan 28 16:07:39 crc kubenswrapper[4811]: I0128 16:07:39.582130 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-nh6rs" event={"ID":"677fa641-ca48-40fe-8b91-40147f1376e5","Type":"ContainerDied","Data":"fa4f5e876aa27c3102b8369872e598c2a55b9bca6d184304a850bfaebfc2434d"} Jan 28 16:07:39 crc kubenswrapper[4811]: I0128 16:07:39.582167 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fa4f5e876aa27c3102b8369872e598c2a55b9bca6d184304a850bfaebfc2434d" Jan 28 16:07:39 crc kubenswrapper[4811]: I0128 16:07:39.582228 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-nh6rs" Jan 28 16:07:39 crc kubenswrapper[4811]: I0128 16:07:39.865946 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-69cfb4f585-8qngp"] Jan 28 16:07:39 crc kubenswrapper[4811]: E0128 16:07:39.866372 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="677fa641-ca48-40fe-8b91-40147f1376e5" containerName="barbican-db-sync" Jan 28 16:07:39 crc kubenswrapper[4811]: I0128 16:07:39.866384 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="677fa641-ca48-40fe-8b91-40147f1376e5" containerName="barbican-db-sync" Jan 28 16:07:39 crc kubenswrapper[4811]: I0128 16:07:39.866586 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="677fa641-ca48-40fe-8b91-40147f1376e5" containerName="barbican-db-sync" Jan 28 16:07:39 crc kubenswrapper[4811]: I0128 16:07:39.867371 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-69cfb4f585-8qngp" Jan 28 16:07:39 crc kubenswrapper[4811]: I0128 16:07:39.873552 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Jan 28 16:07:39 crc kubenswrapper[4811]: I0128 16:07:39.878197 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Jan 28 16:07:39 crc kubenswrapper[4811]: I0128 16:07:39.878476 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-jrwm4" Jan 28 16:07:39 crc kubenswrapper[4811]: I0128 16:07:39.903925 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-69cfb4f585-8qngp"] Jan 28 16:07:39 crc kubenswrapper[4811]: I0128 16:07:39.913998 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0138ad61-fb17-46c7-bdd3-c65f15e2e186-config-data-custom\") pod \"barbican-worker-69cfb4f585-8qngp\" (UID: \"0138ad61-fb17-46c7-bdd3-c65f15e2e186\") " pod="openstack/barbican-worker-69cfb4f585-8qngp" Jan 28 16:07:39 crc kubenswrapper[4811]: I0128 16:07:39.914096 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0138ad61-fb17-46c7-bdd3-c65f15e2e186-combined-ca-bundle\") pod \"barbican-worker-69cfb4f585-8qngp\" (UID: \"0138ad61-fb17-46c7-bdd3-c65f15e2e186\") " pod="openstack/barbican-worker-69cfb4f585-8qngp" Jan 28 16:07:39 crc kubenswrapper[4811]: I0128 16:07:39.914143 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0138ad61-fb17-46c7-bdd3-c65f15e2e186-logs\") pod \"barbican-worker-69cfb4f585-8qngp\" (UID: \"0138ad61-fb17-46c7-bdd3-c65f15e2e186\") " pod="openstack/barbican-worker-69cfb4f585-8qngp" Jan 28 16:07:39 crc kubenswrapper[4811]: I0128 16:07:39.914164 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9tmj\" (UniqueName: \"kubernetes.io/projected/0138ad61-fb17-46c7-bdd3-c65f15e2e186-kube-api-access-x9tmj\") pod \"barbican-worker-69cfb4f585-8qngp\" (UID: \"0138ad61-fb17-46c7-bdd3-c65f15e2e186\") " pod="openstack/barbican-worker-69cfb4f585-8qngp" Jan 28 16:07:39 crc kubenswrapper[4811]: I0128 16:07:39.914326 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/0138ad61-fb17-46c7-bdd3-c65f15e2e186-config-data\") pod \"barbican-worker-69cfb4f585-8qngp\" (UID: \"0138ad61-fb17-46c7-bdd3-c65f15e2e186\") " pod="openstack/barbican-worker-69cfb4f585-8qngp" Jan 28 16:07:39 crc kubenswrapper[4811]: I0128 16:07:39.922045 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-5bd9c6c974-98zpp"] Jan 28 16:07:39 crc kubenswrapper[4811]: I0128 16:07:39.923616 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-5bd9c6c974-98zpp" Jan 28 16:07:39 crc kubenswrapper[4811]: I0128 16:07:39.932869 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Jan 28 16:07:39 crc kubenswrapper[4811]: I0128 16:07:39.983117 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-5bd9c6c974-98zpp"] Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.026343 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0138ad61-fb17-46c7-bdd3-c65f15e2e186-combined-ca-bundle\") pod \"barbican-worker-69cfb4f585-8qngp\" (UID: \"0138ad61-fb17-46c7-bdd3-c65f15e2e186\") " pod="openstack/barbican-worker-69cfb4f585-8qngp" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.026451 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0138ad61-fb17-46c7-bdd3-c65f15e2e186-logs\") pod \"barbican-worker-69cfb4f585-8qngp\" (UID: \"0138ad61-fb17-46c7-bdd3-c65f15e2e186\") " pod="openstack/barbican-worker-69cfb4f585-8qngp" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.026477 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9tmj\" (UniqueName: \"kubernetes.io/projected/0138ad61-fb17-46c7-bdd3-c65f15e2e186-kube-api-access-x9tmj\") pod \"barbican-worker-69cfb4f585-8qngp\" (UID: \"0138ad61-fb17-46c7-bdd3-c65f15e2e186\") " pod="openstack/barbican-worker-69cfb4f585-8qngp" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.026537 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0138ad61-fb17-46c7-bdd3-c65f15e2e186-config-data\") pod \"barbican-worker-69cfb4f585-8qngp\" (UID: \"0138ad61-fb17-46c7-bdd3-c65f15e2e186\") " pod="openstack/barbican-worker-69cfb4f585-8qngp" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.026573 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0138ad61-fb17-46c7-bdd3-c65f15e2e186-config-data-custom\") pod \"barbican-worker-69cfb4f585-8qngp\" (UID: \"0138ad61-fb17-46c7-bdd3-c65f15e2e186\") " pod="openstack/barbican-worker-69cfb4f585-8qngp" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.026954 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0138ad61-fb17-46c7-bdd3-c65f15e2e186-logs\") pod \"barbican-worker-69cfb4f585-8qngp\" (UID: \"0138ad61-fb17-46c7-bdd3-c65f15e2e186\") " pod="openstack/barbican-worker-69cfb4f585-8qngp" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.036836 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0138ad61-fb17-46c7-bdd3-c65f15e2e186-config-data\") 
pod \"barbican-worker-69cfb4f585-8qngp\" (UID: \"0138ad61-fb17-46c7-bdd3-c65f15e2e186\") " pod="openstack/barbican-worker-69cfb4f585-8qngp" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.040036 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0138ad61-fb17-46c7-bdd3-c65f15e2e186-combined-ca-bundle\") pod \"barbican-worker-69cfb4f585-8qngp\" (UID: \"0138ad61-fb17-46c7-bdd3-c65f15e2e186\") " pod="openstack/barbican-worker-69cfb4f585-8qngp" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.041670 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0138ad61-fb17-46c7-bdd3-c65f15e2e186-config-data-custom\") pod \"barbican-worker-69cfb4f585-8qngp\" (UID: \"0138ad61-fb17-46c7-bdd3-c65f15e2e186\") " pod="openstack/barbican-worker-69cfb4f585-8qngp" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.089108 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9tmj\" (UniqueName: \"kubernetes.io/projected/0138ad61-fb17-46c7-bdd3-c65f15e2e186-kube-api-access-x9tmj\") pod \"barbican-worker-69cfb4f585-8qngp\" (UID: \"0138ad61-fb17-46c7-bdd3-c65f15e2e186\") " pod="openstack/barbican-worker-69cfb4f585-8qngp" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.123215 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86bf77c6df-67qct"] Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.124659 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86bf77c6df-67qct" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.128246 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1761893b-d911-4596-b0b3-ce2d25d0384f-logs\") pod \"barbican-keystone-listener-5bd9c6c974-98zpp\" (UID: \"1761893b-d911-4596-b0b3-ce2d25d0384f\") " pod="openstack/barbican-keystone-listener-5bd9c6c974-98zpp" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.128306 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1761893b-d911-4596-b0b3-ce2d25d0384f-combined-ca-bundle\") pod \"barbican-keystone-listener-5bd9c6c974-98zpp\" (UID: \"1761893b-d911-4596-b0b3-ce2d25d0384f\") " pod="openstack/barbican-keystone-listener-5bd9c6c974-98zpp" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.128353 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1761893b-d911-4596-b0b3-ce2d25d0384f-config-data\") pod \"barbican-keystone-listener-5bd9c6c974-98zpp\" (UID: \"1761893b-d911-4596-b0b3-ce2d25d0384f\") " pod="openstack/barbican-keystone-listener-5bd9c6c974-98zpp" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.128408 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2tpdv\" (UniqueName: \"kubernetes.io/projected/1761893b-d911-4596-b0b3-ce2d25d0384f-kube-api-access-2tpdv\") pod \"barbican-keystone-listener-5bd9c6c974-98zpp\" (UID: \"1761893b-d911-4596-b0b3-ce2d25d0384f\") " pod="openstack/barbican-keystone-listener-5bd9c6c974-98zpp" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.128462 4811 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1761893b-d911-4596-b0b3-ce2d25d0384f-config-data-custom\") pod \"barbican-keystone-listener-5bd9c6c974-98zpp\" (UID: \"1761893b-d911-4596-b0b3-ce2d25d0384f\") " pod="openstack/barbican-keystone-listener-5bd9c6c974-98zpp" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.135547 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86bf77c6df-67qct"] Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.189125 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-69cfb4f585-8qngp" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.238192 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1761893b-d911-4596-b0b3-ce2d25d0384f-config-data-custom\") pod \"barbican-keystone-listener-5bd9c6c974-98zpp\" (UID: \"1761893b-d911-4596-b0b3-ce2d25d0384f\") " pod="openstack/barbican-keystone-listener-5bd9c6c974-98zpp" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.238264 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d43f6e19-596f-492a-a1b8-891a70fb2009-dns-svc\") pod \"dnsmasq-dns-86bf77c6df-67qct\" (UID: \"d43f6e19-596f-492a-a1b8-891a70fb2009\") " pod="openstack/dnsmasq-dns-86bf77c6df-67qct" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.238332 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1761893b-d911-4596-b0b3-ce2d25d0384f-logs\") pod \"barbican-keystone-listener-5bd9c6c974-98zpp\" (UID: \"1761893b-d911-4596-b0b3-ce2d25d0384f\") " pod="openstack/barbican-keystone-listener-5bd9c6c974-98zpp" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.238367 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d43f6e19-596f-492a-a1b8-891a70fb2009-dns-swift-storage-0\") pod \"dnsmasq-dns-86bf77c6df-67qct\" (UID: \"d43f6e19-596f-492a-a1b8-891a70fb2009\") " pod="openstack/dnsmasq-dns-86bf77c6df-67qct" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.238396 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1761893b-d911-4596-b0b3-ce2d25d0384f-combined-ca-bundle\") pod \"barbican-keystone-listener-5bd9c6c974-98zpp\" (UID: \"1761893b-d911-4596-b0b3-ce2d25d0384f\") " pod="openstack/barbican-keystone-listener-5bd9c6c974-98zpp" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.238422 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7k6dk\" (UniqueName: \"kubernetes.io/projected/d43f6e19-596f-492a-a1b8-891a70fb2009-kube-api-access-7k6dk\") pod \"dnsmasq-dns-86bf77c6df-67qct\" (UID: \"d43f6e19-596f-492a-a1b8-891a70fb2009\") " pod="openstack/dnsmasq-dns-86bf77c6df-67qct" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.238465 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d43f6e19-596f-492a-a1b8-891a70fb2009-ovsdbserver-nb\") pod \"dnsmasq-dns-86bf77c6df-67qct\" (UID: \"d43f6e19-596f-492a-a1b8-891a70fb2009\") " 
pod="openstack/dnsmasq-dns-86bf77c6df-67qct" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.238488 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d43f6e19-596f-492a-a1b8-891a70fb2009-ovsdbserver-sb\") pod \"dnsmasq-dns-86bf77c6df-67qct\" (UID: \"d43f6e19-596f-492a-a1b8-891a70fb2009\") " pod="openstack/dnsmasq-dns-86bf77c6df-67qct" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.238511 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d43f6e19-596f-492a-a1b8-891a70fb2009-config\") pod \"dnsmasq-dns-86bf77c6df-67qct\" (UID: \"d43f6e19-596f-492a-a1b8-891a70fb2009\") " pod="openstack/dnsmasq-dns-86bf77c6df-67qct" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.238549 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1761893b-d911-4596-b0b3-ce2d25d0384f-config-data\") pod \"barbican-keystone-listener-5bd9c6c974-98zpp\" (UID: \"1761893b-d911-4596-b0b3-ce2d25d0384f\") " pod="openstack/barbican-keystone-listener-5bd9c6c974-98zpp" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.238620 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2tpdv\" (UniqueName: \"kubernetes.io/projected/1761893b-d911-4596-b0b3-ce2d25d0384f-kube-api-access-2tpdv\") pod \"barbican-keystone-listener-5bd9c6c974-98zpp\" (UID: \"1761893b-d911-4596-b0b3-ce2d25d0384f\") " pod="openstack/barbican-keystone-listener-5bd9c6c974-98zpp" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.244904 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1761893b-d911-4596-b0b3-ce2d25d0384f-logs\") pod \"barbican-keystone-listener-5bd9c6c974-98zpp\" (UID: \"1761893b-d911-4596-b0b3-ce2d25d0384f\") " pod="openstack/barbican-keystone-listener-5bd9c6c974-98zpp" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.253190 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1761893b-d911-4596-b0b3-ce2d25d0384f-combined-ca-bundle\") pod \"barbican-keystone-listener-5bd9c6c974-98zpp\" (UID: \"1761893b-d911-4596-b0b3-ce2d25d0384f\") " pod="openstack/barbican-keystone-listener-5bd9c6c974-98zpp" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.254364 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1761893b-d911-4596-b0b3-ce2d25d0384f-config-data-custom\") pod \"barbican-keystone-listener-5bd9c6c974-98zpp\" (UID: \"1761893b-d911-4596-b0b3-ce2d25d0384f\") " pod="openstack/barbican-keystone-listener-5bd9c6c974-98zpp" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.273032 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2tpdv\" (UniqueName: \"kubernetes.io/projected/1761893b-d911-4596-b0b3-ce2d25d0384f-kube-api-access-2tpdv\") pod \"barbican-keystone-listener-5bd9c6c974-98zpp\" (UID: \"1761893b-d911-4596-b0b3-ce2d25d0384f\") " pod="openstack/barbican-keystone-listener-5bd9c6c974-98zpp" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.275443 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/1761893b-d911-4596-b0b3-ce2d25d0384f-config-data\") pod \"barbican-keystone-listener-5bd9c6c974-98zpp\" (UID: \"1761893b-d911-4596-b0b3-ce2d25d0384f\") " pod="openstack/barbican-keystone-listener-5bd9c6c974-98zpp" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.297423 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-56f5cb95d8-wm7lj"] Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.310933 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-56f5cb95d8-wm7lj" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.318340 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.325087 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-56f5cb95d8-wm7lj"] Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.356080 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc42d256-a716-4ae2-8107-78e247e0efe1-logs\") pod \"barbican-api-56f5cb95d8-wm7lj\" (UID: \"dc42d256-a716-4ae2-8107-78e247e0efe1\") " pod="openstack/barbican-api-56f5cb95d8-wm7lj" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.356225 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcpn7\" (UniqueName: \"kubernetes.io/projected/dc42d256-a716-4ae2-8107-78e247e0efe1-kube-api-access-qcpn7\") pod \"barbican-api-56f5cb95d8-wm7lj\" (UID: \"dc42d256-a716-4ae2-8107-78e247e0efe1\") " pod="openstack/barbican-api-56f5cb95d8-wm7lj" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.356279 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d43f6e19-596f-492a-a1b8-891a70fb2009-dns-swift-storage-0\") pod \"dnsmasq-dns-86bf77c6df-67qct\" (UID: \"d43f6e19-596f-492a-a1b8-891a70fb2009\") " pod="openstack/dnsmasq-dns-86bf77c6df-67qct" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.356314 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7k6dk\" (UniqueName: \"kubernetes.io/projected/d43f6e19-596f-492a-a1b8-891a70fb2009-kube-api-access-7k6dk\") pod \"dnsmasq-dns-86bf77c6df-67qct\" (UID: \"d43f6e19-596f-492a-a1b8-891a70fb2009\") " pod="openstack/dnsmasq-dns-86bf77c6df-67qct" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.356339 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d43f6e19-596f-492a-a1b8-891a70fb2009-ovsdbserver-nb\") pod \"dnsmasq-dns-86bf77c6df-67qct\" (UID: \"d43f6e19-596f-492a-a1b8-891a70fb2009\") " pod="openstack/dnsmasq-dns-86bf77c6df-67qct" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.356370 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d43f6e19-596f-492a-a1b8-891a70fb2009-ovsdbserver-sb\") pod \"dnsmasq-dns-86bf77c6df-67qct\" (UID: \"d43f6e19-596f-492a-a1b8-891a70fb2009\") " pod="openstack/dnsmasq-dns-86bf77c6df-67qct" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.356388 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d43f6e19-596f-492a-a1b8-891a70fb2009-config\") 
pod \"dnsmasq-dns-86bf77c6df-67qct\" (UID: \"d43f6e19-596f-492a-a1b8-891a70fb2009\") " pod="openstack/dnsmasq-dns-86bf77c6df-67qct" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.356460 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc42d256-a716-4ae2-8107-78e247e0efe1-combined-ca-bundle\") pod \"barbican-api-56f5cb95d8-wm7lj\" (UID: \"dc42d256-a716-4ae2-8107-78e247e0efe1\") " pod="openstack/barbican-api-56f5cb95d8-wm7lj" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.356565 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dc42d256-a716-4ae2-8107-78e247e0efe1-config-data-custom\") pod \"barbican-api-56f5cb95d8-wm7lj\" (UID: \"dc42d256-a716-4ae2-8107-78e247e0efe1\") " pod="openstack/barbican-api-56f5cb95d8-wm7lj" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.356671 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc42d256-a716-4ae2-8107-78e247e0efe1-config-data\") pod \"barbican-api-56f5cb95d8-wm7lj\" (UID: \"dc42d256-a716-4ae2-8107-78e247e0efe1\") " pod="openstack/barbican-api-56f5cb95d8-wm7lj" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.356747 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d43f6e19-596f-492a-a1b8-891a70fb2009-dns-svc\") pod \"dnsmasq-dns-86bf77c6df-67qct\" (UID: \"d43f6e19-596f-492a-a1b8-891a70fb2009\") " pod="openstack/dnsmasq-dns-86bf77c6df-67qct" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.357585 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d43f6e19-596f-492a-a1b8-891a70fb2009-dns-svc\") pod \"dnsmasq-dns-86bf77c6df-67qct\" (UID: \"d43f6e19-596f-492a-a1b8-891a70fb2009\") " pod="openstack/dnsmasq-dns-86bf77c6df-67qct" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.358117 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d43f6e19-596f-492a-a1b8-891a70fb2009-ovsdbserver-nb\") pod \"dnsmasq-dns-86bf77c6df-67qct\" (UID: \"d43f6e19-596f-492a-a1b8-891a70fb2009\") " pod="openstack/dnsmasq-dns-86bf77c6df-67qct" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.359851 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d43f6e19-596f-492a-a1b8-891a70fb2009-dns-swift-storage-0\") pod \"dnsmasq-dns-86bf77c6df-67qct\" (UID: \"d43f6e19-596f-492a-a1b8-891a70fb2009\") " pod="openstack/dnsmasq-dns-86bf77c6df-67qct" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.360145 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d43f6e19-596f-492a-a1b8-891a70fb2009-config\") pod \"dnsmasq-dns-86bf77c6df-67qct\" (UID: \"d43f6e19-596f-492a-a1b8-891a70fb2009\") " pod="openstack/dnsmasq-dns-86bf77c6df-67qct" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.372667 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d43f6e19-596f-492a-a1b8-891a70fb2009-ovsdbserver-sb\") pod \"dnsmasq-dns-86bf77c6df-67qct\" (UID: 
\"d43f6e19-596f-492a-a1b8-891a70fb2009\") " pod="openstack/dnsmasq-dns-86bf77c6df-67qct" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.389210 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7k6dk\" (UniqueName: \"kubernetes.io/projected/d43f6e19-596f-492a-a1b8-891a70fb2009-kube-api-access-7k6dk\") pod \"dnsmasq-dns-86bf77c6df-67qct\" (UID: \"d43f6e19-596f-492a-a1b8-891a70fb2009\") " pod="openstack/dnsmasq-dns-86bf77c6df-67qct" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.470944 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc42d256-a716-4ae2-8107-78e247e0efe1-logs\") pod \"barbican-api-56f5cb95d8-wm7lj\" (UID: \"dc42d256-a716-4ae2-8107-78e247e0efe1\") " pod="openstack/barbican-api-56f5cb95d8-wm7lj" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.471052 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcpn7\" (UniqueName: \"kubernetes.io/projected/dc42d256-a716-4ae2-8107-78e247e0efe1-kube-api-access-qcpn7\") pod \"barbican-api-56f5cb95d8-wm7lj\" (UID: \"dc42d256-a716-4ae2-8107-78e247e0efe1\") " pod="openstack/barbican-api-56f5cb95d8-wm7lj" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.471147 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc42d256-a716-4ae2-8107-78e247e0efe1-combined-ca-bundle\") pod \"barbican-api-56f5cb95d8-wm7lj\" (UID: \"dc42d256-a716-4ae2-8107-78e247e0efe1\") " pod="openstack/barbican-api-56f5cb95d8-wm7lj" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.471569 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc42d256-a716-4ae2-8107-78e247e0efe1-logs\") pod \"barbican-api-56f5cb95d8-wm7lj\" (UID: \"dc42d256-a716-4ae2-8107-78e247e0efe1\") " pod="openstack/barbican-api-56f5cb95d8-wm7lj" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.472275 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dc42d256-a716-4ae2-8107-78e247e0efe1-config-data-custom\") pod \"barbican-api-56f5cb95d8-wm7lj\" (UID: \"dc42d256-a716-4ae2-8107-78e247e0efe1\") " pod="openstack/barbican-api-56f5cb95d8-wm7lj" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.472399 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc42d256-a716-4ae2-8107-78e247e0efe1-config-data\") pod \"barbican-api-56f5cb95d8-wm7lj\" (UID: \"dc42d256-a716-4ae2-8107-78e247e0efe1\") " pod="openstack/barbican-api-56f5cb95d8-wm7lj" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.476041 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dc42d256-a716-4ae2-8107-78e247e0efe1-config-data-custom\") pod \"barbican-api-56f5cb95d8-wm7lj\" (UID: \"dc42d256-a716-4ae2-8107-78e247e0efe1\") " pod="openstack/barbican-api-56f5cb95d8-wm7lj" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.477661 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc42d256-a716-4ae2-8107-78e247e0efe1-combined-ca-bundle\") pod \"barbican-api-56f5cb95d8-wm7lj\" (UID: \"dc42d256-a716-4ae2-8107-78e247e0efe1\") " 
pod="openstack/barbican-api-56f5cb95d8-wm7lj" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.479390 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc42d256-a716-4ae2-8107-78e247e0efe1-config-data\") pod \"barbican-api-56f5cb95d8-wm7lj\" (UID: \"dc42d256-a716-4ae2-8107-78e247e0efe1\") " pod="openstack/barbican-api-56f5cb95d8-wm7lj" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.481220 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86bf77c6df-67qct" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.499036 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcpn7\" (UniqueName: \"kubernetes.io/projected/dc42d256-a716-4ae2-8107-78e247e0efe1-kube-api-access-qcpn7\") pod \"barbican-api-56f5cb95d8-wm7lj\" (UID: \"dc42d256-a716-4ae2-8107-78e247e0efe1\") " pod="openstack/barbican-api-56f5cb95d8-wm7lj" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.554610 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-5bd9c6c974-98zpp" Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.602054 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"28212031-6289-428a-bbed-eb898d9a07a4","Type":"ContainerStarted","Data":"3bf14c9bb656844812c98e50e0de0f55f868753d6514d8d8e59b710f4bcdc136"} Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.602265 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"28212031-6289-428a-bbed-eb898d9a07a4","Type":"ContainerStarted","Data":"73e4a1707d9fc0168adcc7f4b1504a31e255ed1bbcae448f41aa0cf72655d77d"} Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.678591 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-56f5cb95d8-wm7lj"
Jan 28 16:07:40 crc kubenswrapper[4811]: I0128 16:07:40.869001 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-69cfb4f585-8qngp"]
Jan 28 16:07:40 crc kubenswrapper[4811]: W0128 16:07:40.889387 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0138ad61_fb17_46c7_bdd3_c65f15e2e186.slice/crio-2621fb304b9cbb45829569083d08665b417abddf1cca392888d36f88cabdf03f WatchSource:0}: Error finding container 2621fb304b9cbb45829569083d08665b417abddf1cca392888d36f88cabdf03f: Status 404 returned error can't find the container with id 2621fb304b9cbb45829569083d08665b417abddf1cca392888d36f88cabdf03f
Jan 28 16:07:41 crc kubenswrapper[4811]: I0128 16:07:41.141384 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86bf77c6df-67qct"]
Jan 28 16:07:41 crc kubenswrapper[4811]: I0128 16:07:41.159463 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-5bd9c6c974-98zpp"]
Jan 28 16:07:41 crc kubenswrapper[4811]: I0128 16:07:41.261293 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-56f5cb95d8-wm7lj"]
Jan 28 16:07:41 crc kubenswrapper[4811]: I0128 16:07:41.610072 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-69cfb4f585-8qngp" event={"ID":"0138ad61-fb17-46c7-bdd3-c65f15e2e186","Type":"ContainerStarted","Data":"2621fb304b9cbb45829569083d08665b417abddf1cca392888d36f88cabdf03f"}
Jan 28 16:07:41 crc kubenswrapper[4811]: I0128 16:07:41.611508 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-56f5cb95d8-wm7lj" event={"ID":"dc42d256-a716-4ae2-8107-78e247e0efe1","Type":"ContainerStarted","Data":"0bb77a25b0308a749af93fab382369abc173ccbd80449733cd3219c18edb43d2"}
Jan 28 16:07:41 crc kubenswrapper[4811]: I0128 16:07:41.611552 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-56f5cb95d8-wm7lj" event={"ID":"dc42d256-a716-4ae2-8107-78e247e0efe1","Type":"ContainerStarted","Data":"14ad908cd5e5410480af30585b7f891702eedd0a37ca65fdadd4ea9c741c706a"}
Jan 28 16:07:41 crc kubenswrapper[4811]: I0128 16:07:41.612825 4811 generic.go:334] "Generic (PLEG): container finished" podID="d43f6e19-596f-492a-a1b8-891a70fb2009" containerID="b094ad27de35f1ce294418af9b1c80ef93f013124b5d89fc60b9f77efd735918" exitCode=0
Jan 28 16:07:41 crc kubenswrapper[4811]: I0128 16:07:41.612877 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86bf77c6df-67qct" event={"ID":"d43f6e19-596f-492a-a1b8-891a70fb2009","Type":"ContainerDied","Data":"b094ad27de35f1ce294418af9b1c80ef93f013124b5d89fc60b9f77efd735918"}
Jan 28 16:07:41 crc kubenswrapper[4811]: I0128 16:07:41.612892 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86bf77c6df-67qct" event={"ID":"d43f6e19-596f-492a-a1b8-891a70fb2009","Type":"ContainerStarted","Data":"721f2d4239a688dc7d1af5b70c2339982083a7efce930cedd441280d47f0b918"}
Jan 28 16:07:41 crc kubenswrapper[4811]: I0128 16:07:41.614698 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5bd9c6c974-98zpp" event={"ID":"1761893b-d911-4596-b0b3-ce2d25d0384f","Type":"ContainerStarted","Data":"84123163ec5e9d54bd21c7668c22aa5ccf1b0c888c2e5be9bbc54afe5777706b"}
Jan 28 16:07:41 crc kubenswrapper[4811]: I0128 16:07:41.618793 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"28212031-6289-428a-bbed-eb898d9a07a4","Type":"ContainerStarted","Data":"eaa89e992c4e659d34cb4bf1ae796f0d7929fb771ca4a1c07f2ddb08f4b88fac"}
Jan 28 16:07:42 crc kubenswrapper[4811]: I0128 16:07:42.662815 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-56f5cb95d8-wm7lj" event={"ID":"dc42d256-a716-4ae2-8107-78e247e0efe1","Type":"ContainerStarted","Data":"3fb93715e6a8f6b6e9bd8c64de082cb1ad9518c738746cfd26752b8c3d5b2754"}
Jan 28 16:07:42 crc kubenswrapper[4811]: I0128 16:07:42.663192 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-56f5cb95d8-wm7lj"
Jan 28 16:07:42 crc kubenswrapper[4811]: I0128 16:07:42.663220 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-56f5cb95d8-wm7lj"
Jan 28 16:07:42 crc kubenswrapper[4811]: I0128 16:07:42.671271 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86bf77c6df-67qct" event={"ID":"d43f6e19-596f-492a-a1b8-891a70fb2009","Type":"ContainerStarted","Data":"70328cca8f3d8ad6e4a717aee431938b7fa1d79e6b9d348666afa68c1cad902d"}
Jan 28 16:07:42 crc kubenswrapper[4811]: I0128 16:07:42.671901 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86bf77c6df-67qct"
Jan 28 16:07:42 crc kubenswrapper[4811]: I0128 16:07:42.684784 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-56f5cb95d8-wm7lj" podStartSLOduration=2.684771814 podStartE2EDuration="2.684771814s" podCreationTimestamp="2026-01-28 16:07:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:07:42.684400144 +0000 UTC m=+1355.438763727" watchObservedRunningTime="2026-01-28 16:07:42.684771814 +0000 UTC m=+1355.439135397"
Jan 28 16:07:42 crc kubenswrapper[4811]: I0128 16:07:42.708871 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86bf77c6df-67qct" podStartSLOduration=2.708849095 podStartE2EDuration="2.708849095s" podCreationTimestamp="2026-01-28 16:07:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:07:42.706216854 +0000 UTC m=+1355.460580437" watchObservedRunningTime="2026-01-28 16:07:42.708849095 +0000 UTC m=+1355.463212678"
Jan 28 16:07:43 crc kubenswrapper[4811]: I0128 16:07:43.156356 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-78bd467b68-h25ls"]
Jan 28 16:07:43 crc kubenswrapper[4811]: I0128 16:07:43.158273 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-78bd467b68-h25ls"
Jan 28 16:07:43 crc kubenswrapper[4811]: I0128 16:07:43.163924 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc"
Jan 28 16:07:43 crc kubenswrapper[4811]: I0128 16:07:43.164170 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc"
Jan 28 16:07:43 crc kubenswrapper[4811]: I0128 16:07:43.170326 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-78bd467b68-h25ls"]
Jan 28 16:07:43 crc kubenswrapper[4811]: I0128 16:07:43.327704 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/622ebcc7-b645-4db6-86c0-3546523fb7c7-logs\") pod \"barbican-api-78bd467b68-h25ls\" (UID: \"622ebcc7-b645-4db6-86c0-3546523fb7c7\") " pod="openstack/barbican-api-78bd467b68-h25ls"
Jan 28 16:07:43 crc kubenswrapper[4811]: I0128 16:07:43.327751 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/622ebcc7-b645-4db6-86c0-3546523fb7c7-config-data-custom\") pod \"barbican-api-78bd467b68-h25ls\" (UID: \"622ebcc7-b645-4db6-86c0-3546523fb7c7\") " pod="openstack/barbican-api-78bd467b68-h25ls"
Jan 28 16:07:43 crc kubenswrapper[4811]: I0128 16:07:43.327790 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/622ebcc7-b645-4db6-86c0-3546523fb7c7-public-tls-certs\") pod \"barbican-api-78bd467b68-h25ls\" (UID: \"622ebcc7-b645-4db6-86c0-3546523fb7c7\") " pod="openstack/barbican-api-78bd467b68-h25ls"
Jan 28 16:07:43 crc kubenswrapper[4811]: I0128 16:07:43.327937 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/622ebcc7-b645-4db6-86c0-3546523fb7c7-combined-ca-bundle\") pod \"barbican-api-78bd467b68-h25ls\" (UID: \"622ebcc7-b645-4db6-86c0-3546523fb7c7\") " pod="openstack/barbican-api-78bd467b68-h25ls"
Jan 28 16:07:43 crc kubenswrapper[4811]: I0128 16:07:43.327973 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/622ebcc7-b645-4db6-86c0-3546523fb7c7-config-data\") pod \"barbican-api-78bd467b68-h25ls\" (UID: \"622ebcc7-b645-4db6-86c0-3546523fb7c7\") " pod="openstack/barbican-api-78bd467b68-h25ls"
Jan 28 16:07:43 crc kubenswrapper[4811]: I0128 16:07:43.328097 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7bfb\" (UniqueName: \"kubernetes.io/projected/622ebcc7-b645-4db6-86c0-3546523fb7c7-kube-api-access-n7bfb\") pod \"barbican-api-78bd467b68-h25ls\" (UID: \"622ebcc7-b645-4db6-86c0-3546523fb7c7\") " pod="openstack/barbican-api-78bd467b68-h25ls"
Jan 28 16:07:43 crc kubenswrapper[4811]: I0128 16:07:43.328213 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/622ebcc7-b645-4db6-86c0-3546523fb7c7-internal-tls-certs\") pod \"barbican-api-78bd467b68-h25ls\" (UID: \"622ebcc7-b645-4db6-86c0-3546523fb7c7\") " pod="openstack/barbican-api-78bd467b68-h25ls"
Jan 28 16:07:43 crc kubenswrapper[4811]: I0128 16:07:43.430206 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/622ebcc7-b645-4db6-86c0-3546523fb7c7-combined-ca-bundle\") pod \"barbican-api-78bd467b68-h25ls\" (UID: \"622ebcc7-b645-4db6-86c0-3546523fb7c7\") " pod="openstack/barbican-api-78bd467b68-h25ls"
Jan 28 16:07:43 crc kubenswrapper[4811]: I0128 16:07:43.430257 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/622ebcc7-b645-4db6-86c0-3546523fb7c7-config-data\") pod \"barbican-api-78bd467b68-h25ls\" (UID: \"622ebcc7-b645-4db6-86c0-3546523fb7c7\") " pod="openstack/barbican-api-78bd467b68-h25ls"
Jan 28 16:07:43 crc kubenswrapper[4811]: I0128 16:07:43.430306 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7bfb\" (UniqueName: \"kubernetes.io/projected/622ebcc7-b645-4db6-86c0-3546523fb7c7-kube-api-access-n7bfb\") pod \"barbican-api-78bd467b68-h25ls\" (UID: \"622ebcc7-b645-4db6-86c0-3546523fb7c7\") " pod="openstack/barbican-api-78bd467b68-h25ls"
Jan 28 16:07:43 crc kubenswrapper[4811]: I0128 16:07:43.430356 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/622ebcc7-b645-4db6-86c0-3546523fb7c7-internal-tls-certs\") pod \"barbican-api-78bd467b68-h25ls\" (UID: \"622ebcc7-b645-4db6-86c0-3546523fb7c7\") " pod="openstack/barbican-api-78bd467b68-h25ls"
Jan 28 16:07:43 crc kubenswrapper[4811]: I0128 16:07:43.430382 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/622ebcc7-b645-4db6-86c0-3546523fb7c7-logs\") pod \"barbican-api-78bd467b68-h25ls\" (UID: \"622ebcc7-b645-4db6-86c0-3546523fb7c7\") " pod="openstack/barbican-api-78bd467b68-h25ls"
Jan 28 16:07:43 crc kubenswrapper[4811]: I0128 16:07:43.430401 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/622ebcc7-b645-4db6-86c0-3546523fb7c7-config-data-custom\") pod \"barbican-api-78bd467b68-h25ls\" (UID: \"622ebcc7-b645-4db6-86c0-3546523fb7c7\") " pod="openstack/barbican-api-78bd467b68-h25ls"
Jan 28 16:07:43 crc kubenswrapper[4811]: I0128 16:07:43.430447 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/622ebcc7-b645-4db6-86c0-3546523fb7c7-public-tls-certs\") pod \"barbican-api-78bd467b68-h25ls\" (UID: \"622ebcc7-b645-4db6-86c0-3546523fb7c7\") " pod="openstack/barbican-api-78bd467b68-h25ls"
Jan 28 16:07:43 crc kubenswrapper[4811]: I0128 16:07:43.431152 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/622ebcc7-b645-4db6-86c0-3546523fb7c7-logs\") pod \"barbican-api-78bd467b68-h25ls\" (UID: \"622ebcc7-b645-4db6-86c0-3546523fb7c7\") " pod="openstack/barbican-api-78bd467b68-h25ls"
Jan 28 16:07:43 crc kubenswrapper[4811]: I0128 16:07:43.439503 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/622ebcc7-b645-4db6-86c0-3546523fb7c7-internal-tls-certs\") pod \"barbican-api-78bd467b68-h25ls\" (UID: \"622ebcc7-b645-4db6-86c0-3546523fb7c7\") " pod="openstack/barbican-api-78bd467b68-h25ls"
Jan 28 16:07:43 crc kubenswrapper[4811]: I0128 16:07:43.439746 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/622ebcc7-b645-4db6-86c0-3546523fb7c7-combined-ca-bundle\") pod \"barbican-api-78bd467b68-h25ls\" (UID: \"622ebcc7-b645-4db6-86c0-3546523fb7c7\") " pod="openstack/barbican-api-78bd467b68-h25ls"
Jan 28 16:07:43 crc kubenswrapper[4811]: I0128 16:07:43.440973 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/622ebcc7-b645-4db6-86c0-3546523fb7c7-config-data-custom\") pod \"barbican-api-78bd467b68-h25ls\" (UID: \"622ebcc7-b645-4db6-86c0-3546523fb7c7\") " pod="openstack/barbican-api-78bd467b68-h25ls"
Jan 28 16:07:43 crc kubenswrapper[4811]: I0128 16:07:43.442631 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/622ebcc7-b645-4db6-86c0-3546523fb7c7-config-data\") pod \"barbican-api-78bd467b68-h25ls\" (UID: \"622ebcc7-b645-4db6-86c0-3546523fb7c7\") " pod="openstack/barbican-api-78bd467b68-h25ls"
Jan 28 16:07:43 crc kubenswrapper[4811]: I0128 16:07:43.449614 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/622ebcc7-b645-4db6-86c0-3546523fb7c7-public-tls-certs\") pod \"barbican-api-78bd467b68-h25ls\" (UID: \"622ebcc7-b645-4db6-86c0-3546523fb7c7\") " pod="openstack/barbican-api-78bd467b68-h25ls"
Jan 28 16:07:43 crc kubenswrapper[4811]: I0128 16:07:43.450899 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7bfb\" (UniqueName: \"kubernetes.io/projected/622ebcc7-b645-4db6-86c0-3546523fb7c7-kube-api-access-n7bfb\") pod \"barbican-api-78bd467b68-h25ls\" (UID: \"622ebcc7-b645-4db6-86c0-3546523fb7c7\") " pod="openstack/barbican-api-78bd467b68-h25ls"
Jan 28 16:07:43 crc kubenswrapper[4811]: I0128 16:07:43.480690 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-78bd467b68-h25ls"
Jan 28 16:07:43 crc kubenswrapper[4811]: I0128 16:07:43.727104 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5bd9c6c974-98zpp" event={"ID":"1761893b-d911-4596-b0b3-ce2d25d0384f","Type":"ContainerStarted","Data":"29723b95d76a6ab96074381d9690ad4cd770a48e64bafab8c23717a36eaddbf2"}
Jan 28 16:07:43 crc kubenswrapper[4811]: I0128 16:07:43.754541 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"28212031-6289-428a-bbed-eb898d9a07a4","Type":"ContainerStarted","Data":"125f8f38afecb18c1fd4bd6f2f694ecc4de5184677dc4d2ac818d93a5d4df832"}
Jan 28 16:07:43 crc kubenswrapper[4811]: I0128 16:07:43.756774 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Jan 28 16:07:43 crc kubenswrapper[4811]: I0128 16:07:43.758625 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-69cfb4f585-8qngp" event={"ID":"0138ad61-fb17-46c7-bdd3-c65f15e2e186","Type":"ContainerStarted","Data":"89af73945fbafecef0415a87ddc471cbf18d61957ff14ef2a2681bac6cbcc415"}
Jan 28 16:07:44 crc kubenswrapper[4811]: I0128 16:07:44.092747 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-7b7bd9558c-kbczp"
Jan 28 16:07:44 crc kubenswrapper[4811]: I0128 16:07:44.094753 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.656101419 podStartE2EDuration="7.094737207s" podCreationTimestamp="2026-01-28 16:07:37 +0000 UTC" firstStartedPulling="2026-01-28 16:07:38.739664081 +0000 UTC m=+1351.494027654" lastFinishedPulling="2026-01-28 16:07:43.178299859 +0000 UTC m=+1355.932663442" observedRunningTime="2026-01-28 16:07:43.796388034 +0000 UTC m=+1356.550751617" watchObservedRunningTime="2026-01-28 16:07:44.094737207 +0000 UTC m=+1356.849100790"
Jan 28 16:07:44 crc kubenswrapper[4811]: I0128 16:07:44.095843 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-78bd467b68-h25ls"]
Jan 28 16:07:44 crc kubenswrapper[4811]: I0128 16:07:44.766056 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-7b7c5df494-2hz6m"
Jan 28 16:07:44 crc kubenswrapper[4811]: I0128 16:07:44.768733 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-78bd467b68-h25ls" event={"ID":"622ebcc7-b645-4db6-86c0-3546523fb7c7","Type":"ContainerStarted","Data":"f3194b493d5ddf32d8f9d2a436bb598bd51e3c3d501ca80d66d47ad0bff2fffb"}
Jan 28 16:07:44 crc kubenswrapper[4811]: I0128 16:07:44.768786 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-78bd467b68-h25ls" event={"ID":"622ebcc7-b645-4db6-86c0-3546523fb7c7","Type":"ContainerStarted","Data":"656706d31855991db104dc63417a4a2dad789a13059f74bc9caee3f4a9998a35"}
Jan 28 16:07:44 crc kubenswrapper[4811]: I0128 16:07:44.768796 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-78bd467b68-h25ls" event={"ID":"622ebcc7-b645-4db6-86c0-3546523fb7c7","Type":"ContainerStarted","Data":"56df644148d58959d35a4958b09728943bd6c0a1ebb2065d7d9466347764758c"}
Jan 28 16:07:44 crc kubenswrapper[4811]: I0128 16:07:44.768845 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-78bd467b68-h25ls"
Jan 28 16:07:44 crc kubenswrapper[4811]: I0128 16:07:44.768879 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-78bd467b68-h25ls"
Jan 28 16:07:44 crc kubenswrapper[4811]: I0128 16:07:44.770902 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-69cfb4f585-8qngp" event={"ID":"0138ad61-fb17-46c7-bdd3-c65f15e2e186","Type":"ContainerStarted","Data":"fcc510fe7aab0a06fcaa0e25c25b295dadfdee8c254acdff99aa414590067770"}
Jan 28 16:07:44 crc kubenswrapper[4811]: I0128 16:07:44.773267 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5bd9c6c974-98zpp" event={"ID":"1761893b-d911-4596-b0b3-ce2d25d0384f","Type":"ContainerStarted","Data":"536a91cb8574f375dfd0ebc16db3f46162b78ee4bd2c334c6a012f5ff6e1ed7a"}
Jan 28 16:07:44 crc kubenswrapper[4811]: I0128 16:07:44.808712 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-5bd9c6c974-98zpp" podStartSLOduration=3.786949129 podStartE2EDuration="5.808693787s" podCreationTimestamp="2026-01-28 16:07:39 +0000 UTC" firstStartedPulling="2026-01-28 16:07:41.155762949 +0000 UTC m=+1353.910126532" lastFinishedPulling="2026-01-28 16:07:43.177507607 +0000 UTC m=+1355.931871190" observedRunningTime="2026-01-28 16:07:44.80547581 +0000 UTC m=+1357.559839393" watchObservedRunningTime="2026-01-28 16:07:44.808693787 +0000 UTC m=+1357.563057370"
Jan 28 16:07:44 crc kubenswrapper[4811]: I0128 16:07:44.815916 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-7b7c5df494-2hz6m"
Jan 28 16:07:44 crc kubenswrapper[4811]: I0128 16:07:44.836794 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-69cfb4f585-8qngp" podStartSLOduration=3.555374553 podStartE2EDuration="5.836775297s" podCreationTimestamp="2026-01-28 16:07:39 +0000 UTC" firstStartedPulling="2026-01-28 16:07:40.896226457 +0000 UTC m=+1353.650590040" lastFinishedPulling="2026-01-28 16:07:43.177627201 +0000 UTC m=+1355.931990784" observedRunningTime="2026-01-28 16:07:44.830549819 +0000 UTC m=+1357.584913402" watchObservedRunningTime="2026-01-28 16:07:44.836775297 +0000 UTC m=+1357.591138870"
Jan 28 16:07:44 crc kubenswrapper[4811]: I0128 16:07:44.860623 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-78bd467b68-h25ls" podStartSLOduration=1.860604562 podStartE2EDuration="1.860604562s" podCreationTimestamp="2026-01-28 16:07:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:07:44.848766342 +0000 UTC m=+1357.603129925" watchObservedRunningTime="2026-01-28 16:07:44.860604562 +0000 UTC m=+1357.614968145"
Jan 28 16:07:47 crc kubenswrapper[4811]: E0128 16:07:47.059486 4811 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7acf4fe9_fb7e_4260_9976_04deecc1264d.slice/crio-f78d6d49ea579cf735dcc8dce1604031cf24c3e9ba1882bbee2bb70613b01d91.scope\": RecentStats: unable to find data in memory cache]"
Jan 28 16:07:47 crc kubenswrapper[4811]: I0128 16:07:47.797181 4811 generic.go:334] "Generic (PLEG): container finished" podID="7acf4fe9-fb7e-4260-9976-04deecc1264d" containerID="f78d6d49ea579cf735dcc8dce1604031cf24c3e9ba1882bbee2bb70613b01d91" exitCode=0
Jan 28 16:07:47 crc kubenswrapper[4811]: I0128 16:07:47.797233 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-hdczs" event={"ID":"7acf4fe9-fb7e-4260-9976-04deecc1264d","Type":"ContainerDied","Data":"f78d6d49ea579cf735dcc8dce1604031cf24c3e9ba1882bbee2bb70613b01d91"}
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.368080 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-56f5cb95d8-wm7lj" podUID="dc42d256-a716-4ae2-8107-78e247e0efe1" containerName="barbican-api" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.424362 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"]
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.425831 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.428248 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-5c865"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.428271 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.428469 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.435271 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.533056 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/712b9902-8ca7-43b1-81ea-125a2093bce6-openstack-config-secret\") pod \"openstackclient\" (UID: \"712b9902-8ca7-43b1-81ea-125a2093bce6\") " pod="openstack/openstackclient"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.533404 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/712b9902-8ca7-43b1-81ea-125a2093bce6-openstack-config\") pod \"openstackclient\" (UID: \"712b9902-8ca7-43b1-81ea-125a2093bce6\") " pod="openstack/openstackclient"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.533544 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrb4q\" (UniqueName: \"kubernetes.io/projected/712b9902-8ca7-43b1-81ea-125a2093bce6-kube-api-access-lrb4q\") pod \"openstackclient\" (UID: \"712b9902-8ca7-43b1-81ea-125a2093bce6\") " pod="openstack/openstackclient"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.533630 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/712b9902-8ca7-43b1-81ea-125a2093bce6-combined-ca-bundle\") pod \"openstackclient\" (UID: \"712b9902-8ca7-43b1-81ea-125a2093bce6\") " pod="openstack/openstackclient"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.635255 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/712b9902-8ca7-43b1-81ea-125a2093bce6-openstack-config-secret\") pod \"openstackclient\" (UID: \"712b9902-8ca7-43b1-81ea-125a2093bce6\") " pod="openstack/openstackclient"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.635325 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/712b9902-8ca7-43b1-81ea-125a2093bce6-openstack-config\") pod \"openstackclient\" (UID: \"712b9902-8ca7-43b1-81ea-125a2093bce6\") " pod="openstack/openstackclient"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.635404 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrb4q\" (UniqueName: \"kubernetes.io/projected/712b9902-8ca7-43b1-81ea-125a2093bce6-kube-api-access-lrb4q\") pod \"openstackclient\" (UID: \"712b9902-8ca7-43b1-81ea-125a2093bce6\") " pod="openstack/openstackclient"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.635475 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/712b9902-8ca7-43b1-81ea-125a2093bce6-combined-ca-bundle\") pod \"openstackclient\" (UID: \"712b9902-8ca7-43b1-81ea-125a2093bce6\") " pod="openstack/openstackclient"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.637316 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/712b9902-8ca7-43b1-81ea-125a2093bce6-openstack-config\") pod \"openstackclient\" (UID: \"712b9902-8ca7-43b1-81ea-125a2093bce6\") " pod="openstack/openstackclient"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.654313 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/712b9902-8ca7-43b1-81ea-125a2093bce6-openstack-config-secret\") pod \"openstackclient\" (UID: \"712b9902-8ca7-43b1-81ea-125a2093bce6\") " pod="openstack/openstackclient"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.654639 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/712b9902-8ca7-43b1-81ea-125a2093bce6-combined-ca-bundle\") pod \"openstackclient\" (UID: \"712b9902-8ca7-43b1-81ea-125a2093bce6\") " pod="openstack/openstackclient"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.657879 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrb4q\" (UniqueName: \"kubernetes.io/projected/712b9902-8ca7-43b1-81ea-125a2093bce6-kube-api-access-lrb4q\") pod \"openstackclient\" (UID: \"712b9902-8ca7-43b1-81ea-125a2093bce6\") " pod="openstack/openstackclient"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.661261 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"]
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.662168 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.673107 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"]
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.711986 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"]
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.716589 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.728359 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Jan 28 16:07:48 crc kubenswrapper[4811]: E0128 16:07:48.777242 4811 log.go:32] "RunPodSandbox from runtime service failed" err=<
Jan 28 16:07:48 crc kubenswrapper[4811]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_712b9902-8ca7-43b1-81ea-125a2093bce6_0(c8d4dc836740f5fbafccb1ae2ef3510abae8748c4d3ccbc0da24a76c310f036a): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"c8d4dc836740f5fbafccb1ae2ef3510abae8748c4d3ccbc0da24a76c310f036a" Netns:"/var/run/netns/950ed0b9-87ca-4d63-a4f5-1df0dfe88edf" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=c8d4dc836740f5fbafccb1ae2ef3510abae8748c4d3ccbc0da24a76c310f036a;K8S_POD_UID=712b9902-8ca7-43b1-81ea-125a2093bce6" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/712b9902-8ca7-43b1-81ea-125a2093bce6]: expected pod UID "712b9902-8ca7-43b1-81ea-125a2093bce6" but got "e9d8f73c-afb3-4e7e-a69a-e8899bf051cc" from Kube API
Jan 28 16:07:48 crc kubenswrapper[4811]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"}
Jan 28 16:07:48 crc kubenswrapper[4811]: >
Jan 28 16:07:48 crc kubenswrapper[4811]: E0128 16:07:48.777306 4811 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=<
Jan 28 16:07:48 crc kubenswrapper[4811]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_712b9902-8ca7-43b1-81ea-125a2093bce6_0(c8d4dc836740f5fbafccb1ae2ef3510abae8748c4d3ccbc0da24a76c310f036a): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"c8d4dc836740f5fbafccb1ae2ef3510abae8748c4d3ccbc0da24a76c310f036a" Netns:"/var/run/netns/950ed0b9-87ca-4d63-a4f5-1df0dfe88edf" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=c8d4dc836740f5fbafccb1ae2ef3510abae8748c4d3ccbc0da24a76c310f036a;K8S_POD_UID=712b9902-8ca7-43b1-81ea-125a2093bce6" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/712b9902-8ca7-43b1-81ea-125a2093bce6]: expected pod UID "712b9902-8ca7-43b1-81ea-125a2093bce6" but got "e9d8f73c-afb3-4e7e-a69a-e8899bf051cc" from Kube API
Jan 28 16:07:48 crc kubenswrapper[4811]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"}
Jan 28 16:07:48 crc kubenswrapper[4811]: > pod="openstack/openstackclient"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.814054 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.817394 4811 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="712b9902-8ca7-43b1-81ea-125a2093bce6" podUID="e9d8f73c-afb3-4e7e-a69a-e8899bf051cc"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.833270 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.840708 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e9d8f73c-afb3-4e7e-a69a-e8899bf051cc-openstack-config\") pod \"openstackclient\" (UID: \"e9d8f73c-afb3-4e7e-a69a-e8899bf051cc\") " pod="openstack/openstackclient"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.840761 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6t26\" (UniqueName: \"kubernetes.io/projected/e9d8f73c-afb3-4e7e-a69a-e8899bf051cc-kube-api-access-h6t26\") pod \"openstackclient\" (UID: \"e9d8f73c-afb3-4e7e-a69a-e8899bf051cc\") " pod="openstack/openstackclient"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.840945 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e9d8f73c-afb3-4e7e-a69a-e8899bf051cc-openstack-config-secret\") pod \"openstackclient\" (UID: \"e9d8f73c-afb3-4e7e-a69a-e8899bf051cc\") " pod="openstack/openstackclient"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.840992 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9d8f73c-afb3-4e7e-a69a-e8899bf051cc-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e9d8f73c-afb3-4e7e-a69a-e8899bf051cc\") " pod="openstack/openstackclient"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.942171 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/712b9902-8ca7-43b1-81ea-125a2093bce6-openstack-config\") pod \"712b9902-8ca7-43b1-81ea-125a2093bce6\" (UID: \"712b9902-8ca7-43b1-81ea-125a2093bce6\") "
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.942786 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/712b9902-8ca7-43b1-81ea-125a2093bce6-combined-ca-bundle\") pod \"712b9902-8ca7-43b1-81ea-125a2093bce6\" (UID: \"712b9902-8ca7-43b1-81ea-125a2093bce6\") "
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.942862 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lrb4q\" (UniqueName: \"kubernetes.io/projected/712b9902-8ca7-43b1-81ea-125a2093bce6-kube-api-access-lrb4q\") pod \"712b9902-8ca7-43b1-81ea-125a2093bce6\" (UID: \"712b9902-8ca7-43b1-81ea-125a2093bce6\") "
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.942966 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/712b9902-8ca7-43b1-81ea-125a2093bce6-openstack-config-secret\") pod \"712b9902-8ca7-43b1-81ea-125a2093bce6\" (UID: \"712b9902-8ca7-43b1-81ea-125a2093bce6\") "
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.943297 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e9d8f73c-afb3-4e7e-a69a-e8899bf051cc-openstack-config\") pod \"openstackclient\" (UID: \"e9d8f73c-afb3-4e7e-a69a-e8899bf051cc\") " pod="openstack/openstackclient"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.943324 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6t26\" (UniqueName: \"kubernetes.io/projected/e9d8f73c-afb3-4e7e-a69a-e8899bf051cc-kube-api-access-h6t26\") pod \"openstackclient\" (UID: \"e9d8f73c-afb3-4e7e-a69a-e8899bf051cc\") " pod="openstack/openstackclient"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.943418 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e9d8f73c-afb3-4e7e-a69a-e8899bf051cc-openstack-config-secret\") pod \"openstackclient\" (UID: \"e9d8f73c-afb3-4e7e-a69a-e8899bf051cc\") " pod="openstack/openstackclient"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.943462 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9d8f73c-afb3-4e7e-a69a-e8899bf051cc-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e9d8f73c-afb3-4e7e-a69a-e8899bf051cc\") " pod="openstack/openstackclient"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.948264 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e9d8f73c-afb3-4e7e-a69a-e8899bf051cc-openstack-config\") pod \"openstackclient\" (UID: \"e9d8f73c-afb3-4e7e-a69a-e8899bf051cc\") " pod="openstack/openstackclient"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.948454 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/712b9902-8ca7-43b1-81ea-125a2093bce6-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "712b9902-8ca7-43b1-81ea-125a2093bce6" (UID: "712b9902-8ca7-43b1-81ea-125a2093bce6"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.951064 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9d8f73c-afb3-4e7e-a69a-e8899bf051cc-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e9d8f73c-afb3-4e7e-a69a-e8899bf051cc\") " pod="openstack/openstackclient"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.952577 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/712b9902-8ca7-43b1-81ea-125a2093bce6-kube-api-access-lrb4q" (OuterVolumeSpecName: "kube-api-access-lrb4q") pod "712b9902-8ca7-43b1-81ea-125a2093bce6" (UID: "712b9902-8ca7-43b1-81ea-125a2093bce6"). InnerVolumeSpecName "kube-api-access-lrb4q". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.953811 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e9d8f73c-afb3-4e7e-a69a-e8899bf051cc-openstack-config-secret\") pod \"openstackclient\" (UID: \"e9d8f73c-afb3-4e7e-a69a-e8899bf051cc\") " pod="openstack/openstackclient"
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.953896 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/712b9902-8ca7-43b1-81ea-125a2093bce6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "712b9902-8ca7-43b1-81ea-125a2093bce6" (UID: "712b9902-8ca7-43b1-81ea-125a2093bce6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.957482 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/712b9902-8ca7-43b1-81ea-125a2093bce6-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "712b9902-8ca7-43b1-81ea-125a2093bce6" (UID: "712b9902-8ca7-43b1-81ea-125a2093bce6"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 16:07:48 crc kubenswrapper[4811]: I0128 16:07:48.964996 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6t26\" (UniqueName: \"kubernetes.io/projected/e9d8f73c-afb3-4e7e-a69a-e8899bf051cc-kube-api-access-h6t26\") pod \"openstackclient\" (UID: \"e9d8f73c-afb3-4e7e-a69a-e8899bf051cc\") " pod="openstack/openstackclient"
Jan 28 16:07:49 crc kubenswrapper[4811]: I0128 16:07:49.045119 4811 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/712b9902-8ca7-43b1-81ea-125a2093bce6-openstack-config\") on node \"crc\" DevicePath \"\""
Jan 28 16:07:49 crc kubenswrapper[4811]: I0128 16:07:49.045153 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/712b9902-8ca7-43b1-81ea-125a2093bce6-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 16:07:49 crc kubenswrapper[4811]: I0128 16:07:49.045166 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lrb4q\" (UniqueName: \"kubernetes.io/projected/712b9902-8ca7-43b1-81ea-125a2093bce6-kube-api-access-lrb4q\") on node \"crc\" DevicePath \"\""
Jan 28 16:07:49 crc kubenswrapper[4811]: I0128 16:07:49.045180 4811 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/712b9902-8ca7-43b1-81ea-125a2093bce6-openstack-config-secret\") on node \"crc\" DevicePath \"\""
Jan 28 16:07:49 crc kubenswrapper[4811]: I0128 16:07:49.132178 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Jan 28 16:07:49 crc kubenswrapper[4811]: I0128 16:07:49.136032 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-hdczs"
Jan 28 16:07:49 crc kubenswrapper[4811]: I0128 16:07:49.251241 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j47qm\" (UniqueName: \"kubernetes.io/projected/7acf4fe9-fb7e-4260-9976-04deecc1264d-kube-api-access-j47qm\") pod \"7acf4fe9-fb7e-4260-9976-04deecc1264d\" (UID: \"7acf4fe9-fb7e-4260-9976-04deecc1264d\") "
Jan 28 16:07:49 crc kubenswrapper[4811]: I0128 16:07:49.251659 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7acf4fe9-fb7e-4260-9976-04deecc1264d-db-sync-config-data\") pod \"7acf4fe9-fb7e-4260-9976-04deecc1264d\" (UID: \"7acf4fe9-fb7e-4260-9976-04deecc1264d\") "
Jan 28 16:07:49 crc kubenswrapper[4811]: I0128 16:07:49.251826 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7acf4fe9-fb7e-4260-9976-04deecc1264d-etc-machine-id\") pod \"7acf4fe9-fb7e-4260-9976-04deecc1264d\" (UID: \"7acf4fe9-fb7e-4260-9976-04deecc1264d\") "
Jan 28 16:07:49 crc kubenswrapper[4811]: I0128 16:07:49.251866 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7acf4fe9-fb7e-4260-9976-04deecc1264d-combined-ca-bundle\") pod \"7acf4fe9-fb7e-4260-9976-04deecc1264d\" (UID: \"7acf4fe9-fb7e-4260-9976-04deecc1264d\") "
Jan 28 16:07:49 crc kubenswrapper[4811]: I0128 16:07:49.251894 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7acf4fe9-fb7e-4260-9976-04deecc1264d-config-data\") pod \"7acf4fe9-fb7e-4260-9976-04deecc1264d\" (UID: \"7acf4fe9-fb7e-4260-9976-04deecc1264d\") "
Jan 28 16:07:49 crc kubenswrapper[4811]: I0128 16:07:49.251925 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7acf4fe9-fb7e-4260-9976-04deecc1264d-scripts\") pod \"7acf4fe9-fb7e-4260-9976-04deecc1264d\" (UID: \"7acf4fe9-fb7e-4260-9976-04deecc1264d\") "
Jan 28 16:07:49 crc kubenswrapper[4811]: I0128 16:07:49.252195 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7acf4fe9-fb7e-4260-9976-04deecc1264d-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "7acf4fe9-fb7e-4260-9976-04deecc1264d" (UID: "7acf4fe9-fb7e-4260-9976-04deecc1264d"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 28 16:07:49 crc kubenswrapper[4811]: I0128 16:07:49.252843 4811 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7acf4fe9-fb7e-4260-9976-04deecc1264d-etc-machine-id\") on node \"crc\" DevicePath \"\""
Jan 28 16:07:49 crc kubenswrapper[4811]: I0128 16:07:49.257480 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7acf4fe9-fb7e-4260-9976-04deecc1264d-kube-api-access-j47qm" (OuterVolumeSpecName: "kube-api-access-j47qm") pod "7acf4fe9-fb7e-4260-9976-04deecc1264d" (UID: "7acf4fe9-fb7e-4260-9976-04deecc1264d"). InnerVolumeSpecName "kube-api-access-j47qm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 16:07:49 crc kubenswrapper[4811]: I0128 16:07:49.257730 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7acf4fe9-fb7e-4260-9976-04deecc1264d-scripts" (OuterVolumeSpecName: "scripts") pod "7acf4fe9-fb7e-4260-9976-04deecc1264d" (UID: "7acf4fe9-fb7e-4260-9976-04deecc1264d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:07:49 crc kubenswrapper[4811]: I0128 16:07:49.270216 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7acf4fe9-fb7e-4260-9976-04deecc1264d-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "7acf4fe9-fb7e-4260-9976-04deecc1264d" (UID: "7acf4fe9-fb7e-4260-9976-04deecc1264d"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:07:49 crc kubenswrapper[4811]: I0128 16:07:49.302777 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7acf4fe9-fb7e-4260-9976-04deecc1264d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7acf4fe9-fb7e-4260-9976-04deecc1264d" (UID: "7acf4fe9-fb7e-4260-9976-04deecc1264d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:07:49 crc kubenswrapper[4811]: I0128 16:07:49.333165 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7acf4fe9-fb7e-4260-9976-04deecc1264d-config-data" (OuterVolumeSpecName: "config-data") pod "7acf4fe9-fb7e-4260-9976-04deecc1264d" (UID: "7acf4fe9-fb7e-4260-9976-04deecc1264d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:07:49 crc kubenswrapper[4811]: I0128 16:07:49.354577 4811 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7acf4fe9-fb7e-4260-9976-04deecc1264d-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Jan 28 16:07:49 crc kubenswrapper[4811]: I0128 16:07:49.354865 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7acf4fe9-fb7e-4260-9976-04deecc1264d-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 16:07:49 crc kubenswrapper[4811]: I0128 16:07:49.354876 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7acf4fe9-fb7e-4260-9976-04deecc1264d-config-data\") on node \"crc\" DevicePath \"\""
Jan 28 16:07:49 crc kubenswrapper[4811]: I0128 16:07:49.354884 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7acf4fe9-fb7e-4260-9976-04deecc1264d-scripts\") on node \"crc\" DevicePath \"\""
Jan 28 16:07:49 crc kubenswrapper[4811]: I0128 16:07:49.354892 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j47qm\" (UniqueName: \"kubernetes.io/projected/7acf4fe9-fb7e-4260-9976-04deecc1264d-kube-api-access-j47qm\") on node \"crc\" DevicePath \"\""
Jan 28 16:07:49 crc kubenswrapper[4811]: I0128 16:07:49.585737 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Jan 28 16:07:49 crc kubenswrapper[4811]: I0128 16:07:49.822005 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-hdczs"
Jan 28 16:07:49 crc kubenswrapper[4811]: I0128 16:07:49.822000 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-hdczs" event={"ID":"7acf4fe9-fb7e-4260-9976-04deecc1264d","Type":"ContainerDied","Data":"b410cd4b2d52947c3a819e8421138cf44d3e13b74012377d94031bf894b6ec20"}
Jan 28 16:07:49 crc kubenswrapper[4811]: I0128 16:07:49.822150 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b410cd4b2d52947c3a819e8421138cf44d3e13b74012377d94031bf894b6ec20"
Jan 28 16:07:49 crc kubenswrapper[4811]: I0128 16:07:49.822966 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Jan 28 16:07:49 crc kubenswrapper[4811]: I0128 16:07:49.822999 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"e9d8f73c-afb3-4e7e-a69a-e8899bf051cc","Type":"ContainerStarted","Data":"1e0fe44fdf95622ff43ebf2e8f745ce8186f251aa3f137b8cb63d15c1640a1b0"}
Jan 28 16:07:49 crc kubenswrapper[4811]: I0128 16:07:49.825818 4811 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="712b9902-8ca7-43b1-81ea-125a2093bce6" podUID="e9d8f73c-afb3-4e7e-a69a-e8899bf051cc"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.150952 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"]
Jan 28 16:07:50 crc kubenswrapper[4811]: E0128 16:07:50.152007 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7acf4fe9-fb7e-4260-9976-04deecc1264d" containerName="cinder-db-sync"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.152034 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="7acf4fe9-fb7e-4260-9976-04deecc1264d" containerName="cinder-db-sync"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.152534 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="7acf4fe9-fb7e-4260-9976-04deecc1264d" containerName="cinder-db-sync"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.154236 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.165308 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.165414 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.165696 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.166956 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-qd4pr"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.263972 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.299232 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3\") " pod="openstack/cinder-scheduler-0"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.299318 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-scripts\") pod \"cinder-scheduler-0\" (UID: \"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3\") " pod="openstack/cinder-scheduler-0"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.299342 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3\") " pod="openstack/cinder-scheduler-0"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.299362 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-config-data\") pod \"cinder-scheduler-0\" (UID: \"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3\") " pod="openstack/cinder-scheduler-0"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.299442 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjkct\" (UniqueName: \"kubernetes.io/projected/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-kube-api-access-pjkct\") pod \"cinder-scheduler-0\" (UID: \"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3\") " pod="openstack/cinder-scheduler-0"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.299469 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3\") " pod="openstack/cinder-scheduler-0"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.339620 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86bf77c6df-67qct"]
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.339904 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86bf77c6df-67qct" podUID="d43f6e19-596f-492a-a1b8-891a70fb2009" containerName="dnsmasq-dns" containerID="cri-o://70328cca8f3d8ad6e4a717aee431938b7fa1d79e6b9d348666afa68c1cad902d" gracePeriod=10
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.375640 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="712b9902-8ca7-43b1-81ea-125a2093bce6" path="/var/lib/kubelet/pods/712b9902-8ca7-43b1-81ea-125a2093bce6/volumes"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.376044 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-76bb4997-f96mf"]
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.377359 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86bf77c6df-67qct"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.377668 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76bb4997-f96mf"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.388756 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76bb4997-f96mf"]
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.402554 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjkct\" (UniqueName: \"kubernetes.io/projected/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-kube-api-access-pjkct\") pod \"cinder-scheduler-0\" (UID: \"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3\") " pod="openstack/cinder-scheduler-0"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.402619 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3\") " pod="openstack/cinder-scheduler-0"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.402672 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3\") " pod="openstack/cinder-scheduler-0"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.402732 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-scripts\") pod \"cinder-scheduler-0\" (UID: \"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3\") " pod="openstack/cinder-scheduler-0"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.402767 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3\") " pod="openstack/cinder-scheduler-0"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.402794 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-config-data\") pod \"cinder-scheduler-0\" (UID: \"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3\") " pod="openstack/cinder-scheduler-0"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.410755 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3\") " pod="openstack/cinder-scheduler-0"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.418736 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-scripts\") pod \"cinder-scheduler-0\" (UID: \"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3\") " pod="openstack/cinder-scheduler-0"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.419141 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-config-data\") pod \"cinder-scheduler-0\" (UID: \"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3\") " pod="openstack/cinder-scheduler-0"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.424032 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3\") " pod="openstack/cinder-scheduler-0"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.439968 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"]
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.441307 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.444593 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.444840 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3\") " pod="openstack/cinder-scheduler-0"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.463170 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.468308 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjkct\" (UniqueName: \"kubernetes.io/projected/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-kube-api-access-pjkct\") pod \"cinder-scheduler-0\" (UID: \"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3\") " pod="openstack/cinder-scheduler-0"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.481635 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-86bf77c6df-67qct" podUID="d43f6e19-596f-492a-a1b8-891a70fb2009" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.154:5353: connect: connection refused"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.508486 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwsp4\" (UniqueName: \"kubernetes.io/projected/48668d6d-46c0-4e4c-91c5-2fdab113cd49-kube-api-access-lwsp4\") pod \"dnsmasq-dns-76bb4997-f96mf\" (UID: \"48668d6d-46c0-4e4c-91c5-2fdab113cd49\") " pod="openstack/dnsmasq-dns-76bb4997-f96mf"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.508638 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/48668d6d-46c0-4e4c-91c5-2fdab113cd49-dns-svc\") pod \"dnsmasq-dns-76bb4997-f96mf\" (UID: \"48668d6d-46c0-4e4c-91c5-2fdab113cd49\") " pod="openstack/dnsmasq-dns-76bb4997-f96mf"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.508921 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48668d6d-46c0-4e4c-91c5-2fdab113cd49-config\") pod \"dnsmasq-dns-76bb4997-f96mf\" (UID: \"48668d6d-46c0-4e4c-91c5-2fdab113cd49\") " pod="openstack/dnsmasq-dns-76bb4997-f96mf"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.508991 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/48668d6d-46c0-4e4c-91c5-2fdab113cd49-ovsdbserver-nb\") pod \"dnsmasq-dns-76bb4997-f96mf\" (UID: \"48668d6d-46c0-4e4c-91c5-2fdab113cd49\") " pod="openstack/dnsmasq-dns-76bb4997-f96mf"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.509014 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/48668d6d-46c0-4e4c-91c5-2fdab113cd49-ovsdbserver-sb\") pod \"dnsmasq-dns-76bb4997-f96mf\" (UID: \"48668d6d-46c0-4e4c-91c5-2fdab113cd49\") " pod="openstack/dnsmasq-dns-76bb4997-f96mf"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.509073 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/48668d6d-46c0-4e4c-91c5-2fdab113cd49-dns-swift-storage-0\") pod \"dnsmasq-dns-76bb4997-f96mf\" (UID: \"48668d6d-46c0-4e4c-91c5-2fdab113cd49\") " pod="openstack/dnsmasq-dns-76bb4997-f96mf"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.521440 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.612359 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-scripts\") pod \"cinder-api-0\" (UID: \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\") " pod="openstack/cinder-api-0"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.613405 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-config-data-custom\") pod \"cinder-api-0\" (UID: \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\") " pod="openstack/cinder-api-0"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.613500 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\") " pod="openstack/cinder-api-0"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.613595 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48668d6d-46c0-4e4c-91c5-2fdab113cd49-config\") pod \"dnsmasq-dns-76bb4997-f96mf\" (UID: \"48668d6d-46c0-4e4c-91c5-2fdab113cd49\") " pod="openstack/dnsmasq-dns-76bb4997-f96mf"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.613672 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-config-data\") pod \"cinder-api-0\" (UID: \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\") " pod="openstack/cinder-api-0"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.613742 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/48668d6d-46c0-4e4c-91c5-2fdab113cd49-ovsdbserver-nb\") pod \"dnsmasq-dns-76bb4997-f96mf\" (UID: \"48668d6d-46c0-4e4c-91c5-2fdab113cd49\") " pod="openstack/dnsmasq-dns-76bb4997-f96mf"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.613816 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/48668d6d-46c0-4e4c-91c5-2fdab113cd49-ovsdbserver-sb\") pod \"dnsmasq-dns-76bb4997-f96mf\" (UID: \"48668d6d-46c0-4e4c-91c5-2fdab113cd49\") " pod="openstack/dnsmasq-dns-76bb4997-f96mf"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.613884 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdd2f\" (UniqueName: \"kubernetes.io/projected/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-kube-api-access-qdd2f\") pod \"cinder-api-0\" (UID: \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\") " pod="openstack/cinder-api-0"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.613967 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/48668d6d-46c0-4e4c-91c5-2fdab113cd49-dns-swift-storage-0\") pod \"dnsmasq-dns-76bb4997-f96mf\" (UID: \"48668d6d-46c0-4e4c-91c5-2fdab113cd49\") " pod="openstack/dnsmasq-dns-76bb4997-f96mf"
Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.614090 4811 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwsp4\" (UniqueName: \"kubernetes.io/projected/48668d6d-46c0-4e4c-91c5-2fdab113cd49-kube-api-access-lwsp4\") pod \"dnsmasq-dns-76bb4997-f96mf\" (UID: \"48668d6d-46c0-4e4c-91c5-2fdab113cd49\") " pod="openstack/dnsmasq-dns-76bb4997-f96mf" Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.614165 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/48668d6d-46c0-4e4c-91c5-2fdab113cd49-dns-svc\") pod \"dnsmasq-dns-76bb4997-f96mf\" (UID: \"48668d6d-46c0-4e4c-91c5-2fdab113cd49\") " pod="openstack/dnsmasq-dns-76bb4997-f96mf" Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.614240 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\") " pod="openstack/cinder-api-0" Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.614308 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-logs\") pod \"cinder-api-0\" (UID: \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\") " pod="openstack/cinder-api-0" Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.615986 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48668d6d-46c0-4e4c-91c5-2fdab113cd49-config\") pod \"dnsmasq-dns-76bb4997-f96mf\" (UID: \"48668d6d-46c0-4e4c-91c5-2fdab113cd49\") " pod="openstack/dnsmasq-dns-76bb4997-f96mf" Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.616579 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/48668d6d-46c0-4e4c-91c5-2fdab113cd49-ovsdbserver-nb\") pod \"dnsmasq-dns-76bb4997-f96mf\" (UID: \"48668d6d-46c0-4e4c-91c5-2fdab113cd49\") " pod="openstack/dnsmasq-dns-76bb4997-f96mf" Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.617151 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/48668d6d-46c0-4e4c-91c5-2fdab113cd49-ovsdbserver-sb\") pod \"dnsmasq-dns-76bb4997-f96mf\" (UID: \"48668d6d-46c0-4e4c-91c5-2fdab113cd49\") " pod="openstack/dnsmasq-dns-76bb4997-f96mf" Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.617817 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/48668d6d-46c0-4e4c-91c5-2fdab113cd49-dns-swift-storage-0\") pod \"dnsmasq-dns-76bb4997-f96mf\" (UID: \"48668d6d-46c0-4e4c-91c5-2fdab113cd49\") " pod="openstack/dnsmasq-dns-76bb4997-f96mf" Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.618922 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/48668d6d-46c0-4e4c-91c5-2fdab113cd49-dns-svc\") pod \"dnsmasq-dns-76bb4997-f96mf\" (UID: \"48668d6d-46c0-4e4c-91c5-2fdab113cd49\") " pod="openstack/dnsmasq-dns-76bb4997-f96mf" Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.659496 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwsp4\" (UniqueName: 
\"kubernetes.io/projected/48668d6d-46c0-4e4c-91c5-2fdab113cd49-kube-api-access-lwsp4\") pod \"dnsmasq-dns-76bb4997-f96mf\" (UID: \"48668d6d-46c0-4e4c-91c5-2fdab113cd49\") " pod="openstack/dnsmasq-dns-76bb4997-f96mf" Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.715454 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-logs\") pod \"cinder-api-0\" (UID: \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\") " pod="openstack/cinder-api-0" Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.715957 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-scripts\") pod \"cinder-api-0\" (UID: \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\") " pod="openstack/cinder-api-0" Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.715992 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-config-data-custom\") pod \"cinder-api-0\" (UID: \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\") " pod="openstack/cinder-api-0" Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.716016 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\") " pod="openstack/cinder-api-0" Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.716086 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-config-data\") pod \"cinder-api-0\" (UID: \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\") " pod="openstack/cinder-api-0" Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.716151 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdd2f\" (UniqueName: \"kubernetes.io/projected/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-kube-api-access-qdd2f\") pod \"cinder-api-0\" (UID: \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\") " pod="openstack/cinder-api-0" Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.716366 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\") " pod="openstack/cinder-api-0" Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.716481 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\") " pod="openstack/cinder-api-0" Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.717119 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-logs\") pod \"cinder-api-0\" (UID: \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\") " pod="openstack/cinder-api-0" Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.724764 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-config-data\") pod \"cinder-api-0\" (UID: \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\") " pod="openstack/cinder-api-0" Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.725040 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-config-data-custom\") pod \"cinder-api-0\" (UID: \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\") " pod="openstack/cinder-api-0" Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.727707 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\") " pod="openstack/cinder-api-0" Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.728001 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-scripts\") pod \"cinder-api-0\" (UID: \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\") " pod="openstack/cinder-api-0" Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.750104 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdd2f\" (UniqueName: \"kubernetes.io/projected/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-kube-api-access-qdd2f\") pod \"cinder-api-0\" (UID: \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\") " pod="openstack/cinder-api-0" Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.831840 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76bb4997-f96mf" Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.837601 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.844960 4811 generic.go:334] "Generic (PLEG): container finished" podID="d43f6e19-596f-492a-a1b8-891a70fb2009" containerID="70328cca8f3d8ad6e4a717aee431938b7fa1d79e6b9d348666afa68c1cad902d" exitCode=0 Jan 28 16:07:50 crc kubenswrapper[4811]: I0128 16:07:50.845007 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86bf77c6df-67qct" event={"ID":"d43f6e19-596f-492a-a1b8-891a70fb2009","Type":"ContainerDied","Data":"70328cca8f3d8ad6e4a717aee431938b7fa1d79e6b9d348666afa68c1cad902d"} Jan 28 16:07:51 crc kubenswrapper[4811]: I0128 16:07:51.129491 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 28 16:07:51 crc kubenswrapper[4811]: W0128 16:07:51.220914 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod78c9dbb9_8fc2_4380_9daf_19ea4d28b9f3.slice/crio-1e1f2d7c5a7c623727a2e6adbaf843a4bd7eb994b2bb6e3484db1a5e79c79857 WatchSource:0}: Error finding container 1e1f2d7c5a7c623727a2e6adbaf843a4bd7eb994b2bb6e3484db1a5e79c79857: Status 404 returned error can't find the container with id 1e1f2d7c5a7c623727a2e6adbaf843a4bd7eb994b2bb6e3484db1a5e79c79857 Jan 28 16:07:51 crc kubenswrapper[4811]: I0128 16:07:51.494366 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76bb4997-f96mf"] Jan 28 16:07:51 crc kubenswrapper[4811]: I0128 16:07:51.563544 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86bf77c6df-67qct" Jan 28 16:07:51 crc kubenswrapper[4811]: I0128 16:07:51.641058 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 28 16:07:51 crc kubenswrapper[4811]: I0128 16:07:51.743411 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d43f6e19-596f-492a-a1b8-891a70fb2009-config\") pod \"d43f6e19-596f-492a-a1b8-891a70fb2009\" (UID: \"d43f6e19-596f-492a-a1b8-891a70fb2009\") " Jan 28 16:07:51 crc kubenswrapper[4811]: I0128 16:07:51.744073 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7k6dk\" (UniqueName: \"kubernetes.io/projected/d43f6e19-596f-492a-a1b8-891a70fb2009-kube-api-access-7k6dk\") pod \"d43f6e19-596f-492a-a1b8-891a70fb2009\" (UID: \"d43f6e19-596f-492a-a1b8-891a70fb2009\") " Jan 28 16:07:51 crc kubenswrapper[4811]: I0128 16:07:51.744165 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d43f6e19-596f-492a-a1b8-891a70fb2009-dns-svc\") pod \"d43f6e19-596f-492a-a1b8-891a70fb2009\" (UID: \"d43f6e19-596f-492a-a1b8-891a70fb2009\") " Jan 28 16:07:51 crc kubenswrapper[4811]: I0128 16:07:51.744219 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d43f6e19-596f-492a-a1b8-891a70fb2009-dns-swift-storage-0\") pod \"d43f6e19-596f-492a-a1b8-891a70fb2009\" (UID: \"d43f6e19-596f-492a-a1b8-891a70fb2009\") " Jan 28 16:07:51 crc kubenswrapper[4811]: I0128 16:07:51.744297 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d43f6e19-596f-492a-a1b8-891a70fb2009-ovsdbserver-sb\") pod \"d43f6e19-596f-492a-a1b8-891a70fb2009\" (UID: \"d43f6e19-596f-492a-a1b8-891a70fb2009\") " Jan 28 16:07:51 crc kubenswrapper[4811]: I0128 16:07:51.744334 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d43f6e19-596f-492a-a1b8-891a70fb2009-ovsdbserver-nb\") pod \"d43f6e19-596f-492a-a1b8-891a70fb2009\" (UID: \"d43f6e19-596f-492a-a1b8-891a70fb2009\") " Jan 28 16:07:51 crc kubenswrapper[4811]: I0128 16:07:51.758788 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d43f6e19-596f-492a-a1b8-891a70fb2009-kube-api-access-7k6dk" (OuterVolumeSpecName: "kube-api-access-7k6dk") pod "d43f6e19-596f-492a-a1b8-891a70fb2009" (UID: "d43f6e19-596f-492a-a1b8-891a70fb2009"). InnerVolumeSpecName "kube-api-access-7k6dk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:07:51 crc kubenswrapper[4811]: I0128 16:07:51.794767 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d43f6e19-596f-492a-a1b8-891a70fb2009-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d43f6e19-596f-492a-a1b8-891a70fb2009" (UID: "d43f6e19-596f-492a-a1b8-891a70fb2009"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:07:51 crc kubenswrapper[4811]: I0128 16:07:51.849503 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7k6dk\" (UniqueName: \"kubernetes.io/projected/d43f6e19-596f-492a-a1b8-891a70fb2009-kube-api-access-7k6dk\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:51 crc kubenswrapper[4811]: I0128 16:07:51.849532 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d43f6e19-596f-492a-a1b8-891a70fb2009-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:51 crc kubenswrapper[4811]: I0128 16:07:51.854333 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d43f6e19-596f-492a-a1b8-891a70fb2009-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d43f6e19-596f-492a-a1b8-891a70fb2009" (UID: "d43f6e19-596f-492a-a1b8-891a70fb2009"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:07:51 crc kubenswrapper[4811]: I0128 16:07:51.855443 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d43f6e19-596f-492a-a1b8-891a70fb2009-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d43f6e19-596f-492a-a1b8-891a70fb2009" (UID: "d43f6e19-596f-492a-a1b8-891a70fb2009"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:07:51 crc kubenswrapper[4811]: I0128 16:07:51.855811 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d43f6e19-596f-492a-a1b8-891a70fb2009-config" (OuterVolumeSpecName: "config") pod "d43f6e19-596f-492a-a1b8-891a70fb2009" (UID: "d43f6e19-596f-492a-a1b8-891a70fb2009"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:07:51 crc kubenswrapper[4811]: I0128 16:07:51.861092 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d43f6e19-596f-492a-a1b8-891a70fb2009-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d43f6e19-596f-492a-a1b8-891a70fb2009" (UID: "d43f6e19-596f-492a-a1b8-891a70fb2009"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:07:51 crc kubenswrapper[4811]: I0128 16:07:51.867840 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3","Type":"ContainerStarted","Data":"1e1f2d7c5a7c623727a2e6adbaf843a4bd7eb994b2bb6e3484db1a5e79c79857"} Jan 28 16:07:51 crc kubenswrapper[4811]: I0128 16:07:51.872808 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a32825c7-f6c1-40aa-b486-c43ff50a1a5a","Type":"ContainerStarted","Data":"d533f97b120d425cf077c8878d3a807a1000e8604b5e533426d63edcf5ce9a54"} Jan 28 16:07:51 crc kubenswrapper[4811]: I0128 16:07:51.882131 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76bb4997-f96mf" event={"ID":"48668d6d-46c0-4e4c-91c5-2fdab113cd49","Type":"ContainerStarted","Data":"5d79d6265a23eaaeeb6ed5a526de7ef4a963c5796f02b62eac0a961e01ba7a0d"} Jan 28 16:07:51 crc kubenswrapper[4811]: I0128 16:07:51.885868 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86bf77c6df-67qct" event={"ID":"d43f6e19-596f-492a-a1b8-891a70fb2009","Type":"ContainerDied","Data":"721f2d4239a688dc7d1af5b70c2339982083a7efce930cedd441280d47f0b918"} Jan 28 16:07:51 crc kubenswrapper[4811]: I0128 16:07:51.885908 4811 scope.go:117] "RemoveContainer" containerID="70328cca8f3d8ad6e4a717aee431938b7fa1d79e6b9d348666afa68c1cad902d" Jan 28 16:07:51 crc kubenswrapper[4811]: I0128 16:07:51.886080 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86bf77c6df-67qct" Jan 28 16:07:51 crc kubenswrapper[4811]: I0128 16:07:51.942247 4811 scope.go:117] "RemoveContainer" containerID="b094ad27de35f1ce294418af9b1c80ef93f013124b5d89fc60b9f77efd735918" Jan 28 16:07:51 crc kubenswrapper[4811]: I0128 16:07:51.951568 4811 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d43f6e19-596f-492a-a1b8-891a70fb2009-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:51 crc kubenswrapper[4811]: I0128 16:07:51.951594 4811 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d43f6e19-596f-492a-a1b8-891a70fb2009-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:51 crc kubenswrapper[4811]: I0128 16:07:51.951607 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d43f6e19-596f-492a-a1b8-891a70fb2009-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:51 crc kubenswrapper[4811]: I0128 16:07:51.951620 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d43f6e19-596f-492a-a1b8-891a70fb2009-config\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:51 crc kubenswrapper[4811]: I0128 16:07:51.954179 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86bf77c6df-67qct"] Jan 28 16:07:51 crc kubenswrapper[4811]: I0128 16:07:51.962619 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86bf77c6df-67qct"] Jan 28 16:07:52 crc kubenswrapper[4811]: I0128 16:07:52.356276 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d43f6e19-596f-492a-a1b8-891a70fb2009" path="/var/lib/kubelet/pods/d43f6e19-596f-492a-a1b8-891a70fb2009/volumes" Jan 28 16:07:52 crc kubenswrapper[4811]: I0128 16:07:52.745010 4811 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jan 28 16:07:52 crc kubenswrapper[4811]: I0128 16:07:52.922849 4811 generic.go:334] "Generic (PLEG): container finished" podID="48668d6d-46c0-4e4c-91c5-2fdab113cd49" containerID="c2d59a1c65e11007472028e2375789a2e4c67ec78ca90dc2124e3bd61e66bb50" exitCode=0 Jan 28 16:07:52 crc kubenswrapper[4811]: I0128 16:07:52.922948 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76bb4997-f96mf" event={"ID":"48668d6d-46c0-4e4c-91c5-2fdab113cd49","Type":"ContainerDied","Data":"c2d59a1c65e11007472028e2375789a2e4c67ec78ca90dc2124e3bd61e66bb50"} Jan 28 16:07:52 crc kubenswrapper[4811]: I0128 16:07:52.988983 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a32825c7-f6c1-40aa-b486-c43ff50a1a5a","Type":"ContainerStarted","Data":"76626812a6d42f388477a34d5ac923f8932ae0e6510d09bb40f8d850e9e032b9"} Jan 28 16:07:53 crc kubenswrapper[4811]: I0128 16:07:53.084524 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-56f5cb95d8-wm7lj" Jan 28 16:07:53 crc kubenswrapper[4811]: I0128 16:07:53.429158 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-56f5cb95d8-wm7lj" Jan 28 16:07:54 crc kubenswrapper[4811]: I0128 16:07:54.006176 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3","Type":"ContainerStarted","Data":"dc871bd75052eeee558d1a1018cc059ccd3f3d067be0bc122f35ed3ecd6b0a8d"} Jan 28 16:07:54 crc kubenswrapper[4811]: I0128 16:07:54.016580 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a32825c7-f6c1-40aa-b486-c43ff50a1a5a","Type":"ContainerStarted","Data":"4ee33ef690e1e83d61b23b0a1771e16acc639924e2c4f26843dacfa0defaa373"} Jan 28 16:07:54 crc kubenswrapper[4811]: I0128 16:07:54.016770 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="a32825c7-f6c1-40aa-b486-c43ff50a1a5a" containerName="cinder-api-log" containerID="cri-o://76626812a6d42f388477a34d5ac923f8932ae0e6510d09bb40f8d850e9e032b9" gracePeriod=30 Jan 28 16:07:54 crc kubenswrapper[4811]: I0128 16:07:54.017063 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Jan 28 16:07:54 crc kubenswrapper[4811]: I0128 16:07:54.017366 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="a32825c7-f6c1-40aa-b486-c43ff50a1a5a" containerName="cinder-api" containerID="cri-o://4ee33ef690e1e83d61b23b0a1771e16acc639924e2c4f26843dacfa0defaa373" gracePeriod=30 Jan 28 16:07:54 crc kubenswrapper[4811]: I0128 16:07:54.033212 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76bb4997-f96mf" event={"ID":"48668d6d-46c0-4e4c-91c5-2fdab113cd49","Type":"ContainerStarted","Data":"66cff33cdecd5d71847e3d067ebfd0c1c23d080eab92a61d91c8216802639ab1"} Jan 28 16:07:54 crc kubenswrapper[4811]: I0128 16:07:54.110836 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-76bb4997-f96mf" podStartSLOduration=4.110814371 podStartE2EDuration="4.110814371s" podCreationTimestamp="2026-01-28 16:07:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:07:54.103469162 +0000 UTC m=+1366.857832745" 
watchObservedRunningTime="2026-01-28 16:07:54.110814371 +0000 UTC m=+1366.865177954" Jan 28 16:07:54 crc kubenswrapper[4811]: I0128 16:07:54.110943 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.110939564 podStartE2EDuration="4.110939564s" podCreationTimestamp="2026-01-28 16:07:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:07:54.067010326 +0000 UTC m=+1366.821373919" watchObservedRunningTime="2026-01-28 16:07:54.110939564 +0000 UTC m=+1366.865303147" Jan 28 16:07:54 crc kubenswrapper[4811]: I0128 16:07:54.906793 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.030703 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-logs\") pod \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\" (UID: \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\") " Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.030884 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qdd2f\" (UniqueName: \"kubernetes.io/projected/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-kube-api-access-qdd2f\") pod \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\" (UID: \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\") " Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.030928 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-config-data-custom\") pod \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\" (UID: \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\") " Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.030965 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-config-data\") pod \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\" (UID: \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\") " Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.030994 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-combined-ca-bundle\") pod \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\" (UID: \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\") " Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.031017 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-etc-machine-id\") pod \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\" (UID: \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\") " Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.031045 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-scripts\") pod \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\" (UID: \"a32825c7-f6c1-40aa-b486-c43ff50a1a5a\") " Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.031175 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-logs" (OuterVolumeSpecName: "logs") pod "a32825c7-f6c1-40aa-b486-c43ff50a1a5a" 
(UID: "a32825c7-f6c1-40aa-b486-c43ff50a1a5a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.031393 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-logs\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.036525 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "a32825c7-f6c1-40aa-b486-c43ff50a1a5a" (UID: "a32825c7-f6c1-40aa-b486-c43ff50a1a5a"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.041514 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-scripts" (OuterVolumeSpecName: "scripts") pod "a32825c7-f6c1-40aa-b486-c43ff50a1a5a" (UID: "a32825c7-f6c1-40aa-b486-c43ff50a1a5a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.041565 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-kube-api-access-qdd2f" (OuterVolumeSpecName: "kube-api-access-qdd2f") pod "a32825c7-f6c1-40aa-b486-c43ff50a1a5a" (UID: "a32825c7-f6c1-40aa-b486-c43ff50a1a5a"). InnerVolumeSpecName "kube-api-access-qdd2f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.045603 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a32825c7-f6c1-40aa-b486-c43ff50a1a5a" (UID: "a32825c7-f6c1-40aa-b486-c43ff50a1a5a"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.074189 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3","Type":"ContainerStarted","Data":"3d2a28188f8937248b4f1b05bda011c3bccbaec5c7f66c1a9028d1062e2ae26f"} Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.081562 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a32825c7-f6c1-40aa-b486-c43ff50a1a5a" (UID: "a32825c7-f6c1-40aa-b486-c43ff50a1a5a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.092718 4811 generic.go:334] "Generic (PLEG): container finished" podID="a32825c7-f6c1-40aa-b486-c43ff50a1a5a" containerID="4ee33ef690e1e83d61b23b0a1771e16acc639924e2c4f26843dacfa0defaa373" exitCode=0 Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.092966 4811 generic.go:334] "Generic (PLEG): container finished" podID="a32825c7-f6c1-40aa-b486-c43ff50a1a5a" containerID="76626812a6d42f388477a34d5ac923f8932ae0e6510d09bb40f8d850e9e032b9" exitCode=143 Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.093907 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.094119 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a32825c7-f6c1-40aa-b486-c43ff50a1a5a","Type":"ContainerDied","Data":"4ee33ef690e1e83d61b23b0a1771e16acc639924e2c4f26843dacfa0defaa373"} Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.094220 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-76bb4997-f96mf" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.094297 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a32825c7-f6c1-40aa-b486-c43ff50a1a5a","Type":"ContainerDied","Data":"76626812a6d42f388477a34d5ac923f8932ae0e6510d09bb40f8d850e9e032b9"} Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.094368 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a32825c7-f6c1-40aa-b486-c43ff50a1a5a","Type":"ContainerDied","Data":"d533f97b120d425cf077c8878d3a807a1000e8604b5e533426d63edcf5ce9a54"} Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.094450 4811 scope.go:117] "RemoveContainer" containerID="4ee33ef690e1e83d61b23b0a1771e16acc639924e2c4f26843dacfa0defaa373" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.110914 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.214244871 podStartE2EDuration="5.110894303s" podCreationTimestamp="2026-01-28 16:07:50 +0000 UTC" firstStartedPulling="2026-01-28 16:07:51.229034781 +0000 UTC m=+1363.983398374" lastFinishedPulling="2026-01-28 16:07:52.125684223 +0000 UTC m=+1364.880047806" observedRunningTime="2026-01-28 16:07:55.098715323 +0000 UTC m=+1367.853078906" watchObservedRunningTime="2026-01-28 16:07:55.110894303 +0000 UTC m=+1367.865257886" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.126136 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-config-data" (OuterVolumeSpecName: "config-data") pod "a32825c7-f6c1-40aa-b486-c43ff50a1a5a" (UID: "a32825c7-f6c1-40aa-b486-c43ff50a1a5a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.137011 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qdd2f\" (UniqueName: \"kubernetes.io/projected/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-kube-api-access-qdd2f\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.137041 4811 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.137051 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.137062 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.137070 4811 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.137079 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a32825c7-f6c1-40aa-b486-c43ff50a1a5a-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.208325 4811 scope.go:117] "RemoveContainer" containerID="76626812a6d42f388477a34d5ac923f8932ae0e6510d09bb40f8d850e9e032b9" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.270392 4811 scope.go:117] "RemoveContainer" containerID="4ee33ef690e1e83d61b23b0a1771e16acc639924e2c4f26843dacfa0defaa373" Jan 28 16:07:55 crc kubenswrapper[4811]: E0128 16:07:55.270936 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ee33ef690e1e83d61b23b0a1771e16acc639924e2c4f26843dacfa0defaa373\": container with ID starting with 4ee33ef690e1e83d61b23b0a1771e16acc639924e2c4f26843dacfa0defaa373 not found: ID does not exist" containerID="4ee33ef690e1e83d61b23b0a1771e16acc639924e2c4f26843dacfa0defaa373" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.270971 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ee33ef690e1e83d61b23b0a1771e16acc639924e2c4f26843dacfa0defaa373"} err="failed to get container status \"4ee33ef690e1e83d61b23b0a1771e16acc639924e2c4f26843dacfa0defaa373\": rpc error: code = NotFound desc = could not find container \"4ee33ef690e1e83d61b23b0a1771e16acc639924e2c4f26843dacfa0defaa373\": container with ID starting with 4ee33ef690e1e83d61b23b0a1771e16acc639924e2c4f26843dacfa0defaa373 not found: ID does not exist" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.270997 4811 scope.go:117] "RemoveContainer" containerID="76626812a6d42f388477a34d5ac923f8932ae0e6510d09bb40f8d850e9e032b9" Jan 28 16:07:55 crc kubenswrapper[4811]: E0128 16:07:55.271322 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76626812a6d42f388477a34d5ac923f8932ae0e6510d09bb40f8d850e9e032b9\": container with ID starting with 
76626812a6d42f388477a34d5ac923f8932ae0e6510d09bb40f8d850e9e032b9 not found: ID does not exist" containerID="76626812a6d42f388477a34d5ac923f8932ae0e6510d09bb40f8d850e9e032b9" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.271373 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76626812a6d42f388477a34d5ac923f8932ae0e6510d09bb40f8d850e9e032b9"} err="failed to get container status \"76626812a6d42f388477a34d5ac923f8932ae0e6510d09bb40f8d850e9e032b9\": rpc error: code = NotFound desc = could not find container \"76626812a6d42f388477a34d5ac923f8932ae0e6510d09bb40f8d850e9e032b9\": container with ID starting with 76626812a6d42f388477a34d5ac923f8932ae0e6510d09bb40f8d850e9e032b9 not found: ID does not exist" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.271403 4811 scope.go:117] "RemoveContainer" containerID="4ee33ef690e1e83d61b23b0a1771e16acc639924e2c4f26843dacfa0defaa373" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.272537 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ee33ef690e1e83d61b23b0a1771e16acc639924e2c4f26843dacfa0defaa373"} err="failed to get container status \"4ee33ef690e1e83d61b23b0a1771e16acc639924e2c4f26843dacfa0defaa373\": rpc error: code = NotFound desc = could not find container \"4ee33ef690e1e83d61b23b0a1771e16acc639924e2c4f26843dacfa0defaa373\": container with ID starting with 4ee33ef690e1e83d61b23b0a1771e16acc639924e2c4f26843dacfa0defaa373 not found: ID does not exist" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.272566 4811 scope.go:117] "RemoveContainer" containerID="76626812a6d42f388477a34d5ac923f8932ae0e6510d09bb40f8d850e9e032b9" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.275359 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76626812a6d42f388477a34d5ac923f8932ae0e6510d09bb40f8d850e9e032b9"} err="failed to get container status \"76626812a6d42f388477a34d5ac923f8932ae0e6510d09bb40f8d850e9e032b9\": rpc error: code = NotFound desc = could not find container \"76626812a6d42f388477a34d5ac923f8932ae0e6510d09bb40f8d850e9e032b9\": container with ID starting with 76626812a6d42f388477a34d5ac923f8932ae0e6510d09bb40f8d850e9e032b9 not found: ID does not exist" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.467617 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.486151 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.501846 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Jan 28 16:07:55 crc kubenswrapper[4811]: E0128 16:07:55.502331 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d43f6e19-596f-492a-a1b8-891a70fb2009" containerName="init" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.502345 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d43f6e19-596f-492a-a1b8-891a70fb2009" containerName="init" Jan 28 16:07:55 crc kubenswrapper[4811]: E0128 16:07:55.502358 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d43f6e19-596f-492a-a1b8-891a70fb2009" containerName="dnsmasq-dns" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.502364 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d43f6e19-596f-492a-a1b8-891a70fb2009" containerName="dnsmasq-dns" Jan 28 16:07:55 crc 
kubenswrapper[4811]: E0128 16:07:55.502377 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a32825c7-f6c1-40aa-b486-c43ff50a1a5a" containerName="cinder-api-log" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.502383 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="a32825c7-f6c1-40aa-b486-c43ff50a1a5a" containerName="cinder-api-log" Jan 28 16:07:55 crc kubenswrapper[4811]: E0128 16:07:55.502397 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a32825c7-f6c1-40aa-b486-c43ff50a1a5a" containerName="cinder-api" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.502403 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="a32825c7-f6c1-40aa-b486-c43ff50a1a5a" containerName="cinder-api" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.502578 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d43f6e19-596f-492a-a1b8-891a70fb2009" containerName="dnsmasq-dns" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.502595 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="a32825c7-f6c1-40aa-b486-c43ff50a1a5a" containerName="cinder-api" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.502605 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="a32825c7-f6c1-40aa-b486-c43ff50a1a5a" containerName="cinder-api-log" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.503626 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.507379 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.507799 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.508504 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.522856 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.525223 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.659330 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " pod="openstack/cinder-api-0" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.659423 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-config-data-custom\") pod \"cinder-api-0\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " pod="openstack/cinder-api-0" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.659462 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/39ddd574-84b9-4065-9d72-5183fe430d4d-logs\") pod \"cinder-api-0\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " pod="openstack/cinder-api-0" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.659499 4811 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/39ddd574-84b9-4065-9d72-5183fe430d4d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " pod="openstack/cinder-api-0" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.659515 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-scripts\") pod \"cinder-api-0\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " pod="openstack/cinder-api-0" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.659539 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-config-data\") pod \"cinder-api-0\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " pod="openstack/cinder-api-0" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.659557 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " pod="openstack/cinder-api-0" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.659594 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vj2f\" (UniqueName: \"kubernetes.io/projected/39ddd574-84b9-4065-9d72-5183fe430d4d-kube-api-access-8vj2f\") pod \"cinder-api-0\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " pod="openstack/cinder-api-0" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.659612 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-public-tls-certs\") pod \"cinder-api-0\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " pod="openstack/cinder-api-0" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.760687 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " pod="openstack/cinder-api-0" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.760736 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vj2f\" (UniqueName: \"kubernetes.io/projected/39ddd574-84b9-4065-9d72-5183fe430d4d-kube-api-access-8vj2f\") pod \"cinder-api-0\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " pod="openstack/cinder-api-0" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.760754 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-public-tls-certs\") pod \"cinder-api-0\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " pod="openstack/cinder-api-0" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.760833 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-internal-tls-certs\") pod 
\"cinder-api-0\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " pod="openstack/cinder-api-0" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.760890 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-config-data-custom\") pod \"cinder-api-0\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " pod="openstack/cinder-api-0" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.760910 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/39ddd574-84b9-4065-9d72-5183fe430d4d-logs\") pod \"cinder-api-0\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " pod="openstack/cinder-api-0" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.760941 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/39ddd574-84b9-4065-9d72-5183fe430d4d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " pod="openstack/cinder-api-0" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.760955 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-scripts\") pod \"cinder-api-0\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " pod="openstack/cinder-api-0" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.760976 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-config-data\") pod \"cinder-api-0\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " pod="openstack/cinder-api-0" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.761256 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/39ddd574-84b9-4065-9d72-5183fe430d4d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " pod="openstack/cinder-api-0" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.761765 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/39ddd574-84b9-4065-9d72-5183fe430d4d-logs\") pod \"cinder-api-0\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " pod="openstack/cinder-api-0" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.766512 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " pod="openstack/cinder-api-0" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.767949 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-scripts\") pod \"cinder-api-0\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " pod="openstack/cinder-api-0" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.768034 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-public-tls-certs\") pod \"cinder-api-0\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " pod="openstack/cinder-api-0" 
Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.768659 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " pod="openstack/cinder-api-0" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.771684 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-config-data-custom\") pod \"cinder-api-0\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " pod="openstack/cinder-api-0" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.772175 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-config-data\") pod \"cinder-api-0\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " pod="openstack/cinder-api-0" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.782459 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vj2f\" (UniqueName: \"kubernetes.io/projected/39ddd574-84b9-4065-9d72-5183fe430d4d-kube-api-access-8vj2f\") pod \"cinder-api-0\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " pod="openstack/cinder-api-0" Jan 28 16:07:55 crc kubenswrapper[4811]: I0128 16:07:55.831884 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.123444 4811 generic.go:334] "Generic (PLEG): container finished" podID="55b5b721-9672-447d-b4d8-b53b7de5ce2f" containerID="9163fd29bb4d295b9f0469a6d83cf56f2bde17afe8a70e37c7ef65ad1e87b52d" exitCode=0 Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.123529 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-vhxrm" event={"ID":"55b5b721-9672-447d-b4d8-b53b7de5ce2f","Type":"ContainerDied","Data":"9163fd29bb4d295b9f0469a6d83cf56f2bde17afe8a70e37c7ef65ad1e87b52d"} Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.238278 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-78bd467b68-h25ls" Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.372308 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a32825c7-f6c1-40aa-b486-c43ff50a1a5a" path="/var/lib/kubelet/pods/a32825c7-f6c1-40aa-b486-c43ff50a1a5a/volumes" Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.481035 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.588943 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-78bd467b68-h25ls" Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.657983 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-56f5cb95d8-wm7lj"] Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.658377 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-56f5cb95d8-wm7lj" podUID="dc42d256-a716-4ae2-8107-78e247e0efe1" containerName="barbican-api-log" containerID="cri-o://0bb77a25b0308a749af93fab382369abc173ccbd80449733cd3219c18edb43d2" gracePeriod=30 Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.658574 4811 kuberuntime_container.go:808] "Killing container 
with a grace period" pod="openstack/barbican-api-56f5cb95d8-wm7lj" podUID="dc42d256-a716-4ae2-8107-78e247e0efe1" containerName="barbican-api" containerID="cri-o://3fb93715e6a8f6b6e9bd8c64de082cb1ad9518c738746cfd26752b8c3d5b2754" gracePeriod=30 Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.681037 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-56fd96c5b5-d2r9l"] Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.685001 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-56fd96c5b5-d2r9l" Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.690002 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.690277 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.690456 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.723618 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-56fd96c5b5-d2r9l"] Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.789494 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f087b3b8-82d6-4cb1-a883-21a2f0c40580-internal-tls-certs\") pod \"swift-proxy-56fd96c5b5-d2r9l\" (UID: \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\") " pod="openstack/swift-proxy-56fd96c5b5-d2r9l" Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.789555 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f087b3b8-82d6-4cb1-a883-21a2f0c40580-public-tls-certs\") pod \"swift-proxy-56fd96c5b5-d2r9l\" (UID: \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\") " pod="openstack/swift-proxy-56fd96c5b5-d2r9l" Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.789616 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f087b3b8-82d6-4cb1-a883-21a2f0c40580-etc-swift\") pod \"swift-proxy-56fd96c5b5-d2r9l\" (UID: \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\") " pod="openstack/swift-proxy-56fd96c5b5-d2r9l" Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.789640 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ws7lj\" (UniqueName: \"kubernetes.io/projected/f087b3b8-82d6-4cb1-a883-21a2f0c40580-kube-api-access-ws7lj\") pod \"swift-proxy-56fd96c5b5-d2r9l\" (UID: \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\") " pod="openstack/swift-proxy-56fd96c5b5-d2r9l" Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.789764 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f087b3b8-82d6-4cb1-a883-21a2f0c40580-combined-ca-bundle\") pod \"swift-proxy-56fd96c5b5-d2r9l\" (UID: \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\") " pod="openstack/swift-proxy-56fd96c5b5-d2r9l" Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.789836 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/f087b3b8-82d6-4cb1-a883-21a2f0c40580-log-httpd\") pod \"swift-proxy-56fd96c5b5-d2r9l\" (UID: \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\") " pod="openstack/swift-proxy-56fd96c5b5-d2r9l" Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.789859 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f087b3b8-82d6-4cb1-a883-21a2f0c40580-config-data\") pod \"swift-proxy-56fd96c5b5-d2r9l\" (UID: \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\") " pod="openstack/swift-proxy-56fd96c5b5-d2r9l" Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.789879 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f087b3b8-82d6-4cb1-a883-21a2f0c40580-run-httpd\") pod \"swift-proxy-56fd96c5b5-d2r9l\" (UID: \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\") " pod="openstack/swift-proxy-56fd96c5b5-d2r9l" Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.891388 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f087b3b8-82d6-4cb1-a883-21a2f0c40580-internal-tls-certs\") pod \"swift-proxy-56fd96c5b5-d2r9l\" (UID: \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\") " pod="openstack/swift-proxy-56fd96c5b5-d2r9l" Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.891469 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f087b3b8-82d6-4cb1-a883-21a2f0c40580-public-tls-certs\") pod \"swift-proxy-56fd96c5b5-d2r9l\" (UID: \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\") " pod="openstack/swift-proxy-56fd96c5b5-d2r9l" Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.891535 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f087b3b8-82d6-4cb1-a883-21a2f0c40580-etc-swift\") pod \"swift-proxy-56fd96c5b5-d2r9l\" (UID: \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\") " pod="openstack/swift-proxy-56fd96c5b5-d2r9l" Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.891565 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ws7lj\" (UniqueName: \"kubernetes.io/projected/f087b3b8-82d6-4cb1-a883-21a2f0c40580-kube-api-access-ws7lj\") pod \"swift-proxy-56fd96c5b5-d2r9l\" (UID: \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\") " pod="openstack/swift-proxy-56fd96c5b5-d2r9l" Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.891610 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f087b3b8-82d6-4cb1-a883-21a2f0c40580-combined-ca-bundle\") pod \"swift-proxy-56fd96c5b5-d2r9l\" (UID: \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\") " pod="openstack/swift-proxy-56fd96c5b5-d2r9l" Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.891682 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f087b3b8-82d6-4cb1-a883-21a2f0c40580-log-httpd\") pod \"swift-proxy-56fd96c5b5-d2r9l\" (UID: \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\") " pod="openstack/swift-proxy-56fd96c5b5-d2r9l" Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.891744 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/f087b3b8-82d6-4cb1-a883-21a2f0c40580-config-data\") pod \"swift-proxy-56fd96c5b5-d2r9l\" (UID: \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\") " pod="openstack/swift-proxy-56fd96c5b5-d2r9l" Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.891973 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f087b3b8-82d6-4cb1-a883-21a2f0c40580-run-httpd\") pod \"swift-proxy-56fd96c5b5-d2r9l\" (UID: \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\") " pod="openstack/swift-proxy-56fd96c5b5-d2r9l" Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.892506 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f087b3b8-82d6-4cb1-a883-21a2f0c40580-run-httpd\") pod \"swift-proxy-56fd96c5b5-d2r9l\" (UID: \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\") " pod="openstack/swift-proxy-56fd96c5b5-d2r9l" Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.892818 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f087b3b8-82d6-4cb1-a883-21a2f0c40580-log-httpd\") pod \"swift-proxy-56fd96c5b5-d2r9l\" (UID: \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\") " pod="openstack/swift-proxy-56fd96c5b5-d2r9l" Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.903369 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f087b3b8-82d6-4cb1-a883-21a2f0c40580-public-tls-certs\") pod \"swift-proxy-56fd96c5b5-d2r9l\" (UID: \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\") " pod="openstack/swift-proxy-56fd96c5b5-d2r9l" Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.914869 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f087b3b8-82d6-4cb1-a883-21a2f0c40580-etc-swift\") pod \"swift-proxy-56fd96c5b5-d2r9l\" (UID: \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\") " pod="openstack/swift-proxy-56fd96c5b5-d2r9l" Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.915323 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f087b3b8-82d6-4cb1-a883-21a2f0c40580-combined-ca-bundle\") pod \"swift-proxy-56fd96c5b5-d2r9l\" (UID: \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\") " pod="openstack/swift-proxy-56fd96c5b5-d2r9l" Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.915400 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f087b3b8-82d6-4cb1-a883-21a2f0c40580-config-data\") pod \"swift-proxy-56fd96c5b5-d2r9l\" (UID: \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\") " pod="openstack/swift-proxy-56fd96c5b5-d2r9l" Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.917293 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f087b3b8-82d6-4cb1-a883-21a2f0c40580-internal-tls-certs\") pod \"swift-proxy-56fd96c5b5-d2r9l\" (UID: \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\") " pod="openstack/swift-proxy-56fd96c5b5-d2r9l" Jan 28 16:07:56 crc kubenswrapper[4811]: I0128 16:07:56.920477 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ws7lj\" (UniqueName: \"kubernetes.io/projected/f087b3b8-82d6-4cb1-a883-21a2f0c40580-kube-api-access-ws7lj\") pod \"swift-proxy-56fd96c5b5-d2r9l\" (UID: 
\"f087b3b8-82d6-4cb1-a883-21a2f0c40580\") " pod="openstack/swift-proxy-56fd96c5b5-d2r9l" Jan 28 16:07:57 crc kubenswrapper[4811]: I0128 16:07:57.028000 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-56fd96c5b5-d2r9l" Jan 28 16:07:57 crc kubenswrapper[4811]: I0128 16:07:57.182394 4811 generic.go:334] "Generic (PLEG): container finished" podID="dc42d256-a716-4ae2-8107-78e247e0efe1" containerID="0bb77a25b0308a749af93fab382369abc173ccbd80449733cd3219c18edb43d2" exitCode=143 Jan 28 16:07:57 crc kubenswrapper[4811]: I0128 16:07:57.182519 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-56f5cb95d8-wm7lj" event={"ID":"dc42d256-a716-4ae2-8107-78e247e0efe1","Type":"ContainerDied","Data":"0bb77a25b0308a749af93fab382369abc173ccbd80449733cd3219c18edb43d2"} Jan 28 16:07:57 crc kubenswrapper[4811]: I0128 16:07:57.217314 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"39ddd574-84b9-4065-9d72-5183fe430d4d","Type":"ContainerStarted","Data":"99c00358645b86ca102061603eae530b38d31f10308e163274288cef1468eb1e"} Jan 28 16:07:57 crc kubenswrapper[4811]: E0128 16:07:57.383407 4811 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod91de0bcd_1c43_42d8_8582_ba86e33b81c2.slice/crio-conmon-c766c501d7fef70ac33d495992a7f932c795028bb1028a15f2a477629ccc37be.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod91de0bcd_1c43_42d8_8582_ba86e33b81c2.slice/crio-c766c501d7fef70ac33d495992a7f932c795028bb1028a15f2a477629ccc37be.scope\": RecentStats: unable to find data in memory cache]" Jan 28 16:07:57 crc kubenswrapper[4811]: I0128 16:07:57.732372 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-vhxrm" Jan 28 16:07:57 crc kubenswrapper[4811]: I0128 16:07:57.838275 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:07:57 crc kubenswrapper[4811]: I0128 16:07:57.839306 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="28212031-6289-428a-bbed-eb898d9a07a4" containerName="ceilometer-central-agent" containerID="cri-o://73e4a1707d9fc0168adcc7f4b1504a31e255ed1bbcae448f41aa0cf72655d77d" gracePeriod=30 Jan 28 16:07:57 crc kubenswrapper[4811]: I0128 16:07:57.840993 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="28212031-6289-428a-bbed-eb898d9a07a4" containerName="sg-core" containerID="cri-o://eaa89e992c4e659d34cb4bf1ae796f0d7929fb771ca4a1c07f2ddb08f4b88fac" gracePeriod=30 Jan 28 16:07:57 crc kubenswrapper[4811]: I0128 16:07:57.841080 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="28212031-6289-428a-bbed-eb898d9a07a4" containerName="proxy-httpd" containerID="cri-o://125f8f38afecb18c1fd4bd6f2f694ecc4de5184677dc4d2ac818d93a5d4df832" gracePeriod=30 Jan 28 16:07:57 crc kubenswrapper[4811]: I0128 16:07:57.841137 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="28212031-6289-428a-bbed-eb898d9a07a4" containerName="ceilometer-notification-agent" containerID="cri-o://3bf14c9bb656844812c98e50e0de0f55f868753d6514d8d8e59b710f4bcdc136" gracePeriod=30 Jan 28 16:07:57 crc kubenswrapper[4811]: I0128 16:07:57.851172 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="28212031-6289-428a-bbed-eb898d9a07a4" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.151:3000/\": EOF" Jan 28 16:07:57 crc kubenswrapper[4811]: I0128 16:07:57.928654 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55b5b721-9672-447d-b4d8-b53b7de5ce2f-combined-ca-bundle\") pod \"55b5b721-9672-447d-b4d8-b53b7de5ce2f\" (UID: \"55b5b721-9672-447d-b4d8-b53b7de5ce2f\") " Jan 28 16:07:57 crc kubenswrapper[4811]: I0128 16:07:57.928715 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qnvv8\" (UniqueName: \"kubernetes.io/projected/55b5b721-9672-447d-b4d8-b53b7de5ce2f-kube-api-access-qnvv8\") pod \"55b5b721-9672-447d-b4d8-b53b7de5ce2f\" (UID: \"55b5b721-9672-447d-b4d8-b53b7de5ce2f\") " Jan 28 16:07:57 crc kubenswrapper[4811]: I0128 16:07:57.928754 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/55b5b721-9672-447d-b4d8-b53b7de5ce2f-config\") pod \"55b5b721-9672-447d-b4d8-b53b7de5ce2f\" (UID: \"55b5b721-9672-447d-b4d8-b53b7de5ce2f\") " Jan 28 16:07:57 crc kubenswrapper[4811]: I0128 16:07:57.936747 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55b5b721-9672-447d-b4d8-b53b7de5ce2f-kube-api-access-qnvv8" (OuterVolumeSpecName: "kube-api-access-qnvv8") pod "55b5b721-9672-447d-b4d8-b53b7de5ce2f" (UID: "55b5b721-9672-447d-b4d8-b53b7de5ce2f"). InnerVolumeSpecName "kube-api-access-qnvv8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:07:57 crc kubenswrapper[4811]: I0128 16:07:57.972795 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55b5b721-9672-447d-b4d8-b53b7de5ce2f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "55b5b721-9672-447d-b4d8-b53b7de5ce2f" (UID: "55b5b721-9672-447d-b4d8-b53b7de5ce2f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.005337 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55b5b721-9672-447d-b4d8-b53b7de5ce2f-config" (OuterVolumeSpecName: "config") pod "55b5b721-9672-447d-b4d8-b53b7de5ce2f" (UID: "55b5b721-9672-447d-b4d8-b53b7de5ce2f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.036135 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55b5b721-9672-447d-b4d8-b53b7de5ce2f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.036202 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qnvv8\" (UniqueName: \"kubernetes.io/projected/55b5b721-9672-447d-b4d8-b53b7de5ce2f-kube-api-access-qnvv8\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.036223 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/55b5b721-9672-447d-b4d8-b53b7de5ce2f-config\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.226933 4811 generic.go:334] "Generic (PLEG): container finished" podID="91de0bcd-1c43-42d8-8582-ba86e33b81c2" containerID="c766c501d7fef70ac33d495992a7f932c795028bb1028a15f2a477629ccc37be" exitCode=0 Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.227013 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-bsvjv" event={"ID":"91de0bcd-1c43-42d8-8582-ba86e33b81c2","Type":"ContainerDied","Data":"c766c501d7fef70ac33d495992a7f932c795028bb1028a15f2a477629ccc37be"} Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.231503 4811 generic.go:334] "Generic (PLEG): container finished" podID="28212031-6289-428a-bbed-eb898d9a07a4" containerID="125f8f38afecb18c1fd4bd6f2f694ecc4de5184677dc4d2ac818d93a5d4df832" exitCode=0 Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.231535 4811 generic.go:334] "Generic (PLEG): container finished" podID="28212031-6289-428a-bbed-eb898d9a07a4" containerID="eaa89e992c4e659d34cb4bf1ae796f0d7929fb771ca4a1c07f2ddb08f4b88fac" exitCode=2 Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.231593 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"28212031-6289-428a-bbed-eb898d9a07a4","Type":"ContainerDied","Data":"125f8f38afecb18c1fd4bd6f2f694ecc4de5184677dc4d2ac818d93a5d4df832"} Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.231618 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"28212031-6289-428a-bbed-eb898d9a07a4","Type":"ContainerDied","Data":"eaa89e992c4e659d34cb4bf1ae796f0d7929fb771ca4a1c07f2ddb08f4b88fac"} Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.233994 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-vhxrm" 
event={"ID":"55b5b721-9672-447d-b4d8-b53b7de5ce2f","Type":"ContainerDied","Data":"8713df3aaa5d14c4f39a8ad7a10591044fb086b3c3dc20dca00a98ed0d2fd962"} Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.234027 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8713df3aaa5d14c4f39a8ad7a10591044fb086b3c3dc20dca00a98ed0d2fd962" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.234096 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-vhxrm" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.249219 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"39ddd574-84b9-4065-9d72-5183fe430d4d","Type":"ContainerStarted","Data":"1e0db5f5c865912285b18890df2afbfd3dcd0f3d6c9ece1eee5e5a4b85b96528"} Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.416715 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76bb4997-f96mf"] Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.417170 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-76bb4997-f96mf" podUID="48668d6d-46c0-4e4c-91c5-2fdab113cd49" containerName="dnsmasq-dns" containerID="cri-o://66cff33cdecd5d71847e3d067ebfd0c1c23d080eab92a61d91c8216802639ab1" gracePeriod=10 Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.435871 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-76bb4997-f96mf" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.444010 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-775d8bf54b-pxqbl"] Jan 28 16:07:58 crc kubenswrapper[4811]: E0128 16:07:58.444415 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55b5b721-9672-447d-b4d8-b53b7de5ce2f" containerName="neutron-db-sync" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.444453 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="55b5b721-9672-447d-b4d8-b53b7de5ce2f" containerName="neutron-db-sync" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.445923 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="55b5b721-9672-447d-b4d8-b53b7de5ce2f" containerName="neutron-db-sync" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.446845 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-775d8bf54b-pxqbl" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.451653 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.452402 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.452751 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-nfgxd" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.459309 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.482285 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-775d8bf54b-pxqbl"] Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.506883 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5957fff8cc-srbt5"] Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.513684 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5957fff8cc-srbt5" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.531096 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5957fff8cc-srbt5"] Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.647213 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-ovsdbserver-nb\") pod \"dnsmasq-dns-5957fff8cc-srbt5\" (UID: \"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb\") " pod="openstack/dnsmasq-dns-5957fff8cc-srbt5" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.647328 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-config\") pod \"dnsmasq-dns-5957fff8cc-srbt5\" (UID: \"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb\") " pod="openstack/dnsmasq-dns-5957fff8cc-srbt5" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.647365 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jpggq\" (UniqueName: \"kubernetes.io/projected/1a4cc403-44d2-4abe-a2fa-c40a29b5bab4-kube-api-access-jpggq\") pod \"neutron-775d8bf54b-pxqbl\" (UID: \"1a4cc403-44d2-4abe-a2fa-c40a29b5bab4\") " pod="openstack/neutron-775d8bf54b-pxqbl" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.647400 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a4cc403-44d2-4abe-a2fa-c40a29b5bab4-combined-ca-bundle\") pod \"neutron-775d8bf54b-pxqbl\" (UID: \"1a4cc403-44d2-4abe-a2fa-c40a29b5bab4\") " pod="openstack/neutron-775d8bf54b-pxqbl" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.647453 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/1a4cc403-44d2-4abe-a2fa-c40a29b5bab4-httpd-config\") pod \"neutron-775d8bf54b-pxqbl\" (UID: \"1a4cc403-44d2-4abe-a2fa-c40a29b5bab4\") " pod="openstack/neutron-775d8bf54b-pxqbl" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.647481 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-ovsdbserver-sb\") pod \"dnsmasq-dns-5957fff8cc-srbt5\" (UID: \"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb\") " pod="openstack/dnsmasq-dns-5957fff8cc-srbt5" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.647573 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-dns-swift-storage-0\") pod \"dnsmasq-dns-5957fff8cc-srbt5\" (UID: \"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb\") " pod="openstack/dnsmasq-dns-5957fff8cc-srbt5" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.647635 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a4cc403-44d2-4abe-a2fa-c40a29b5bab4-ovndb-tls-certs\") pod \"neutron-775d8bf54b-pxqbl\" (UID: \"1a4cc403-44d2-4abe-a2fa-c40a29b5bab4\") " pod="openstack/neutron-775d8bf54b-pxqbl" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.647666 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lm7n6\" (UniqueName: \"kubernetes.io/projected/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-kube-api-access-lm7n6\") pod \"dnsmasq-dns-5957fff8cc-srbt5\" (UID: \"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb\") " pod="openstack/dnsmasq-dns-5957fff8cc-srbt5" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.647743 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-dns-svc\") pod \"dnsmasq-dns-5957fff8cc-srbt5\" (UID: \"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb\") " pod="openstack/dnsmasq-dns-5957fff8cc-srbt5" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.647806 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1a4cc403-44d2-4abe-a2fa-c40a29b5bab4-config\") pod \"neutron-775d8bf54b-pxqbl\" (UID: \"1a4cc403-44d2-4abe-a2fa-c40a29b5bab4\") " pod="openstack/neutron-775d8bf54b-pxqbl" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.666545 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-56fd96c5b5-d2r9l"] Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.750902 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-dns-swift-storage-0\") pod \"dnsmasq-dns-5957fff8cc-srbt5\" (UID: \"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb\") " pod="openstack/dnsmasq-dns-5957fff8cc-srbt5" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.750957 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a4cc403-44d2-4abe-a2fa-c40a29b5bab4-ovndb-tls-certs\") pod \"neutron-775d8bf54b-pxqbl\" (UID: \"1a4cc403-44d2-4abe-a2fa-c40a29b5bab4\") " pod="openstack/neutron-775d8bf54b-pxqbl" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.750984 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lm7n6\" (UniqueName: \"kubernetes.io/projected/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-kube-api-access-lm7n6\") pod \"dnsmasq-dns-5957fff8cc-srbt5\" (UID: 
\"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb\") " pod="openstack/dnsmasq-dns-5957fff8cc-srbt5" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.751037 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-dns-svc\") pod \"dnsmasq-dns-5957fff8cc-srbt5\" (UID: \"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb\") " pod="openstack/dnsmasq-dns-5957fff8cc-srbt5" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.751080 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1a4cc403-44d2-4abe-a2fa-c40a29b5bab4-config\") pod \"neutron-775d8bf54b-pxqbl\" (UID: \"1a4cc403-44d2-4abe-a2fa-c40a29b5bab4\") " pod="openstack/neutron-775d8bf54b-pxqbl" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.751144 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-ovsdbserver-nb\") pod \"dnsmasq-dns-5957fff8cc-srbt5\" (UID: \"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb\") " pod="openstack/dnsmasq-dns-5957fff8cc-srbt5" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.751243 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-config\") pod \"dnsmasq-dns-5957fff8cc-srbt5\" (UID: \"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb\") " pod="openstack/dnsmasq-dns-5957fff8cc-srbt5" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.751587 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jpggq\" (UniqueName: \"kubernetes.io/projected/1a4cc403-44d2-4abe-a2fa-c40a29b5bab4-kube-api-access-jpggq\") pod \"neutron-775d8bf54b-pxqbl\" (UID: \"1a4cc403-44d2-4abe-a2fa-c40a29b5bab4\") " pod="openstack/neutron-775d8bf54b-pxqbl" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.751632 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a4cc403-44d2-4abe-a2fa-c40a29b5bab4-combined-ca-bundle\") pod \"neutron-775d8bf54b-pxqbl\" (UID: \"1a4cc403-44d2-4abe-a2fa-c40a29b5bab4\") " pod="openstack/neutron-775d8bf54b-pxqbl" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.751685 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/1a4cc403-44d2-4abe-a2fa-c40a29b5bab4-httpd-config\") pod \"neutron-775d8bf54b-pxqbl\" (UID: \"1a4cc403-44d2-4abe-a2fa-c40a29b5bab4\") " pod="openstack/neutron-775d8bf54b-pxqbl" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.751712 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-ovsdbserver-sb\") pod \"dnsmasq-dns-5957fff8cc-srbt5\" (UID: \"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb\") " pod="openstack/dnsmasq-dns-5957fff8cc-srbt5" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.752331 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-dns-swift-storage-0\") pod \"dnsmasq-dns-5957fff8cc-srbt5\" (UID: \"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb\") " pod="openstack/dnsmasq-dns-5957fff8cc-srbt5" Jan 28 16:07:58 crc 
kubenswrapper[4811]: I0128 16:07:58.752774 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-config\") pod \"dnsmasq-dns-5957fff8cc-srbt5\" (UID: \"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb\") " pod="openstack/dnsmasq-dns-5957fff8cc-srbt5" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.754297 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-ovsdbserver-nb\") pod \"dnsmasq-dns-5957fff8cc-srbt5\" (UID: \"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb\") " pod="openstack/dnsmasq-dns-5957fff8cc-srbt5" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.754886 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-ovsdbserver-sb\") pod \"dnsmasq-dns-5957fff8cc-srbt5\" (UID: \"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb\") " pod="openstack/dnsmasq-dns-5957fff8cc-srbt5" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.755111 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-dns-svc\") pod \"dnsmasq-dns-5957fff8cc-srbt5\" (UID: \"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb\") " pod="openstack/dnsmasq-dns-5957fff8cc-srbt5" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.760236 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/1a4cc403-44d2-4abe-a2fa-c40a29b5bab4-httpd-config\") pod \"neutron-775d8bf54b-pxqbl\" (UID: \"1a4cc403-44d2-4abe-a2fa-c40a29b5bab4\") " pod="openstack/neutron-775d8bf54b-pxqbl" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.761947 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a4cc403-44d2-4abe-a2fa-c40a29b5bab4-combined-ca-bundle\") pod \"neutron-775d8bf54b-pxqbl\" (UID: \"1a4cc403-44d2-4abe-a2fa-c40a29b5bab4\") " pod="openstack/neutron-775d8bf54b-pxqbl" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.762423 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a4cc403-44d2-4abe-a2fa-c40a29b5bab4-ovndb-tls-certs\") pod \"neutron-775d8bf54b-pxqbl\" (UID: \"1a4cc403-44d2-4abe-a2fa-c40a29b5bab4\") " pod="openstack/neutron-775d8bf54b-pxqbl" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.762900 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/1a4cc403-44d2-4abe-a2fa-c40a29b5bab4-config\") pod \"neutron-775d8bf54b-pxqbl\" (UID: \"1a4cc403-44d2-4abe-a2fa-c40a29b5bab4\") " pod="openstack/neutron-775d8bf54b-pxqbl" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.773518 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lm7n6\" (UniqueName: \"kubernetes.io/projected/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-kube-api-access-lm7n6\") pod \"dnsmasq-dns-5957fff8cc-srbt5\" (UID: \"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb\") " pod="openstack/dnsmasq-dns-5957fff8cc-srbt5" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.777638 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jpggq\" (UniqueName: 
\"kubernetes.io/projected/1a4cc403-44d2-4abe-a2fa-c40a29b5bab4-kube-api-access-jpggq\") pod \"neutron-775d8bf54b-pxqbl\" (UID: \"1a4cc403-44d2-4abe-a2fa-c40a29b5bab4\") " pod="openstack/neutron-775d8bf54b-pxqbl" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.801165 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-775d8bf54b-pxqbl" Jan 28 16:07:58 crc kubenswrapper[4811]: I0128 16:07:58.905182 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5957fff8cc-srbt5" Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.106002 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76bb4997-f96mf" Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.268832 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-56fd96c5b5-d2r9l" event={"ID":"f087b3b8-82d6-4cb1-a883-21a2f0c40580","Type":"ContainerStarted","Data":"2cae1699d6600a62abe5afb6ed86fe1873f7f678d8129a154b95f481914171e1"} Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.270276 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"39ddd574-84b9-4065-9d72-5183fe430d4d","Type":"ContainerStarted","Data":"f0a91b4e447ff1a52135fb776145bb39ff121699ece20c0eab9dbf3f1f9735a1"} Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.271454 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.282336 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/48668d6d-46c0-4e4c-91c5-2fdab113cd49-ovsdbserver-nb\") pod \"48668d6d-46c0-4e4c-91c5-2fdab113cd49\" (UID: \"48668d6d-46c0-4e4c-91c5-2fdab113cd49\") " Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.282392 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/48668d6d-46c0-4e4c-91c5-2fdab113cd49-ovsdbserver-sb\") pod \"48668d6d-46c0-4e4c-91c5-2fdab113cd49\" (UID: \"48668d6d-46c0-4e4c-91c5-2fdab113cd49\") " Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.282620 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48668d6d-46c0-4e4c-91c5-2fdab113cd49-config\") pod \"48668d6d-46c0-4e4c-91c5-2fdab113cd49\" (UID: \"48668d6d-46c0-4e4c-91c5-2fdab113cd49\") " Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.282950 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/48668d6d-46c0-4e4c-91c5-2fdab113cd49-dns-svc\") pod \"48668d6d-46c0-4e4c-91c5-2fdab113cd49\" (UID: \"48668d6d-46c0-4e4c-91c5-2fdab113cd49\") " Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.283335 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/48668d6d-46c0-4e4c-91c5-2fdab113cd49-dns-swift-storage-0\") pod \"48668d6d-46c0-4e4c-91c5-2fdab113cd49\" (UID: \"48668d6d-46c0-4e4c-91c5-2fdab113cd49\") " Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.283443 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lwsp4\" (UniqueName: 
\"kubernetes.io/projected/48668d6d-46c0-4e4c-91c5-2fdab113cd49-kube-api-access-lwsp4\") pod \"48668d6d-46c0-4e4c-91c5-2fdab113cd49\" (UID: \"48668d6d-46c0-4e4c-91c5-2fdab113cd49\") " Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.285180 4811 generic.go:334] "Generic (PLEG): container finished" podID="28212031-6289-428a-bbed-eb898d9a07a4" containerID="73e4a1707d9fc0168adcc7f4b1504a31e255ed1bbcae448f41aa0cf72655d77d" exitCode=0 Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.285242 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"28212031-6289-428a-bbed-eb898d9a07a4","Type":"ContainerDied","Data":"73e4a1707d9fc0168adcc7f4b1504a31e255ed1bbcae448f41aa0cf72655d77d"} Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.290391 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48668d6d-46c0-4e4c-91c5-2fdab113cd49-kube-api-access-lwsp4" (OuterVolumeSpecName: "kube-api-access-lwsp4") pod "48668d6d-46c0-4e4c-91c5-2fdab113cd49" (UID: "48668d6d-46c0-4e4c-91c5-2fdab113cd49"). InnerVolumeSpecName "kube-api-access-lwsp4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.313543 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.313521636 podStartE2EDuration="4.313521636s" podCreationTimestamp="2026-01-28 16:07:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:07:59.300780941 +0000 UTC m=+1372.055144524" watchObservedRunningTime="2026-01-28 16:07:59.313521636 +0000 UTC m=+1372.067885239" Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.317571 4811 generic.go:334] "Generic (PLEG): container finished" podID="48668d6d-46c0-4e4c-91c5-2fdab113cd49" containerID="66cff33cdecd5d71847e3d067ebfd0c1c23d080eab92a61d91c8216802639ab1" exitCode=0 Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.317891 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76bb4997-f96mf" Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.318932 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76bb4997-f96mf" event={"ID":"48668d6d-46c0-4e4c-91c5-2fdab113cd49","Type":"ContainerDied","Data":"66cff33cdecd5d71847e3d067ebfd0c1c23d080eab92a61d91c8216802639ab1"} Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.318975 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76bb4997-f96mf" event={"ID":"48668d6d-46c0-4e4c-91c5-2fdab113cd49","Type":"ContainerDied","Data":"5d79d6265a23eaaeeb6ed5a526de7ef4a963c5796f02b62eac0a961e01ba7a0d"} Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.319012 4811 scope.go:117] "RemoveContainer" containerID="66cff33cdecd5d71847e3d067ebfd0c1c23d080eab92a61d91c8216802639ab1" Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.370383 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48668d6d-46c0-4e4c-91c5-2fdab113cd49-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "48668d6d-46c0-4e4c-91c5-2fdab113cd49" (UID: "48668d6d-46c0-4e4c-91c5-2fdab113cd49"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.375936 4811 scope.go:117] "RemoveContainer" containerID="c2d59a1c65e11007472028e2375789a2e4c67ec78ca90dc2124e3bd61e66bb50" Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.385648 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48668d6d-46c0-4e4c-91c5-2fdab113cd49-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "48668d6d-46c0-4e4c-91c5-2fdab113cd49" (UID: "48668d6d-46c0-4e4c-91c5-2fdab113cd49"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.386219 4811 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/48668d6d-46c0-4e4c-91c5-2fdab113cd49-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.386253 4811 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/48668d6d-46c0-4e4c-91c5-2fdab113cd49-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.386269 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lwsp4\" (UniqueName: \"kubernetes.io/projected/48668d6d-46c0-4e4c-91c5-2fdab113cd49-kube-api-access-lwsp4\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.392919 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48668d6d-46c0-4e4c-91c5-2fdab113cd49-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "48668d6d-46c0-4e4c-91c5-2fdab113cd49" (UID: "48668d6d-46c0-4e4c-91c5-2fdab113cd49"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.397522 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48668d6d-46c0-4e4c-91c5-2fdab113cd49-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "48668d6d-46c0-4e4c-91c5-2fdab113cd49" (UID: "48668d6d-46c0-4e4c-91c5-2fdab113cd49"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.409162 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48668d6d-46c0-4e4c-91c5-2fdab113cd49-config" (OuterVolumeSpecName: "config") pod "48668d6d-46c0-4e4c-91c5-2fdab113cd49" (UID: "48668d6d-46c0-4e4c-91c5-2fdab113cd49"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.490637 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/48668d6d-46c0-4e4c-91c5-2fdab113cd49-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.490675 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/48668d6d-46c0-4e4c-91c5-2fdab113cd49-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.490689 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48668d6d-46c0-4e4c-91c5-2fdab113cd49-config\") on node \"crc\" DevicePath \"\"" Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.491301 4811 scope.go:117] "RemoveContainer" containerID="66cff33cdecd5d71847e3d067ebfd0c1c23d080eab92a61d91c8216802639ab1" Jan 28 16:07:59 crc kubenswrapper[4811]: E0128 16:07:59.492908 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66cff33cdecd5d71847e3d067ebfd0c1c23d080eab92a61d91c8216802639ab1\": container with ID starting with 66cff33cdecd5d71847e3d067ebfd0c1c23d080eab92a61d91c8216802639ab1 not found: ID does not exist" containerID="66cff33cdecd5d71847e3d067ebfd0c1c23d080eab92a61d91c8216802639ab1" Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.492942 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66cff33cdecd5d71847e3d067ebfd0c1c23d080eab92a61d91c8216802639ab1"} err="failed to get container status \"66cff33cdecd5d71847e3d067ebfd0c1c23d080eab92a61d91c8216802639ab1\": rpc error: code = NotFound desc = could not find container \"66cff33cdecd5d71847e3d067ebfd0c1c23d080eab92a61d91c8216802639ab1\": container with ID starting with 66cff33cdecd5d71847e3d067ebfd0c1c23d080eab92a61d91c8216802639ab1 not found: ID does not exist" Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.492969 4811 scope.go:117] "RemoveContainer" containerID="c2d59a1c65e11007472028e2375789a2e4c67ec78ca90dc2124e3bd61e66bb50" Jan 28 16:07:59 crc kubenswrapper[4811]: E0128 16:07:59.494304 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2d59a1c65e11007472028e2375789a2e4c67ec78ca90dc2124e3bd61e66bb50\": container with ID starting with c2d59a1c65e11007472028e2375789a2e4c67ec78ca90dc2124e3bd61e66bb50 not found: ID does not exist" containerID="c2d59a1c65e11007472028e2375789a2e4c67ec78ca90dc2124e3bd61e66bb50" Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.494337 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2d59a1c65e11007472028e2375789a2e4c67ec78ca90dc2124e3bd61e66bb50"} err="failed to get container status \"c2d59a1c65e11007472028e2375789a2e4c67ec78ca90dc2124e3bd61e66bb50\": rpc error: code = NotFound desc = could not find container \"c2d59a1c65e11007472028e2375789a2e4c67ec78ca90dc2124e3bd61e66bb50\": container with ID starting with c2d59a1c65e11007472028e2375789a2e4c67ec78ca90dc2124e3bd61e66bb50 not found: ID does not exist" Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.642193 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-775d8bf54b-pxqbl"] Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.704062 4811 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76bb4997-f96mf"] Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.721134 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-76bb4997-f96mf"] Jan 28 16:07:59 crc kubenswrapper[4811]: I0128 16:07:59.840242 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5957fff8cc-srbt5"] Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.035962 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-bsvjv" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.203275 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91de0bcd-1c43-42d8-8582-ba86e33b81c2-config-data\") pod \"91de0bcd-1c43-42d8-8582-ba86e33b81c2\" (UID: \"91de0bcd-1c43-42d8-8582-ba86e33b81c2\") " Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.203844 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/91de0bcd-1c43-42d8-8582-ba86e33b81c2-db-sync-config-data\") pod \"91de0bcd-1c43-42d8-8582-ba86e33b81c2\" (UID: \"91de0bcd-1c43-42d8-8582-ba86e33b81c2\") " Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.204111 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91de0bcd-1c43-42d8-8582-ba86e33b81c2-combined-ca-bundle\") pod \"91de0bcd-1c43-42d8-8582-ba86e33b81c2\" (UID: \"91de0bcd-1c43-42d8-8582-ba86e33b81c2\") " Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.204227 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l9ljs\" (UniqueName: \"kubernetes.io/projected/91de0bcd-1c43-42d8-8582-ba86e33b81c2-kube-api-access-l9ljs\") pod \"91de0bcd-1c43-42d8-8582-ba86e33b81c2\" (UID: \"91de0bcd-1c43-42d8-8582-ba86e33b81c2\") " Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.211009 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91de0bcd-1c43-42d8-8582-ba86e33b81c2-kube-api-access-l9ljs" (OuterVolumeSpecName: "kube-api-access-l9ljs") pod "91de0bcd-1c43-42d8-8582-ba86e33b81c2" (UID: "91de0bcd-1c43-42d8-8582-ba86e33b81c2"). InnerVolumeSpecName "kube-api-access-l9ljs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.215529 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91de0bcd-1c43-42d8-8582-ba86e33b81c2-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "91de0bcd-1c43-42d8-8582-ba86e33b81c2" (UID: "91de0bcd-1c43-42d8-8582-ba86e33b81c2"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.236023 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91de0bcd-1c43-42d8-8582-ba86e33b81c2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "91de0bcd-1c43-42d8-8582-ba86e33b81c2" (UID: "91de0bcd-1c43-42d8-8582-ba86e33b81c2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.280729 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91de0bcd-1c43-42d8-8582-ba86e33b81c2-config-data" (OuterVolumeSpecName: "config-data") pod "91de0bcd-1c43-42d8-8582-ba86e33b81c2" (UID: "91de0bcd-1c43-42d8-8582-ba86e33b81c2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.309578 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91de0bcd-1c43-42d8-8582-ba86e33b81c2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.309609 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l9ljs\" (UniqueName: \"kubernetes.io/projected/91de0bcd-1c43-42d8-8582-ba86e33b81c2-kube-api-access-l9ljs\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.309620 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91de0bcd-1c43-42d8-8582-ba86e33b81c2-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.309628 4811 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/91de0bcd-1c43-42d8-8582-ba86e33b81c2-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.357552 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48668d6d-46c0-4e4c-91c5-2fdab113cd49" path="/var/lib/kubelet/pods/48668d6d-46c0-4e4c-91c5-2fdab113cd49/volumes" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.359375 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-bsvjv" event={"ID":"91de0bcd-1c43-42d8-8582-ba86e33b81c2","Type":"ContainerDied","Data":"deee3d5a159584e1e720024ddc5eb4cc71a7bc179a05b7541922fa10a64e0fd3"} Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.359469 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="deee3d5a159584e1e720024ddc5eb4cc71a7bc179a05b7541922fa10a64e0fd3" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.359560 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-bsvjv" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.391586 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-775d8bf54b-pxqbl" event={"ID":"1a4cc403-44d2-4abe-a2fa-c40a29b5bab4","Type":"ContainerStarted","Data":"638bf6a409a2ee8ef91ccc30517ca80008633479d4f2b150788e6d75c6463742"} Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.391789 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-775d8bf54b-pxqbl" event={"ID":"1a4cc403-44d2-4abe-a2fa-c40a29b5bab4","Type":"ContainerStarted","Data":"dfbdd839f9a4cfb2fa6de12d44ff7dab4ce406a72e592fc45fcb39be4607f3ff"} Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.411397 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-56fd96c5b5-d2r9l" event={"ID":"f087b3b8-82d6-4cb1-a883-21a2f0c40580","Type":"ContainerStarted","Data":"a235c2b4225ce39f8a09e2a45966350b20607db9b21d8ff27ca73be5d353cadd"} Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.411460 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-56fd96c5b5-d2r9l" event={"ID":"f087b3b8-82d6-4cb1-a883-21a2f0c40580","Type":"ContainerStarted","Data":"0a27d600e6b6c3ccf2865906d7a304e9db45620c7979a493a59166dfb8cd6b04"} Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.412178 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-56fd96c5b5-d2r9l" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.412219 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-56fd96c5b5-d2r9l" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.417534 4811 generic.go:334] "Generic (PLEG): container finished" podID="dc42d256-a716-4ae2-8107-78e247e0efe1" containerID="3fb93715e6a8f6b6e9bd8c64de082cb1ad9518c738746cfd26752b8c3d5b2754" exitCode=0 Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.417616 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-56f5cb95d8-wm7lj" event={"ID":"dc42d256-a716-4ae2-8107-78e247e0efe1","Type":"ContainerDied","Data":"3fb93715e6a8f6b6e9bd8c64de082cb1ad9518c738746cfd26752b8c3d5b2754"} Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.419544 4811 generic.go:334] "Generic (PLEG): container finished" podID="ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb" containerID="ef4e6708f9b25c64b90e0229817d503f87825cea3f4d07bc0c78d9d593436c19" exitCode=0 Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.420514 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5957fff8cc-srbt5" event={"ID":"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb","Type":"ContainerDied","Data":"ef4e6708f9b25c64b90e0229817d503f87825cea3f4d07bc0c78d9d593436c19"} Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.420540 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5957fff8cc-srbt5" event={"ID":"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb","Type":"ContainerStarted","Data":"0cd393a642ee4e346c04f611e3c4ec148a75d740dd09b7e93e334778f2cd586e"} Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.559046 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-56fd96c5b5-d2r9l" podStartSLOduration=4.559026318 podStartE2EDuration="4.559026318s" podCreationTimestamp="2026-01-28 16:07:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-01-28 16:08:00.46188612 +0000 UTC m=+1373.216249703" watchObservedRunningTime="2026-01-28 16:08:00.559026318 +0000 UTC m=+1373.313389891" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.660516 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5957fff8cc-srbt5"] Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.714482 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6b4f5fc4f-j57cr"] Jan 28 16:08:00 crc kubenswrapper[4811]: E0128 16:08:00.715198 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48668d6d-46c0-4e4c-91c5-2fdab113cd49" containerName="dnsmasq-dns" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.715215 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="48668d6d-46c0-4e4c-91c5-2fdab113cd49" containerName="dnsmasq-dns" Jan 28 16:08:00 crc kubenswrapper[4811]: E0128 16:08:00.715225 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48668d6d-46c0-4e4c-91c5-2fdab113cd49" containerName="init" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.715231 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="48668d6d-46c0-4e4c-91c5-2fdab113cd49" containerName="init" Jan 28 16:08:00 crc kubenswrapper[4811]: E0128 16:08:00.715261 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91de0bcd-1c43-42d8-8582-ba86e33b81c2" containerName="glance-db-sync" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.715273 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="91de0bcd-1c43-42d8-8582-ba86e33b81c2" containerName="glance-db-sync" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.715512 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="48668d6d-46c0-4e4c-91c5-2fdab113cd49" containerName="dnsmasq-dns" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.715529 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="91de0bcd-1c43-42d8-8582-ba86e33b81c2" containerName="glance-db-sync" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.716608 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b4f5fc4f-j57cr" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.741634 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b4f5fc4f-j57cr"] Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.821680 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/224a0600-f44e-482b-a6f0-c96aeb7c4e60-config\") pod \"dnsmasq-dns-6b4f5fc4f-j57cr\" (UID: \"224a0600-f44e-482b-a6f0-c96aeb7c4e60\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-j57cr" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.821741 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwb4g\" (UniqueName: \"kubernetes.io/projected/224a0600-f44e-482b-a6f0-c96aeb7c4e60-kube-api-access-fwb4g\") pod \"dnsmasq-dns-6b4f5fc4f-j57cr\" (UID: \"224a0600-f44e-482b-a6f0-c96aeb7c4e60\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-j57cr" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.821783 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/224a0600-f44e-482b-a6f0-c96aeb7c4e60-ovsdbserver-nb\") pod \"dnsmasq-dns-6b4f5fc4f-j57cr\" (UID: \"224a0600-f44e-482b-a6f0-c96aeb7c4e60\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-j57cr" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.821812 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/224a0600-f44e-482b-a6f0-c96aeb7c4e60-ovsdbserver-sb\") pod \"dnsmasq-dns-6b4f5fc4f-j57cr\" (UID: \"224a0600-f44e-482b-a6f0-c96aeb7c4e60\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-j57cr" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.821841 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/224a0600-f44e-482b-a6f0-c96aeb7c4e60-dns-swift-storage-0\") pod \"dnsmasq-dns-6b4f5fc4f-j57cr\" (UID: \"224a0600-f44e-482b-a6f0-c96aeb7c4e60\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-j57cr" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.821912 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/224a0600-f44e-482b-a6f0-c96aeb7c4e60-dns-svc\") pod \"dnsmasq-dns-6b4f5fc4f-j57cr\" (UID: \"224a0600-f44e-482b-a6f0-c96aeb7c4e60\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-j57cr" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.927367 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/224a0600-f44e-482b-a6f0-c96aeb7c4e60-dns-svc\") pod \"dnsmasq-dns-6b4f5fc4f-j57cr\" (UID: \"224a0600-f44e-482b-a6f0-c96aeb7c4e60\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-j57cr" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.927541 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/224a0600-f44e-482b-a6f0-c96aeb7c4e60-config\") pod \"dnsmasq-dns-6b4f5fc4f-j57cr\" (UID: \"224a0600-f44e-482b-a6f0-c96aeb7c4e60\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-j57cr" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.927581 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-fwb4g\" (UniqueName: \"kubernetes.io/projected/224a0600-f44e-482b-a6f0-c96aeb7c4e60-kube-api-access-fwb4g\") pod \"dnsmasq-dns-6b4f5fc4f-j57cr\" (UID: \"224a0600-f44e-482b-a6f0-c96aeb7c4e60\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-j57cr" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.927622 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/224a0600-f44e-482b-a6f0-c96aeb7c4e60-ovsdbserver-nb\") pod \"dnsmasq-dns-6b4f5fc4f-j57cr\" (UID: \"224a0600-f44e-482b-a6f0-c96aeb7c4e60\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-j57cr" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.927657 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/224a0600-f44e-482b-a6f0-c96aeb7c4e60-ovsdbserver-sb\") pod \"dnsmasq-dns-6b4f5fc4f-j57cr\" (UID: \"224a0600-f44e-482b-a6f0-c96aeb7c4e60\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-j57cr" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.927687 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/224a0600-f44e-482b-a6f0-c96aeb7c4e60-dns-swift-storage-0\") pod \"dnsmasq-dns-6b4f5fc4f-j57cr\" (UID: \"224a0600-f44e-482b-a6f0-c96aeb7c4e60\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-j57cr" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.930764 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/224a0600-f44e-482b-a6f0-c96aeb7c4e60-dns-swift-storage-0\") pod \"dnsmasq-dns-6b4f5fc4f-j57cr\" (UID: \"224a0600-f44e-482b-a6f0-c96aeb7c4e60\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-j57cr" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.931413 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/224a0600-f44e-482b-a6f0-c96aeb7c4e60-dns-svc\") pod \"dnsmasq-dns-6b4f5fc4f-j57cr\" (UID: \"224a0600-f44e-482b-a6f0-c96aeb7c4e60\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-j57cr" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.933039 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/224a0600-f44e-482b-a6f0-c96aeb7c4e60-ovsdbserver-nb\") pod \"dnsmasq-dns-6b4f5fc4f-j57cr\" (UID: \"224a0600-f44e-482b-a6f0-c96aeb7c4e60\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-j57cr" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.933778 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/224a0600-f44e-482b-a6f0-c96aeb7c4e60-ovsdbserver-sb\") pod \"dnsmasq-dns-6b4f5fc4f-j57cr\" (UID: \"224a0600-f44e-482b-a6f0-c96aeb7c4e60\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-j57cr" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.934050 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/224a0600-f44e-482b-a6f0-c96aeb7c4e60-config\") pod \"dnsmasq-dns-6b4f5fc4f-j57cr\" (UID: \"224a0600-f44e-482b-a6f0-c96aeb7c4e60\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-j57cr" Jan 28 16:08:00 crc kubenswrapper[4811]: I0128 16:08:00.948544 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwb4g\" (UniqueName: 
\"kubernetes.io/projected/224a0600-f44e-482b-a6f0-c96aeb7c4e60-kube-api-access-fwb4g\") pod \"dnsmasq-dns-6b4f5fc4f-j57cr\" (UID: \"224a0600-f44e-482b-a6f0-c96aeb7c4e60\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-j57cr" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.047027 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-56f5cb95d8-wm7lj" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.059354 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b4f5fc4f-j57cr" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.093061 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.136348 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qcpn7\" (UniqueName: \"kubernetes.io/projected/dc42d256-a716-4ae2-8107-78e247e0efe1-kube-api-access-qcpn7\") pod \"dc42d256-a716-4ae2-8107-78e247e0efe1\" (UID: \"dc42d256-a716-4ae2-8107-78e247e0efe1\") " Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.136477 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc42d256-a716-4ae2-8107-78e247e0efe1-logs\") pod \"dc42d256-a716-4ae2-8107-78e247e0efe1\" (UID: \"dc42d256-a716-4ae2-8107-78e247e0efe1\") " Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.136559 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc42d256-a716-4ae2-8107-78e247e0efe1-combined-ca-bundle\") pod \"dc42d256-a716-4ae2-8107-78e247e0efe1\" (UID: \"dc42d256-a716-4ae2-8107-78e247e0efe1\") " Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.136590 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc42d256-a716-4ae2-8107-78e247e0efe1-config-data\") pod \"dc42d256-a716-4ae2-8107-78e247e0efe1\" (UID: \"dc42d256-a716-4ae2-8107-78e247e0efe1\") " Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.136620 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dc42d256-a716-4ae2-8107-78e247e0efe1-config-data-custom\") pod \"dc42d256-a716-4ae2-8107-78e247e0efe1\" (UID: \"dc42d256-a716-4ae2-8107-78e247e0efe1\") " Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.139193 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc42d256-a716-4ae2-8107-78e247e0efe1-logs" (OuterVolumeSpecName: "logs") pod "dc42d256-a716-4ae2-8107-78e247e0efe1" (UID: "dc42d256-a716-4ae2-8107-78e247e0efe1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.157362 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc42d256-a716-4ae2-8107-78e247e0efe1-kube-api-access-qcpn7" (OuterVolumeSpecName: "kube-api-access-qcpn7") pod "dc42d256-a716-4ae2-8107-78e247e0efe1" (UID: "dc42d256-a716-4ae2-8107-78e247e0efe1"). InnerVolumeSpecName "kube-api-access-qcpn7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.169920 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.178689 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc42d256-a716-4ae2-8107-78e247e0efe1-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "dc42d256-a716-4ae2-8107-78e247e0efe1" (UID: "dc42d256-a716-4ae2-8107-78e247e0efe1"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.243885 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qcpn7\" (UniqueName: \"kubernetes.io/projected/dc42d256-a716-4ae2-8107-78e247e0efe1-kube-api-access-qcpn7\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.244253 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc42d256-a716-4ae2-8107-78e247e0efe1-logs\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.244267 4811 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dc42d256-a716-4ae2-8107-78e247e0efe1-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.309613 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc42d256-a716-4ae2-8107-78e247e0efe1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dc42d256-a716-4ae2-8107-78e247e0efe1" (UID: "dc42d256-a716-4ae2-8107-78e247e0efe1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.345408 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc42d256-a716-4ae2-8107-78e247e0efe1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.359264 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc42d256-a716-4ae2-8107-78e247e0efe1-config-data" (OuterVolumeSpecName: "config-data") pod "dc42d256-a716-4ae2-8107-78e247e0efe1" (UID: "dc42d256-a716-4ae2-8107-78e247e0efe1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:01 crc kubenswrapper[4811]: E0128 16:08:01.423209 4811 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Jan 28 16:08:01 crc kubenswrapper[4811]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Jan 28 16:08:01 crc kubenswrapper[4811]: > podSandboxID="0cd393a642ee4e346c04f611e3c4ec148a75d740dd09b7e93e334778f2cd586e" Jan 28 16:08:01 crc kubenswrapper[4811]: E0128 16:08:01.423379 4811 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 28 16:08:01 crc kubenswrapper[4811]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n585h675h5hdbh86hb5h558h557h685h66dh5c5h567hc9hdbh597h549h696h9ch66dhc6h59bh5bh6ch599h689h5bbh558h5d9h596h699h5ddhbdq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-swift-storage-0,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-swift-storage-0,SubPath:dns-swift-storage-0,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-nb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-nb,SubPath:ovsdbserver-nb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-sb,SubPath:ovsdbserver-sb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lm7n6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 
},Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-5957fff8cc-srbt5_openstack(ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Jan 28 16:08:01 crc kubenswrapper[4811]: > logger="UnhandledError" Jan 28 16:08:01 crc kubenswrapper[4811]: E0128 16:08:01.426902 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-5957fff8cc-srbt5" podUID="ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.444907 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-775d8bf54b-pxqbl" event={"ID":"1a4cc403-44d2-4abe-a2fa-c40a29b5bab4","Type":"ContainerStarted","Data":"22ebe26a0dcd5c950f597ff3f4709d84054b91215a6052d84aa696ebda1c436f"} Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.445132 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-775d8bf54b-pxqbl" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.446599 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc42d256-a716-4ae2-8107-78e247e0efe1-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.493074 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-775d8bf54b-pxqbl" podStartSLOduration=3.493050813 podStartE2EDuration="3.493050813s" podCreationTimestamp="2026-01-28 16:07:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:08:01.489541748 +0000 UTC m=+1374.243905361" watchObservedRunningTime="2026-01-28 16:08:01.493050813 +0000 UTC m=+1374.247414396" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.515097 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3" containerName="cinder-scheduler" containerID="cri-o://dc871bd75052eeee558d1a1018cc059ccd3f3d067be0bc122f35ed3ecd6b0a8d" gracePeriod=30 Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.515670 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3" 
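
The CreateContainerError above is a teardown race rather than a persistent fault: dnsmasq-dns-5957fff8cc-srbt5 received a SyncLoop DELETE at 16:08:00.660 while the kubelet was still retrying its dnsmasq-dns container, and by 16:08:01.423 the subPath bind-mount source under /var/lib/kubelet/pods/ff22cbdb-.../volume-subpaths/dns-svc/dnsmasq-dns/1 had already been cleaned up, so the runtime fails with "No such file or directory" and pod_workers gives up ("Error syncing pod, skipping"). The target path in the error (`etc/dnsmasq.d/hosts/dns-svc`, no leading slash) appears to be reported relative to the container rootfs by the OCI runtime. Kubelet materializes each subPath as a bind mount under volume-subpaths/<volume>/<container>/<mount-index> and, before that, checks that the subPath cannot escape its volume. A minimal sketch of that containment idea only; the real kubelet implementation is considerably more involved:

package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// safeSubPath joins a declared subPath onto its volume root and rejects
// anything that escapes the volume (e.g. via ".." components). Illustration
// of the containment check, not kubelet's actual code path.
func safeSubPath(volumeRoot, subPath string) (string, error) {
	full := filepath.Join(volumeRoot, subPath) // Join also Cleans the result
	rel, err := filepath.Rel(volumeRoot, full)
	if err != nil || rel == ".." || strings.HasPrefix(rel, "../") {
		return "", fmt.Errorf("subPath %q escapes volume root %q", subPath, volumeRoot)
	}
	return full, nil
}

func main() {
	root := "/var/lib/kubelet/pods/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb/volumes/kubernetes.io~configmap/dns-svc"
	fmt.Println(safeSubPath(root, "dns-svc"))      // ok: resolves inside the volume
	fmt.Println(safeSubPath(root, "../../escape")) // rejected
}
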
containerName="probe" containerID="cri-o://3d2a28188f8937248b4f1b05bda011c3bccbaec5c7f66c1a9028d1062e2ae26f" gracePeriod=30 Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.516104 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-56f5cb95d8-wm7lj" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.516366 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-56f5cb95d8-wm7lj" event={"ID":"dc42d256-a716-4ae2-8107-78e247e0efe1","Type":"ContainerDied","Data":"14ad908cd5e5410480af30585b7f891702eedd0a37ca65fdadd4ea9c741c706a"} Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.516461 4811 scope.go:117] "RemoveContainer" containerID="3fb93715e6a8f6b6e9bd8c64de082cb1ad9518c738746cfd26752b8c3d5b2754" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.583156 4811 scope.go:117] "RemoveContainer" containerID="0bb77a25b0308a749af93fab382369abc173ccbd80449733cd3219c18edb43d2" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.633350 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-56f5cb95d8-wm7lj"] Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.685342 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-56f5cb95d8-wm7lj"] Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.701501 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Jan 28 16:08:01 crc kubenswrapper[4811]: E0128 16:08:01.702127 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc42d256-a716-4ae2-8107-78e247e0efe1" containerName="barbican-api-log" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.702152 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc42d256-a716-4ae2-8107-78e247e0efe1" containerName="barbican-api-log" Jan 28 16:08:01 crc kubenswrapper[4811]: E0128 16:08:01.702189 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc42d256-a716-4ae2-8107-78e247e0efe1" containerName="barbican-api" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.702198 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc42d256-a716-4ae2-8107-78e247e0efe1" containerName="barbican-api" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.702469 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc42d256-a716-4ae2-8107-78e247e0efe1" containerName="barbican-api-log" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.702503 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc42d256-a716-4ae2-8107-78e247e0efe1" containerName="barbican-api" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.703845 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.707968 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.708185 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.708390 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-vn8wr" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.710374 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.760666 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/440e4e34-c373-4182-b3e2-ca4f2de8e825-config-data\") pod \"glance-default-external-api-0\" (UID: \"440e4e34-c373-4182-b3e2-ca4f2de8e825\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.760711 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2tp66\" (UniqueName: \"kubernetes.io/projected/440e4e34-c373-4182-b3e2-ca4f2de8e825-kube-api-access-2tp66\") pod \"glance-default-external-api-0\" (UID: \"440e4e34-c373-4182-b3e2-ca4f2de8e825\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.760940 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/440e4e34-c373-4182-b3e2-ca4f2de8e825-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"440e4e34-c373-4182-b3e2-ca4f2de8e825\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.760995 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/440e4e34-c373-4182-b3e2-ca4f2de8e825-scripts\") pod \"glance-default-external-api-0\" (UID: \"440e4e34-c373-4182-b3e2-ca4f2de8e825\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.761078 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/440e4e34-c373-4182-b3e2-ca4f2de8e825-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"440e4e34-c373-4182-b3e2-ca4f2de8e825\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.761370 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"440e4e34-c373-4182-b3e2-ca4f2de8e825\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.761394 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/440e4e34-c373-4182-b3e2-ca4f2de8e825-logs\") pod \"glance-default-external-api-0\" (UID: \"440e4e34-c373-4182-b3e2-ca4f2de8e825\") " 
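
The glance-default-external-api-0 entries above show the volume manager's usual pipeline: "VerifyControllerAttachedVolume started" for each desired volume, then "MountVolume started", then "MountVolume.SetUp succeeded" (with an extra "MountVolume.MountDevice succeeded" step for the local PV). Conceptually this is a reconciliation between a desired set of (pod, volume) mounts and the actual set. A toy reconciler in that spirit, with illustrative names that are not kubelet's API:

package main

import "fmt"

type mountKey struct{ pod, volume string }

// reconcile emits the operations that would bring the actual mount set in
// line with the desired one: mount what is missing, unmount what is stale.
func reconcile(desired, actual map[mountKey]bool) []string {
	var ops []string
	for k := range desired {
		if !actual[k] {
			ops = append(ops, fmt.Sprintf("MountVolume started for volume %q pod %q", k.volume, k.pod))
		}
	}
	for k := range actual {
		if !desired[k] {
			ops = append(ops, fmt.Sprintf("UnmountVolume started for volume %q pod %q", k.volume, k.pod))
		}
	}
	return ops
}

func main() {
	desired := map[mountKey]bool{
		{"glance-default-external-api-0", "config-data"}: true,
		{"glance-default-external-api-0", "scripts"}:     true,
	}
	actual := map[mountKey]bool{
		{"barbican-api-56f5cb95d8-wm7lj", "logs"}: true, // pod deleted above: unmount
	}
	for _, op := range reconcile(desired, actual) {
		fmt.Println(op)
	}
}
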
pod="openstack/glance-default-external-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.786266 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.788917 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.800701 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.801871 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.865626 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/325d2c92-aca2-4660-b328-f0ebf4d03b7d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.865690 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/440e4e34-c373-4182-b3e2-ca4f2de8e825-config-data\") pod \"glance-default-external-api-0\" (UID: \"440e4e34-c373-4182-b3e2-ca4f2de8e825\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.865716 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2tp66\" (UniqueName: \"kubernetes.io/projected/440e4e34-c373-4182-b3e2-ca4f2de8e825-kube-api-access-2tp66\") pod \"glance-default-external-api-0\" (UID: \"440e4e34-c373-4182-b3e2-ca4f2de8e825\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.865752 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/440e4e34-c373-4182-b3e2-ca4f2de8e825-scripts\") pod \"glance-default-external-api-0\" (UID: \"440e4e34-c373-4182-b3e2-ca4f2de8e825\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.865773 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/440e4e34-c373-4182-b3e2-ca4f2de8e825-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"440e4e34-c373-4182-b3e2-ca4f2de8e825\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.865791 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/325d2c92-aca2-4660-b328-f0ebf4d03b7d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.865807 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/325d2c92-aca2-4660-b328-f0ebf4d03b7d-logs\") pod \"glance-default-internal-api-0\" (UID: \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.865831 4811 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/440e4e34-c373-4182-b3e2-ca4f2de8e825-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"440e4e34-c373-4182-b3e2-ca4f2de8e825\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.865856 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/325d2c92-aca2-4660-b328-f0ebf4d03b7d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.865872 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/325d2c92-aca2-4660-b328-f0ebf4d03b7d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.865920 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"440e4e34-c373-4182-b3e2-ca4f2de8e825\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.865938 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/440e4e34-c373-4182-b3e2-ca4f2de8e825-logs\") pod \"glance-default-external-api-0\" (UID: \"440e4e34-c373-4182-b3e2-ca4f2de8e825\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.865955 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.865982 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-df4t7\" (UniqueName: \"kubernetes.io/projected/325d2c92-aca2-4660-b328-f0ebf4d03b7d-kube-api-access-df4t7\") pod \"glance-default-internal-api-0\" (UID: \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.877205 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/440e4e34-c373-4182-b3e2-ca4f2de8e825-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"440e4e34-c373-4182-b3e2-ca4f2de8e825\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.877283 4811 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"440e4e34-c373-4182-b3e2-ca4f2de8e825\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/glance-default-external-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.877930 4811 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/440e4e34-c373-4182-b3e2-ca4f2de8e825-logs\") pod \"glance-default-external-api-0\" (UID: \"440e4e34-c373-4182-b3e2-ca4f2de8e825\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.878418 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/440e4e34-c373-4182-b3e2-ca4f2de8e825-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"440e4e34-c373-4182-b3e2-ca4f2de8e825\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.885279 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/440e4e34-c373-4182-b3e2-ca4f2de8e825-scripts\") pod \"glance-default-external-api-0\" (UID: \"440e4e34-c373-4182-b3e2-ca4f2de8e825\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.885857 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/440e4e34-c373-4182-b3e2-ca4f2de8e825-config-data\") pod \"glance-default-external-api-0\" (UID: \"440e4e34-c373-4182-b3e2-ca4f2de8e825\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.911374 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2tp66\" (UniqueName: \"kubernetes.io/projected/440e4e34-c373-4182-b3e2-ca4f2de8e825-kube-api-access-2tp66\") pod \"glance-default-external-api-0\" (UID: \"440e4e34-c373-4182-b3e2-ca4f2de8e825\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.933462 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b4f5fc4f-j57cr"] Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.940610 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"440e4e34-c373-4182-b3e2-ca4f2de8e825\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.968867 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/325d2c92-aca2-4660-b328-f0ebf4d03b7d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.968919 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/325d2c92-aca2-4660-b328-f0ebf4d03b7d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.968985 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.969025 4811 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-df4t7\" (UniqueName: \"kubernetes.io/projected/325d2c92-aca2-4660-b328-f0ebf4d03b7d-kube-api-access-df4t7\") pod \"glance-default-internal-api-0\" (UID: \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.969090 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/325d2c92-aca2-4660-b328-f0ebf4d03b7d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.969138 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/325d2c92-aca2-4660-b328-f0ebf4d03b7d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.969156 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/325d2c92-aca2-4660-b328-f0ebf4d03b7d-logs\") pod \"glance-default-internal-api-0\" (UID: \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.969640 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/325d2c92-aca2-4660-b328-f0ebf4d03b7d-logs\") pod \"glance-default-internal-api-0\" (UID: \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.969813 4811 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-internal-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.969964 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/325d2c92-aca2-4660-b328-f0ebf4d03b7d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.976932 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/325d2c92-aca2-4660-b328-f0ebf4d03b7d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.977829 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/325d2c92-aca2-4660-b328-f0ebf4d03b7d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:01 crc kubenswrapper[4811]: I0128 16:08:01.978207 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/325d2c92-aca2-4660-b328-f0ebf4d03b7d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:02 crc kubenswrapper[4811]: I0128 16:08:02.008037 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-df4t7\" (UniqueName: \"kubernetes.io/projected/325d2c92-aca2-4660-b328-f0ebf4d03b7d-kube-api-access-df4t7\") pod \"glance-default-internal-api-0\" (UID: \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:02 crc kubenswrapper[4811]: I0128 16:08:02.048418 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 28 16:08:02 crc kubenswrapper[4811]: I0128 16:08:02.052103 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:02 crc kubenswrapper[4811]: I0128 16:08:02.121884 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 28 16:08:02 crc kubenswrapper[4811]: I0128 16:08:02.372629 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc42d256-a716-4ae2-8107-78e247e0efe1" path="/var/lib/kubelet/pods/dc42d256-a716-4ae2-8107-78e247e0efe1/volumes" Jan 28 16:08:02 crc kubenswrapper[4811]: I0128 16:08:02.530242 4811 generic.go:334] "Generic (PLEG): container finished" podID="224a0600-f44e-482b-a6f0-c96aeb7c4e60" containerID="f93872847accd261ace129ff22ae2a979e2ba6bd139606428029ae2871dc1a16" exitCode=0 Jan 28 16:08:02 crc kubenswrapper[4811]: I0128 16:08:02.530313 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b4f5fc4f-j57cr" event={"ID":"224a0600-f44e-482b-a6f0-c96aeb7c4e60","Type":"ContainerDied","Data":"f93872847accd261ace129ff22ae2a979e2ba6bd139606428029ae2871dc1a16"} Jan 28 16:08:02 crc kubenswrapper[4811]: I0128 16:08:02.530340 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b4f5fc4f-j57cr" event={"ID":"224a0600-f44e-482b-a6f0-c96aeb7c4e60","Type":"ContainerStarted","Data":"23adde7554fd1f09c98e0262c9d68cbfc9469928871b309f8186491c0ca024e4"} Jan 28 16:08:02 crc kubenswrapper[4811]: I0128 16:08:02.769832 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 28 16:08:02 crc kubenswrapper[4811]: W0128 16:08:02.798886 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod440e4e34_c373_4182_b3e2_ca4f2de8e825.slice/crio-dac8ee745d9527e86a660546bf48f52edb7d175036036919bff330dd3b3970c7 WatchSource:0}: Error finding container dac8ee745d9527e86a660546bf48f52edb7d175036036919bff330dd3b3970c7: Status 404 returned error can't find the container with id dac8ee745d9527e86a660546bf48f52edb7d175036036919bff330dd3b3970c7 Jan 28 16:08:02 crc kubenswrapper[4811]: I0128 16:08:02.986037 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.212620 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5957fff8cc-srbt5" Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.407240 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lm7n6\" (UniqueName: \"kubernetes.io/projected/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-kube-api-access-lm7n6\") pod \"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb\" (UID: \"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb\") " Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.407365 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-dns-swift-storage-0\") pod \"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb\" (UID: \"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb\") " Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.407414 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-ovsdbserver-sb\") pod \"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb\" (UID: \"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb\") " Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.407588 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-ovsdbserver-nb\") pod \"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb\" (UID: \"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb\") " Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.407674 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-dns-svc\") pod \"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb\" (UID: \"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb\") " Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.407728 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-config\") pod \"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb\" (UID: \"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb\") " Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.422690 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-kube-api-access-lm7n6" (OuterVolumeSpecName: "kube-api-access-lm7n6") pod "ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb" (UID: "ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb"). InnerVolumeSpecName "kube-api-access-lm7n6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.505907 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-config" (OuterVolumeSpecName: "config") pod "ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb" (UID: "ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.512000 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb" (UID: "ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.516164 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-dns-swift-storage-0\") pod \"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb\" (UID: \"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb\") " Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.517760 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-config\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.517988 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lm7n6\" (UniqueName: \"kubernetes.io/projected/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-kube-api-access-lm7n6\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:03 crc kubenswrapper[4811]: W0128 16:08:03.518846 4811 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb/volumes/kubernetes.io~configmap/dns-swift-storage-0 Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.529356 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb" (UID: "ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.541371 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb" (UID: "ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.598973 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb" (UID: "ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.611826 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb" (UID: "ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.615334 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"325d2c92-aca2-4660-b328-f0ebf4d03b7d","Type":"ContainerStarted","Data":"0575dcf94db48fca03f541f7efa65eac95b191f6e5ebb45134279537e5e8adee"} Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.630661 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.630687 4811 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.630697 4811 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.630706 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.634012 4811 generic.go:334] "Generic (PLEG): container finished" podID="28212031-6289-428a-bbed-eb898d9a07a4" containerID="3bf14c9bb656844812c98e50e0de0f55f868753d6514d8d8e59b710f4bcdc136" exitCode=0 Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.634078 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"28212031-6289-428a-bbed-eb898d9a07a4","Type":"ContainerDied","Data":"3bf14c9bb656844812c98e50e0de0f55f868753d6514d8d8e59b710f4bcdc136"} Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.642461 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b4f5fc4f-j57cr" event={"ID":"224a0600-f44e-482b-a6f0-c96aeb7c4e60","Type":"ContainerStarted","Data":"d12160213921a2bdab4e2b7e544e5b930e90d86ffb8a833a1ca72f00afaa8778"} Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.643560 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6b4f5fc4f-j57cr" Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.645111 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"440e4e34-c373-4182-b3e2-ca4f2de8e825","Type":"ContainerStarted","Data":"dac8ee745d9527e86a660546bf48f52edb7d175036036919bff330dd3b3970c7"} Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.676541 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6b4f5fc4f-j57cr" podStartSLOduration=3.676519287 podStartE2EDuration="3.676519287s" podCreationTimestamp="2026-01-28 16:08:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:08:03.661793438 +0000 UTC m=+1376.416157021" watchObservedRunningTime="2026-01-28 16:08:03.676519287 +0000 UTC m=+1376.430882870" Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.679412 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5957fff8cc-srbt5" 
event={"ID":"ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb","Type":"ContainerDied","Data":"0cd393a642ee4e346c04f611e3c4ec148a75d740dd09b7e93e334778f2cd586e"} Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.679454 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5957fff8cc-srbt5" Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.679475 4811 scope.go:117] "RemoveContainer" containerID="ef4e6708f9b25c64b90e0229817d503f87825cea3f4d07bc0c78d9d593436c19" Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.698190 4811 generic.go:334] "Generic (PLEG): container finished" podID="78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3" containerID="3d2a28188f8937248b4f1b05bda011c3bccbaec5c7f66c1a9028d1062e2ae26f" exitCode=0 Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.698268 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3","Type":"ContainerDied","Data":"3d2a28188f8937248b4f1b05bda011c3bccbaec5c7f66c1a9028d1062e2ae26f"} Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.806844 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5957fff8cc-srbt5"] Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.824504 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5957fff8cc-srbt5"] Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.875787 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.949844 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28212031-6289-428a-bbed-eb898d9a07a4-config-data\") pod \"28212031-6289-428a-bbed-eb898d9a07a4\" (UID: \"28212031-6289-428a-bbed-eb898d9a07a4\") " Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.949954 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28212031-6289-428a-bbed-eb898d9a07a4-combined-ca-bundle\") pod \"28212031-6289-428a-bbed-eb898d9a07a4\" (UID: \"28212031-6289-428a-bbed-eb898d9a07a4\") " Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.950035 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28212031-6289-428a-bbed-eb898d9a07a4-scripts\") pod \"28212031-6289-428a-bbed-eb898d9a07a4\" (UID: \"28212031-6289-428a-bbed-eb898d9a07a4\") " Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.950060 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sr4s9\" (UniqueName: \"kubernetes.io/projected/28212031-6289-428a-bbed-eb898d9a07a4-kube-api-access-sr4s9\") pod \"28212031-6289-428a-bbed-eb898d9a07a4\" (UID: \"28212031-6289-428a-bbed-eb898d9a07a4\") " Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.950090 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/28212031-6289-428a-bbed-eb898d9a07a4-log-httpd\") pod \"28212031-6289-428a-bbed-eb898d9a07a4\" (UID: \"28212031-6289-428a-bbed-eb898d9a07a4\") " Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.950137 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/28212031-6289-428a-bbed-eb898d9a07a4-sg-core-conf-yaml\") pod \"28212031-6289-428a-bbed-eb898d9a07a4\" (UID: \"28212031-6289-428a-bbed-eb898d9a07a4\") " Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.950166 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/28212031-6289-428a-bbed-eb898d9a07a4-run-httpd\") pod \"28212031-6289-428a-bbed-eb898d9a07a4\" (UID: \"28212031-6289-428a-bbed-eb898d9a07a4\") " Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.951230 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28212031-6289-428a-bbed-eb898d9a07a4-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "28212031-6289-428a-bbed-eb898d9a07a4" (UID: "28212031-6289-428a-bbed-eb898d9a07a4"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.951747 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28212031-6289-428a-bbed-eb898d9a07a4-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "28212031-6289-428a-bbed-eb898d9a07a4" (UID: "28212031-6289-428a-bbed-eb898d9a07a4"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.963023 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28212031-6289-428a-bbed-eb898d9a07a4-scripts" (OuterVolumeSpecName: "scripts") pod "28212031-6289-428a-bbed-eb898d9a07a4" (UID: "28212031-6289-428a-bbed-eb898d9a07a4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:03 crc kubenswrapper[4811]: I0128 16:08:03.968651 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28212031-6289-428a-bbed-eb898d9a07a4-kube-api-access-sr4s9" (OuterVolumeSpecName: "kube-api-access-sr4s9") pod "28212031-6289-428a-bbed-eb898d9a07a4" (UID: "28212031-6289-428a-bbed-eb898d9a07a4"). InnerVolumeSpecName "kube-api-access-sr4s9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.052839 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28212031-6289-428a-bbed-eb898d9a07a4-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.052883 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sr4s9\" (UniqueName: \"kubernetes.io/projected/28212031-6289-428a-bbed-eb898d9a07a4-kube-api-access-sr4s9\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.052898 4811 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/28212031-6289-428a-bbed-eb898d9a07a4-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.052911 4811 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/28212031-6289-428a-bbed-eb898d9a07a4-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.078896 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28212031-6289-428a-bbed-eb898d9a07a4-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "28212031-6289-428a-bbed-eb898d9a07a4" (UID: "28212031-6289-428a-bbed-eb898d9a07a4"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.135925 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28212031-6289-428a-bbed-eb898d9a07a4-config-data" (OuterVolumeSpecName: "config-data") pod "28212031-6289-428a-bbed-eb898d9a07a4" (UID: "28212031-6289-428a-bbed-eb898d9a07a4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.154005 4811 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/28212031-6289-428a-bbed-eb898d9a07a4-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.154041 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28212031-6289-428a-bbed-eb898d9a07a4-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.175597 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28212031-6289-428a-bbed-eb898d9a07a4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "28212031-6289-428a-bbed-eb898d9a07a4" (UID: "28212031-6289-428a-bbed-eb898d9a07a4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.255634 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28212031-6289-428a-bbed-eb898d9a07a4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.268134 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-86846dcc5c-hkg4d"] Jan 28 16:08:04 crc kubenswrapper[4811]: E0128 16:08:04.268710 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb" containerName="init" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.268734 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb" containerName="init" Jan 28 16:08:04 crc kubenswrapper[4811]: E0128 16:08:04.268809 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28212031-6289-428a-bbed-eb898d9a07a4" containerName="proxy-httpd" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.268821 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="28212031-6289-428a-bbed-eb898d9a07a4" containerName="proxy-httpd" Jan 28 16:08:04 crc kubenswrapper[4811]: E0128 16:08:04.268860 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28212031-6289-428a-bbed-eb898d9a07a4" containerName="ceilometer-central-agent" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.268871 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="28212031-6289-428a-bbed-eb898d9a07a4" containerName="ceilometer-central-agent" Jan 28 16:08:04 crc kubenswrapper[4811]: E0128 16:08:04.268883 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28212031-6289-428a-bbed-eb898d9a07a4" containerName="sg-core" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.268891 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="28212031-6289-428a-bbed-eb898d9a07a4" containerName="sg-core" Jan 28 16:08:04 crc kubenswrapper[4811]: E0128 16:08:04.268905 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28212031-6289-428a-bbed-eb898d9a07a4" containerName="ceilometer-notification-agent" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.268913 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="28212031-6289-428a-bbed-eb898d9a07a4" containerName="ceilometer-notification-agent" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.269956 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="28212031-6289-428a-bbed-eb898d9a07a4" containerName="sg-core" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.270013 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="28212031-6289-428a-bbed-eb898d9a07a4" containerName="ceilometer-central-agent" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.270030 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="28212031-6289-428a-bbed-eb898d9a07a4" containerName="proxy-httpd" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.270044 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb" containerName="init" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.270085 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="28212031-6289-428a-bbed-eb898d9a07a4" containerName="ceilometer-notification-agent" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.271386 4811 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-86846dcc5c-hkg4d" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.277175 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.277175 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.316038 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-86846dcc5c-hkg4d"] Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.357114 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-config\") pod \"neutron-86846dcc5c-hkg4d\" (UID: \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\") " pod="openstack/neutron-86846dcc5c-hkg4d" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.357162 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzlwr\" (UniqueName: \"kubernetes.io/projected/73a0ad8a-2cb9-466e-b3e7-251823ea4528-kube-api-access-rzlwr\") pod \"neutron-86846dcc5c-hkg4d\" (UID: \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\") " pod="openstack/neutron-86846dcc5c-hkg4d" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.357225 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-public-tls-certs\") pod \"neutron-86846dcc5c-hkg4d\" (UID: \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\") " pod="openstack/neutron-86846dcc5c-hkg4d" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.357253 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-ovndb-tls-certs\") pod \"neutron-86846dcc5c-hkg4d\" (UID: \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\") " pod="openstack/neutron-86846dcc5c-hkg4d" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.357272 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-httpd-config\") pod \"neutron-86846dcc5c-hkg4d\" (UID: \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\") " pod="openstack/neutron-86846dcc5c-hkg4d" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.357306 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-combined-ca-bundle\") pod \"neutron-86846dcc5c-hkg4d\" (UID: \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\") " pod="openstack/neutron-86846dcc5c-hkg4d" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.357331 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-internal-tls-certs\") pod \"neutron-86846dcc5c-hkg4d\" (UID: \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\") " pod="openstack/neutron-86846dcc5c-hkg4d" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.374329 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb" path="/var/lib/kubelet/pods/ff22cbdb-ac03-4ef6-af1c-1d12b43a8edb/volumes" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.459563 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzlwr\" (UniqueName: \"kubernetes.io/projected/73a0ad8a-2cb9-466e-b3e7-251823ea4528-kube-api-access-rzlwr\") pod \"neutron-86846dcc5c-hkg4d\" (UID: \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\") " pod="openstack/neutron-86846dcc5c-hkg4d" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.460655 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-public-tls-certs\") pod \"neutron-86846dcc5c-hkg4d\" (UID: \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\") " pod="openstack/neutron-86846dcc5c-hkg4d" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.460727 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-ovndb-tls-certs\") pod \"neutron-86846dcc5c-hkg4d\" (UID: \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\") " pod="openstack/neutron-86846dcc5c-hkg4d" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.460757 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-httpd-config\") pod \"neutron-86846dcc5c-hkg4d\" (UID: \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\") " pod="openstack/neutron-86846dcc5c-hkg4d" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.460811 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-combined-ca-bundle\") pod \"neutron-86846dcc5c-hkg4d\" (UID: \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\") " pod="openstack/neutron-86846dcc5c-hkg4d" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.460937 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-internal-tls-certs\") pod \"neutron-86846dcc5c-hkg4d\" (UID: \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\") " pod="openstack/neutron-86846dcc5c-hkg4d" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.461182 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-config\") pod \"neutron-86846dcc5c-hkg4d\" (UID: \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\") " pod="openstack/neutron-86846dcc5c-hkg4d" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.463906 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-ovndb-tls-certs\") pod \"neutron-86846dcc5c-hkg4d\" (UID: \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\") " pod="openstack/neutron-86846dcc5c-hkg4d" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.465714 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-public-tls-certs\") pod \"neutron-86846dcc5c-hkg4d\" (UID: \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\") " pod="openstack/neutron-86846dcc5c-hkg4d" Jan 28 16:08:04 crc 
kubenswrapper[4811]: I0128 16:08:04.467407 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-config\") pod \"neutron-86846dcc5c-hkg4d\" (UID: \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\") " pod="openstack/neutron-86846dcc5c-hkg4d" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.470331 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-httpd-config\") pod \"neutron-86846dcc5c-hkg4d\" (UID: \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\") " pod="openstack/neutron-86846dcc5c-hkg4d" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.471157 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-internal-tls-certs\") pod \"neutron-86846dcc5c-hkg4d\" (UID: \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\") " pod="openstack/neutron-86846dcc5c-hkg4d" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.474186 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-combined-ca-bundle\") pod \"neutron-86846dcc5c-hkg4d\" (UID: \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\") " pod="openstack/neutron-86846dcc5c-hkg4d" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.485169 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rzlwr\" (UniqueName: \"kubernetes.io/projected/73a0ad8a-2cb9-466e-b3e7-251823ea4528-kube-api-access-rzlwr\") pod \"neutron-86846dcc5c-hkg4d\" (UID: \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\") " pod="openstack/neutron-86846dcc5c-hkg4d" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.597983 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-86846dcc5c-hkg4d" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.770491 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"440e4e34-c373-4182-b3e2-ca4f2de8e825","Type":"ContainerStarted","Data":"beaa34b1bb87634c20de74a307e614a6bdabf08e4b46808857ffe073a08144f5"} Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.770848 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"440e4e34-c373-4182-b3e2-ca4f2de8e825","Type":"ContainerStarted","Data":"453ab40bec669c2e84bf09590e4bf6d64bd007a5dd0bdd682d684aaa4378aad8"} Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.796130 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"325d2c92-aca2-4660-b328-f0ebf4d03b7d","Type":"ContainerStarted","Data":"5907a42ff1711e16c1f205ce4b6ee860d3d1ccb8326e1a557c2cdccf4c68b4b6"} Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.804527 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.80450632 podStartE2EDuration="4.80450632s" podCreationTimestamp="2026-01-28 16:08:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:08:04.796185725 +0000 UTC m=+1377.550549318" watchObservedRunningTime="2026-01-28 16:08:04.80450632 +0000 UTC m=+1377.558869893" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.920970 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.921644 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"28212031-6289-428a-bbed-eb898d9a07a4","Type":"ContainerDied","Data":"43aed9f889c4172baaaf63fb610f2a0beac8c34acd1d49f35a8fa71bed9f6d23"} Jan 28 16:08:04 crc kubenswrapper[4811]: I0128 16:08:04.921689 4811 scope.go:117] "RemoveContainer" containerID="125f8f38afecb18c1fd4bd6f2f694ecc4de5184677dc4d2ac818d93a5d4df832" Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.012033 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.031290 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.047951 4811 scope.go:117] "RemoveContainer" containerID="eaa89e992c4e659d34cb4bf1ae796f0d7929fb771ca4a1c07f2ddb08f4b88fac" Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.089418 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.096150 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.105238 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.110397 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.126494 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.129707 4811 scope.go:117] "RemoveContainer" containerID="3bf14c9bb656844812c98e50e0de0f55f868753d6514d8d8e59b710f4bcdc136" Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.232407 4811 scope.go:117] "RemoveContainer" containerID="73e4a1707d9fc0168adcc7f4b1504a31e255ed1bbcae448f41aa0cf72655d77d" Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.289349 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\") " pod="openstack/ceilometer-0" Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.290051 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z78sx\" (UniqueName: \"kubernetes.io/projected/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-kube-api-access-z78sx\") pod \"ceilometer-0\" (UID: \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\") " pod="openstack/ceilometer-0" Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.290392 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-scripts\") pod \"ceilometer-0\" (UID: \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\") " pod="openstack/ceilometer-0" Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.290679 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-log-httpd\") pod \"ceilometer-0\" (UID: \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\") " pod="openstack/ceilometer-0" Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.290799 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\") " pod="openstack/ceilometer-0" Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.291309 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-config-data\") pod \"ceilometer-0\" (UID: \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\") " pod="openstack/ceilometer-0" Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.291446 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-run-httpd\") pod \"ceilometer-0\" (UID: \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\") " pod="openstack/ceilometer-0" Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 
16:08:05.395706 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-scripts\") pod \"ceilometer-0\" (UID: \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\") " pod="openstack/ceilometer-0" Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.395813 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-log-httpd\") pod \"ceilometer-0\" (UID: \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\") " pod="openstack/ceilometer-0" Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.395839 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\") " pod="openstack/ceilometer-0" Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.395923 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-config-data\") pod \"ceilometer-0\" (UID: \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\") " pod="openstack/ceilometer-0" Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.396003 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-run-httpd\") pod \"ceilometer-0\" (UID: \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\") " pod="openstack/ceilometer-0" Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.396211 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\") " pod="openstack/ceilometer-0" Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.396367 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z78sx\" (UniqueName: \"kubernetes.io/projected/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-kube-api-access-z78sx\") pod \"ceilometer-0\" (UID: \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\") " pod="openstack/ceilometer-0" Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.401567 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-log-httpd\") pod \"ceilometer-0\" (UID: \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\") " pod="openstack/ceilometer-0" Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.403177 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-run-httpd\") pod \"ceilometer-0\" (UID: \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\") " pod="openstack/ceilometer-0" Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.407403 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-scripts\") pod \"ceilometer-0\" (UID: \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\") " pod="openstack/ceilometer-0" Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.414041 4811 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\") " pod="openstack/ceilometer-0" Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.430525 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\") " pod="openstack/ceilometer-0" Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.431607 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-config-data\") pod \"ceilometer-0\" (UID: \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\") " pod="openstack/ceilometer-0" Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.437915 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z78sx\" (UniqueName: \"kubernetes.io/projected/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-kube-api-access-z78sx\") pod \"ceilometer-0\" (UID: \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\") " pod="openstack/ceilometer-0" Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.496999 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.508900 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.554941 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-86846dcc5c-hkg4d"] Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.627871 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.682052 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-56f5cb95d8-wm7lj" podUID="dc42d256-a716-4ae2-8107-78e247e0efe1" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.155:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.682013 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-56f5cb95d8-wm7lj" podUID="dc42d256-a716-4ae2-8107-78e247e0efe1" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.155:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.952837 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"325d2c92-aca2-4660-b328-f0ebf4d03b7d","Type":"ContainerStarted","Data":"bf36b2396177eb8b17044ccd59a6ae17340ec01ba6ae0b8beabcab4e759887cf"} Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.962493 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-86846dcc5c-hkg4d" event={"ID":"73a0ad8a-2cb9-466e-b3e7-251823ea4528","Type":"ContainerStarted","Data":"51a598763a4f9f6e93146ac5d33c8ce2be97bf7c452813033779bf52b0e6819b"} Jan 28 16:08:05 crc kubenswrapper[4811]: I0128 16:08:05.962539 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-86846dcc5c-hkg4d" 
event={"ID":"73a0ad8a-2cb9-466e-b3e7-251823ea4528","Type":"ContainerStarted","Data":"94aa501eaff1bffc80939872bbbba936d439b2e914bad0e1f78b161b900664ae"} Jan 28 16:08:06 crc kubenswrapper[4811]: I0128 16:08:06.018923 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=6.018902062 podStartE2EDuration="6.018902062s" podCreationTimestamp="2026-01-28 16:08:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:08:06.009787695 +0000 UTC m=+1378.764151278" watchObservedRunningTime="2026-01-28 16:08:06.018902062 +0000 UTC m=+1378.773265645" Jan 28 16:08:06 crc kubenswrapper[4811]: I0128 16:08:06.233480 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:08:06 crc kubenswrapper[4811]: I0128 16:08:06.382890 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28212031-6289-428a-bbed-eb898d9a07a4" path="/var/lib/kubelet/pods/28212031-6289-428a-bbed-eb898d9a07a4/volumes" Jan 28 16:08:06 crc kubenswrapper[4811]: I0128 16:08:06.727320 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 28 16:08:06 crc kubenswrapper[4811]: I0128 16:08:06.815871 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-combined-ca-bundle\") pod \"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3\" (UID: \"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3\") " Jan 28 16:08:06 crc kubenswrapper[4811]: I0128 16:08:06.815959 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-etc-machine-id\") pod \"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3\" (UID: \"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3\") " Jan 28 16:08:06 crc kubenswrapper[4811]: I0128 16:08:06.816062 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-scripts\") pod \"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3\" (UID: \"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3\") " Jan 28 16:08:06 crc kubenswrapper[4811]: I0128 16:08:06.816096 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-config-data\") pod \"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3\" (UID: \"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3\") " Jan 28 16:08:06 crc kubenswrapper[4811]: I0128 16:08:06.816236 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjkct\" (UniqueName: \"kubernetes.io/projected/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-kube-api-access-pjkct\") pod \"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3\" (UID: \"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3\") " Jan 28 16:08:06 crc kubenswrapper[4811]: I0128 16:08:06.816304 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-config-data-custom\") pod \"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3\" (UID: \"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3\") " Jan 28 16:08:06 crc kubenswrapper[4811]: I0128 16:08:06.825568 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded 
for volume "kubernetes.io/host-path/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3" (UID: "78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 16:08:06 crc kubenswrapper[4811]: I0128 16:08:06.826380 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-scripts" (OuterVolumeSpecName: "scripts") pod "78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3" (UID: "78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:06 crc kubenswrapper[4811]: I0128 16:08:06.827376 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3" (UID: "78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:06 crc kubenswrapper[4811]: I0128 16:08:06.833894 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-kube-api-access-pjkct" (OuterVolumeSpecName: "kube-api-access-pjkct") pod "78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3" (UID: "78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3"). InnerVolumeSpecName "kube-api-access-pjkct". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:08:06 crc kubenswrapper[4811]: I0128 16:08:06.919618 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:06 crc kubenswrapper[4811]: I0128 16:08:06.919653 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjkct\" (UniqueName: \"kubernetes.io/projected/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-kube-api-access-pjkct\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:06 crc kubenswrapper[4811]: I0128 16:08:06.919665 4811 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:06 crc kubenswrapper[4811]: I0128 16:08:06.919673 4811 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:06 crc kubenswrapper[4811]: I0128 16:08:06.921253 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3" (UID: "78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.000809 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-config-data" (OuterVolumeSpecName: "config-data") pod "78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3" (UID: "78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.001167 4811 generic.go:334] "Generic (PLEG): container finished" podID="78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3" containerID="dc871bd75052eeee558d1a1018cc059ccd3f3d067be0bc122f35ed3ecd6b0a8d" exitCode=0 Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.001226 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3","Type":"ContainerDied","Data":"dc871bd75052eeee558d1a1018cc059ccd3f3d067be0bc122f35ed3ecd6b0a8d"} Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.001256 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3","Type":"ContainerDied","Data":"1e1f2d7c5a7c623727a2e6adbaf843a4bd7eb994b2bb6e3484db1a5e79c79857"} Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.001292 4811 scope.go:117] "RemoveContainer" containerID="3d2a28188f8937248b4f1b05bda011c3bccbaec5c7f66c1a9028d1062e2ae26f" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.001506 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.016325 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"96b236bc-d5d0-4beb-a5cb-a93384cbaca2","Type":"ContainerStarted","Data":"3a35fd916ec1b60958aa95b6d78e01ef95505ff4a64c72d0785a53a435a13e0d"} Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.025625 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="325d2c92-aca2-4660-b328-f0ebf4d03b7d" containerName="glance-log" containerID="cri-o://5907a42ff1711e16c1f205ce4b6ee860d3d1ccb8326e1a557c2cdccf4c68b4b6" gracePeriod=30 Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.025865 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.025889 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.026006 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="325d2c92-aca2-4660-b328-f0ebf4d03b7d" containerName="glance-httpd" containerID="cri-o://bf36b2396177eb8b17044ccd59a6ae17340ec01ba6ae0b8beabcab4e759887cf" gracePeriod=30 Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.026359 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-86846dcc5c-hkg4d" event={"ID":"73a0ad8a-2cb9-466e-b3e7-251823ea4528","Type":"ContainerStarted","Data":"d30f3af4b0041c5e2a1bda1036b2e0f19543d876ef7630815ac0f86fd2173605"} Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.028911 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="440e4e34-c373-4182-b3e2-ca4f2de8e825" containerName="glance-log" containerID="cri-o://453ab40bec669c2e84bf09590e4bf6d64bd007a5dd0bdd682d684aaa4378aad8" gracePeriod=30 Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 
16:08:07.031335 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-86846dcc5c-hkg4d" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.031388 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="440e4e34-c373-4182-b3e2-ca4f2de8e825" containerName="glance-httpd" containerID="cri-o://beaa34b1bb87634c20de74a307e614a6bdabf08e4b46808857ffe073a08144f5" gracePeriod=30 Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.060508 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-56fd96c5b5-d2r9l" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.066184 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-56fd96c5b5-d2r9l" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.088557 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-86846dcc5c-hkg4d" podStartSLOduration=3.088519156 podStartE2EDuration="3.088519156s" podCreationTimestamp="2026-01-28 16:08:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:08:07.070110867 +0000 UTC m=+1379.824474450" watchObservedRunningTime="2026-01-28 16:08:07.088519156 +0000 UTC m=+1379.842882739" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.185879 4811 scope.go:117] "RemoveContainer" containerID="dc871bd75052eeee558d1a1018cc059ccd3f3d067be0bc122f35ed3ecd6b0a8d" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.188383 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.205827 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.243526 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Jan 28 16:08:07 crc kubenswrapper[4811]: E0128 16:08:07.243965 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3" containerName="cinder-scheduler" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.243984 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3" containerName="cinder-scheduler" Jan 28 16:08:07 crc kubenswrapper[4811]: E0128 16:08:07.244018 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3" containerName="probe" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.244027 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3" containerName="probe" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.244220 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3" containerName="probe" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.244251 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3" containerName="cinder-scheduler" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.245196 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.251647 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.254859 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.336238 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-config-data\") pod \"cinder-scheduler-0\" (UID: \"ff73f2e3-0c2c-4008-bc61-36e65a0ad776\") " pod="openstack/cinder-scheduler-0" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.336326 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxmsm\" (UniqueName: \"kubernetes.io/projected/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-kube-api-access-mxmsm\") pod \"cinder-scheduler-0\" (UID: \"ff73f2e3-0c2c-4008-bc61-36e65a0ad776\") " pod="openstack/cinder-scheduler-0" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.336361 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ff73f2e3-0c2c-4008-bc61-36e65a0ad776\") " pod="openstack/cinder-scheduler-0" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.336393 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-scripts\") pod \"cinder-scheduler-0\" (UID: \"ff73f2e3-0c2c-4008-bc61-36e65a0ad776\") " pod="openstack/cinder-scheduler-0" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.336458 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ff73f2e3-0c2c-4008-bc61-36e65a0ad776\") " pod="openstack/cinder-scheduler-0" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.336516 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ff73f2e3-0c2c-4008-bc61-36e65a0ad776\") " pod="openstack/cinder-scheduler-0" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.372607 4811 scope.go:117] "RemoveContainer" containerID="3d2a28188f8937248b4f1b05bda011c3bccbaec5c7f66c1a9028d1062e2ae26f" Jan 28 16:08:07 crc kubenswrapper[4811]: E0128 16:08:07.377422 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d2a28188f8937248b4f1b05bda011c3bccbaec5c7f66c1a9028d1062e2ae26f\": container with ID starting with 3d2a28188f8937248b4f1b05bda011c3bccbaec5c7f66c1a9028d1062e2ae26f not found: ID does not exist" containerID="3d2a28188f8937248b4f1b05bda011c3bccbaec5c7f66c1a9028d1062e2ae26f" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.377491 4811 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"3d2a28188f8937248b4f1b05bda011c3bccbaec5c7f66c1a9028d1062e2ae26f"} err="failed to get container status \"3d2a28188f8937248b4f1b05bda011c3bccbaec5c7f66c1a9028d1062e2ae26f\": rpc error: code = NotFound desc = could not find container \"3d2a28188f8937248b4f1b05bda011c3bccbaec5c7f66c1a9028d1062e2ae26f\": container with ID starting with 3d2a28188f8937248b4f1b05bda011c3bccbaec5c7f66c1a9028d1062e2ae26f not found: ID does not exist" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.377534 4811 scope.go:117] "RemoveContainer" containerID="dc871bd75052eeee558d1a1018cc059ccd3f3d067be0bc122f35ed3ecd6b0a8d" Jan 28 16:08:07 crc kubenswrapper[4811]: E0128 16:08:07.380782 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc871bd75052eeee558d1a1018cc059ccd3f3d067be0bc122f35ed3ecd6b0a8d\": container with ID starting with dc871bd75052eeee558d1a1018cc059ccd3f3d067be0bc122f35ed3ecd6b0a8d not found: ID does not exist" containerID="dc871bd75052eeee558d1a1018cc059ccd3f3d067be0bc122f35ed3ecd6b0a8d" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.380851 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc871bd75052eeee558d1a1018cc059ccd3f3d067be0bc122f35ed3ecd6b0a8d"} err="failed to get container status \"dc871bd75052eeee558d1a1018cc059ccd3f3d067be0bc122f35ed3ecd6b0a8d\": rpc error: code = NotFound desc = could not find container \"dc871bd75052eeee558d1a1018cc059ccd3f3d067be0bc122f35ed3ecd6b0a8d\": container with ID starting with dc871bd75052eeee558d1a1018cc059ccd3f3d067be0bc122f35ed3ecd6b0a8d not found: ID does not exist" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.437943 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ff73f2e3-0c2c-4008-bc61-36e65a0ad776\") " pod="openstack/cinder-scheduler-0" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.438005 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ff73f2e3-0c2c-4008-bc61-36e65a0ad776\") " pod="openstack/cinder-scheduler-0" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.438161 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-config-data\") pod \"cinder-scheduler-0\" (UID: \"ff73f2e3-0c2c-4008-bc61-36e65a0ad776\") " pod="openstack/cinder-scheduler-0" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.438240 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxmsm\" (UniqueName: \"kubernetes.io/projected/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-kube-api-access-mxmsm\") pod \"cinder-scheduler-0\" (UID: \"ff73f2e3-0c2c-4008-bc61-36e65a0ad776\") " pod="openstack/cinder-scheduler-0" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.438280 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ff73f2e3-0c2c-4008-bc61-36e65a0ad776\") " 
pod="openstack/cinder-scheduler-0" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.438321 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-scripts\") pod \"cinder-scheduler-0\" (UID: \"ff73f2e3-0c2c-4008-bc61-36e65a0ad776\") " pod="openstack/cinder-scheduler-0" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.444203 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ff73f2e3-0c2c-4008-bc61-36e65a0ad776\") " pod="openstack/cinder-scheduler-0" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.452477 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-config-data\") pod \"cinder-scheduler-0\" (UID: \"ff73f2e3-0c2c-4008-bc61-36e65a0ad776\") " pod="openstack/cinder-scheduler-0" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.454041 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ff73f2e3-0c2c-4008-bc61-36e65a0ad776\") " pod="openstack/cinder-scheduler-0" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.460183 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-scripts\") pod \"cinder-scheduler-0\" (UID: \"ff73f2e3-0c2c-4008-bc61-36e65a0ad776\") " pod="openstack/cinder-scheduler-0" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.461490 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ff73f2e3-0c2c-4008-bc61-36e65a0ad776\") " pod="openstack/cinder-scheduler-0" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.465702 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mxmsm\" (UniqueName: \"kubernetes.io/projected/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-kube-api-access-mxmsm\") pod \"cinder-scheduler-0\" (UID: \"ff73f2e3-0c2c-4008-bc61-36e65a0ad776\") " pod="openstack/cinder-scheduler-0" Jan 28 16:08:07 crc kubenswrapper[4811]: I0128 16:08:07.720532 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 28 16:08:07 crc kubenswrapper[4811]: E0128 16:08:07.742399 4811 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod325d2c92_aca2_4660_b328_f0ebf4d03b7d.slice/crio-conmon-bf36b2396177eb8b17044ccd59a6ae17340ec01ba6ae0b8beabcab4e759887cf.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod325d2c92_aca2_4660_b328_f0ebf4d03b7d.slice/crio-bf36b2396177eb8b17044ccd59a6ae17340ec01ba6ae0b8beabcab4e759887cf.scope\": RecentStats: unable to find data in memory cache]" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.100968 4811 generic.go:334] "Generic (PLEG): container finished" podID="325d2c92-aca2-4660-b328-f0ebf4d03b7d" containerID="bf36b2396177eb8b17044ccd59a6ae17340ec01ba6ae0b8beabcab4e759887cf" exitCode=0 Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.101395 4811 generic.go:334] "Generic (PLEG): container finished" podID="325d2c92-aca2-4660-b328-f0ebf4d03b7d" containerID="5907a42ff1711e16c1f205ce4b6ee860d3d1ccb8326e1a557c2cdccf4c68b4b6" exitCode=143 Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.101459 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"325d2c92-aca2-4660-b328-f0ebf4d03b7d","Type":"ContainerDied","Data":"bf36b2396177eb8b17044ccd59a6ae17340ec01ba6ae0b8beabcab4e759887cf"} Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.101494 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"325d2c92-aca2-4660-b328-f0ebf4d03b7d","Type":"ContainerDied","Data":"5907a42ff1711e16c1f205ce4b6ee860d3d1ccb8326e1a557c2cdccf4c68b4b6"} Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.110003 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"96b236bc-d5d0-4beb-a5cb-a93384cbaca2","Type":"ContainerStarted","Data":"1a257c8830ba6170421d2471c2be9db66903601803564b624f1aad7a190187a4"} Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.122841 4811 generic.go:334] "Generic (PLEG): container finished" podID="440e4e34-c373-4182-b3e2-ca4f2de8e825" containerID="beaa34b1bb87634c20de74a307e614a6bdabf08e4b46808857ffe073a08144f5" exitCode=0 Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.122883 4811 generic.go:334] "Generic (PLEG): container finished" podID="440e4e34-c373-4182-b3e2-ca4f2de8e825" containerID="453ab40bec669c2e84bf09590e4bf6d64bd007a5dd0bdd682d684aaa4378aad8" exitCode=143 Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.124056 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"440e4e34-c373-4182-b3e2-ca4f2de8e825","Type":"ContainerDied","Data":"beaa34b1bb87634c20de74a307e614a6bdabf08e4b46808857ffe073a08144f5"} Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.124087 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"440e4e34-c373-4182-b3e2-ca4f2de8e825","Type":"ContainerDied","Data":"453ab40bec669c2e84bf09590e4bf6d64bd007a5dd0bdd682d684aaa4378aad8"} Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.461755 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.549110 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3" path="/var/lib/kubelet/pods/78c9dbb9-8fc2-4380-9daf-19ea4d28b9f3/volumes" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.589099 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.649719 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2tp66\" (UniqueName: \"kubernetes.io/projected/440e4e34-c373-4182-b3e2-ca4f2de8e825-kube-api-access-2tp66\") pod \"440e4e34-c373-4182-b3e2-ca4f2de8e825\" (UID: \"440e4e34-c373-4182-b3e2-ca4f2de8e825\") " Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.650154 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"440e4e34-c373-4182-b3e2-ca4f2de8e825\" (UID: \"440e4e34-c373-4182-b3e2-ca4f2de8e825\") " Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.650197 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/440e4e34-c373-4182-b3e2-ca4f2de8e825-httpd-run\") pod \"440e4e34-c373-4182-b3e2-ca4f2de8e825\" (UID: \"440e4e34-c373-4182-b3e2-ca4f2de8e825\") " Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.650249 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/440e4e34-c373-4182-b3e2-ca4f2de8e825-scripts\") pod \"440e4e34-c373-4182-b3e2-ca4f2de8e825\" (UID: \"440e4e34-c373-4182-b3e2-ca4f2de8e825\") " Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.650319 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/440e4e34-c373-4182-b3e2-ca4f2de8e825-combined-ca-bundle\") pod \"440e4e34-c373-4182-b3e2-ca4f2de8e825\" (UID: \"440e4e34-c373-4182-b3e2-ca4f2de8e825\") " Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.650492 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/440e4e34-c373-4182-b3e2-ca4f2de8e825-config-data\") pod \"440e4e34-c373-4182-b3e2-ca4f2de8e825\" (UID: \"440e4e34-c373-4182-b3e2-ca4f2de8e825\") " Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.650521 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/440e4e34-c373-4182-b3e2-ca4f2de8e825-logs\") pod \"440e4e34-c373-4182-b3e2-ca4f2de8e825\" (UID: \"440e4e34-c373-4182-b3e2-ca4f2de8e825\") " Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.652926 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/440e4e34-c373-4182-b3e2-ca4f2de8e825-logs" (OuterVolumeSpecName: "logs") pod "440e4e34-c373-4182-b3e2-ca4f2de8e825" (UID: "440e4e34-c373-4182-b3e2-ca4f2de8e825"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.653279 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/440e4e34-c373-4182-b3e2-ca4f2de8e825-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "440e4e34-c373-4182-b3e2-ca4f2de8e825" (UID: "440e4e34-c373-4182-b3e2-ca4f2de8e825"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.660675 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "glance") pod "440e4e34-c373-4182-b3e2-ca4f2de8e825" (UID: "440e4e34-c373-4182-b3e2-ca4f2de8e825"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.663454 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/440e4e34-c373-4182-b3e2-ca4f2de8e825-logs\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.663496 4811 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.663510 4811 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/440e4e34-c373-4182-b3e2-ca4f2de8e825-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.668587 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/440e4e34-c373-4182-b3e2-ca4f2de8e825-scripts" (OuterVolumeSpecName: "scripts") pod "440e4e34-c373-4182-b3e2-ca4f2de8e825" (UID: "440e4e34-c373-4182-b3e2-ca4f2de8e825"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.676270 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/440e4e34-c373-4182-b3e2-ca4f2de8e825-kube-api-access-2tp66" (OuterVolumeSpecName: "kube-api-access-2tp66") pod "440e4e34-c373-4182-b3e2-ca4f2de8e825" (UID: "440e4e34-c373-4182-b3e2-ca4f2de8e825"). InnerVolumeSpecName "kube-api-access-2tp66". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.765325 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/325d2c92-aca2-4660-b328-f0ebf4d03b7d-combined-ca-bundle\") pod \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\" (UID: \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\") " Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.765373 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/325d2c92-aca2-4660-b328-f0ebf4d03b7d-scripts\") pod \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\" (UID: \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\") " Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.765443 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-df4t7\" (UniqueName: \"kubernetes.io/projected/325d2c92-aca2-4660-b328-f0ebf4d03b7d-kube-api-access-df4t7\") pod \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\" (UID: \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\") " Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.765607 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/325d2c92-aca2-4660-b328-f0ebf4d03b7d-httpd-run\") pod \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\" (UID: \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\") " Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.765649 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/325d2c92-aca2-4660-b328-f0ebf4d03b7d-config-data\") pod \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\" (UID: \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\") " Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.765689 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\" (UID: \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\") " Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.765763 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/325d2c92-aca2-4660-b328-f0ebf4d03b7d-logs\") pod \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\" (UID: \"325d2c92-aca2-4660-b328-f0ebf4d03b7d\") " Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.766282 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2tp66\" (UniqueName: \"kubernetes.io/projected/440e4e34-c373-4182-b3e2-ca4f2de8e825-kube-api-access-2tp66\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.766299 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/440e4e34-c373-4182-b3e2-ca4f2de8e825-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.766777 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/325d2c92-aca2-4660-b328-f0ebf4d03b7d-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "325d2c92-aca2-4660-b328-f0ebf4d03b7d" (UID: "325d2c92-aca2-4660-b328-f0ebf4d03b7d"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.767239 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/325d2c92-aca2-4660-b328-f0ebf4d03b7d-logs" (OuterVolumeSpecName: "logs") pod "325d2c92-aca2-4660-b328-f0ebf4d03b7d" (UID: "325d2c92-aca2-4660-b328-f0ebf4d03b7d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.775763 4811 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.783260 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/325d2c92-aca2-4660-b328-f0ebf4d03b7d-kube-api-access-df4t7" (OuterVolumeSpecName: "kube-api-access-df4t7") pod "325d2c92-aca2-4660-b328-f0ebf4d03b7d" (UID: "325d2c92-aca2-4660-b328-f0ebf4d03b7d"). InnerVolumeSpecName "kube-api-access-df4t7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.783565 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/325d2c92-aca2-4660-b328-f0ebf4d03b7d-scripts" (OuterVolumeSpecName: "scripts") pod "325d2c92-aca2-4660-b328-f0ebf4d03b7d" (UID: "325d2c92-aca2-4660-b328-f0ebf4d03b7d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.787344 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/440e4e34-c373-4182-b3e2-ca4f2de8e825-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "440e4e34-c373-4182-b3e2-ca4f2de8e825" (UID: "440e4e34-c373-4182-b3e2-ca4f2de8e825"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.787508 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "325d2c92-aca2-4660-b328-f0ebf4d03b7d" (UID: "325d2c92-aca2-4660-b328-f0ebf4d03b7d"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.794667 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 28 16:08:08 crc kubenswrapper[4811]: W0128 16:08:08.800898 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podff73f2e3_0c2c_4008_bc61_36e65a0ad776.slice/crio-ade24fd04f1a6f0a928556e126cd7ef3c131908931c7e5e2d8a5d2e05e6ccf89 WatchSource:0}: Error finding container ade24fd04f1a6f0a928556e126cd7ef3c131908931c7e5e2d8a5d2e05e6ccf89: Status 404 returned error can't find the container with id ade24fd04f1a6f0a928556e126cd7ef3c131908931c7e5e2d8a5d2e05e6ccf89 Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.854106 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/325d2c92-aca2-4660-b328-f0ebf4d03b7d-config-data" (OuterVolumeSpecName: "config-data") pod "325d2c92-aca2-4660-b328-f0ebf4d03b7d" (UID: "325d2c92-aca2-4660-b328-f0ebf4d03b7d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.865576 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/440e4e34-c373-4182-b3e2-ca4f2de8e825-config-data" (OuterVolumeSpecName: "config-data") pod "440e4e34-c373-4182-b3e2-ca4f2de8e825" (UID: "440e4e34-c373-4182-b3e2-ca4f2de8e825"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.867839 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-df4t7\" (UniqueName: \"kubernetes.io/projected/325d2c92-aca2-4660-b328-f0ebf4d03b7d-kube-api-access-df4t7\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.867878 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/440e4e34-c373-4182-b3e2-ca4f2de8e825-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.867891 4811 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.867906 4811 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/325d2c92-aca2-4660-b328-f0ebf4d03b7d-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.867917 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/325d2c92-aca2-4660-b328-f0ebf4d03b7d-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.867942 4811 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.867959 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/440e4e34-c373-4182-b3e2-ca4f2de8e825-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.867970 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/325d2c92-aca2-4660-b328-f0ebf4d03b7d-logs\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.867980 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/325d2c92-aca2-4660-b328-f0ebf4d03b7d-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.880402 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/325d2c92-aca2-4660-b328-f0ebf4d03b7d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "325d2c92-aca2-4660-b328-f0ebf4d03b7d" (UID: "325d2c92-aca2-4660-b328-f0ebf4d03b7d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.917302 4811 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.972770 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/325d2c92-aca2-4660-b328-f0ebf4d03b7d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:08 crc kubenswrapper[4811]: I0128 16:08:08.972806 4811 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.184142 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"96b236bc-d5d0-4beb-a5cb-a93384cbaca2","Type":"ContainerStarted","Data":"50c648e420c733a8c5868c472084552bba3edf2a988519678694481205f719e1"} Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.190784 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"440e4e34-c373-4182-b3e2-ca4f2de8e825","Type":"ContainerDied","Data":"dac8ee745d9527e86a660546bf48f52edb7d175036036919bff330dd3b3970c7"} Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.190838 4811 scope.go:117] "RemoveContainer" containerID="beaa34b1bb87634c20de74a307e614a6bdabf08e4b46808857ffe073a08144f5" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.191008 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.196901 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ff73f2e3-0c2c-4008-bc61-36e65a0ad776","Type":"ContainerStarted","Data":"ade24fd04f1a6f0a928556e126cd7ef3c131908931c7e5e2d8a5d2e05e6ccf89"} Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.243699 4811 scope.go:117] "RemoveContainer" containerID="453ab40bec669c2e84bf09590e4bf6d64bd007a5dd0bdd682d684aaa4378aad8" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.246047 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.255818 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"325d2c92-aca2-4660-b328-f0ebf4d03b7d","Type":"ContainerDied","Data":"0575dcf94db48fca03f541f7efa65eac95b191f6e5ebb45134279537e5e8adee"} Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.263083 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.303883 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.338716 4811 scope.go:117] "RemoveContainer" containerID="bf36b2396177eb8b17044ccd59a6ae17340ec01ba6ae0b8beabcab4e759887cf" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.348571 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Jan 28 16:08:09 crc kubenswrapper[4811]: E0128 16:08:09.349219 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="440e4e34-c373-4182-b3e2-ca4f2de8e825" containerName="glance-httpd" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.349243 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="440e4e34-c373-4182-b3e2-ca4f2de8e825" containerName="glance-httpd" Jan 28 16:08:09 crc kubenswrapper[4811]: E0128 16:08:09.349580 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="325d2c92-aca2-4660-b328-f0ebf4d03b7d" containerName="glance-httpd" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.349591 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="325d2c92-aca2-4660-b328-f0ebf4d03b7d" containerName="glance-httpd" Jan 28 16:08:09 crc kubenswrapper[4811]: E0128 16:08:09.349615 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="440e4e34-c373-4182-b3e2-ca4f2de8e825" containerName="glance-log" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.349639 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="440e4e34-c373-4182-b3e2-ca4f2de8e825" containerName="glance-log" Jan 28 16:08:09 crc kubenswrapper[4811]: E0128 16:08:09.349676 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="325d2c92-aca2-4660-b328-f0ebf4d03b7d" containerName="glance-log" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.349684 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="325d2c92-aca2-4660-b328-f0ebf4d03b7d" containerName="glance-log" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.349932 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="325d2c92-aca2-4660-b328-f0ebf4d03b7d" containerName="glance-httpd" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.349958 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="440e4e34-c373-4182-b3e2-ca4f2de8e825" containerName="glance-httpd" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.349976 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="440e4e34-c373-4182-b3e2-ca4f2de8e825" containerName="glance-log" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.350124 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="325d2c92-aca2-4660-b328-f0ebf4d03b7d" containerName="glance-log" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.353339 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.356794 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.357482 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.357725 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-vn8wr" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.357852 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.375840 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.409737 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.421309 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.433976 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.435677 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.442980 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.443172 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.457656 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.483457 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"6632e30b-e20e-44d0-848f-90221b0e31f8\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.483568 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6632e30b-e20e-44d0-848f-90221b0e31f8-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"6632e30b-e20e-44d0-848f-90221b0e31f8\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.483628 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwblz\" (UniqueName: \"kubernetes.io/projected/6632e30b-e20e-44d0-848f-90221b0e31f8-kube-api-access-jwblz\") pod \"glance-default-external-api-0\" (UID: \"6632e30b-e20e-44d0-848f-90221b0e31f8\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.483662 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/6632e30b-e20e-44d0-848f-90221b0e31f8-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"6632e30b-e20e-44d0-848f-90221b0e31f8\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.483689 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6632e30b-e20e-44d0-848f-90221b0e31f8-scripts\") pod \"glance-default-external-api-0\" (UID: \"6632e30b-e20e-44d0-848f-90221b0e31f8\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.483723 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6632e30b-e20e-44d0-848f-90221b0e31f8-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"6632e30b-e20e-44d0-848f-90221b0e31f8\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.483756 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6632e30b-e20e-44d0-848f-90221b0e31f8-logs\") pod \"glance-default-external-api-0\" (UID: \"6632e30b-e20e-44d0-848f-90221b0e31f8\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.483779 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6632e30b-e20e-44d0-848f-90221b0e31f8-config-data\") pod \"glance-default-external-api-0\" (UID: \"6632e30b-e20e-44d0-848f-90221b0e31f8\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.494395 4811 scope.go:117] "RemoveContainer" containerID="5907a42ff1711e16c1f205ce4b6ee860d3d1ccb8326e1a557c2cdccf4c68b4b6" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.587282 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-logs\") pod \"glance-default-internal-api-0\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.587383 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"6632e30b-e20e-44d0-848f-90221b0e31f8\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.587447 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9v9r6\" (UniqueName: \"kubernetes.io/projected/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-kube-api-access-9v9r6\") pod \"glance-default-internal-api-0\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.587473 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") " pod="openstack/glance-default-internal-api-0" 
Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.587740 4811 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"6632e30b-e20e-44d0-848f-90221b0e31f8\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/glance-default-external-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.587912 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.587973 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6632e30b-e20e-44d0-848f-90221b0e31f8-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"6632e30b-e20e-44d0-848f-90221b0e31f8\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.588003 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.588048 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.588087 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.589622 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwblz\" (UniqueName: \"kubernetes.io/projected/6632e30b-e20e-44d0-848f-90221b0e31f8-kube-api-access-jwblz\") pod \"glance-default-external-api-0\" (UID: \"6632e30b-e20e-44d0-848f-90221b0e31f8\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.589709 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6632e30b-e20e-44d0-848f-90221b0e31f8-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"6632e30b-e20e-44d0-848f-90221b0e31f8\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.589750 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6632e30b-e20e-44d0-848f-90221b0e31f8-scripts\") pod \"glance-default-external-api-0\" (UID: \"6632e30b-e20e-44d0-848f-90221b0e31f8\") " 
pod="openstack/glance-default-external-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.589828 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6632e30b-e20e-44d0-848f-90221b0e31f8-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"6632e30b-e20e-44d0-848f-90221b0e31f8\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.589903 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6632e30b-e20e-44d0-848f-90221b0e31f8-logs\") pod \"glance-default-external-api-0\" (UID: \"6632e30b-e20e-44d0-848f-90221b0e31f8\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.589934 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6632e30b-e20e-44d0-848f-90221b0e31f8-config-data\") pod \"glance-default-external-api-0\" (UID: \"6632e30b-e20e-44d0-848f-90221b0e31f8\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.590310 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6632e30b-e20e-44d0-848f-90221b0e31f8-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"6632e30b-e20e-44d0-848f-90221b0e31f8\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.590368 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6632e30b-e20e-44d0-848f-90221b0e31f8-logs\") pod \"glance-default-external-api-0\" (UID: \"6632e30b-e20e-44d0-848f-90221b0e31f8\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.591167 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.597192 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6632e30b-e20e-44d0-848f-90221b0e31f8-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"6632e30b-e20e-44d0-848f-90221b0e31f8\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.601366 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.601841 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6632e30b-e20e-44d0-848f-90221b0e31f8-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"6632e30b-e20e-44d0-848f-90221b0e31f8\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.602235 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6632e30b-e20e-44d0-848f-90221b0e31f8-config-data\") pod \"glance-default-external-api-0\" (UID: 
\"6632e30b-e20e-44d0-848f-90221b0e31f8\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.606307 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6632e30b-e20e-44d0-848f-90221b0e31f8-scripts\") pod \"glance-default-external-api-0\" (UID: \"6632e30b-e20e-44d0-848f-90221b0e31f8\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.618416 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwblz\" (UniqueName: \"kubernetes.io/projected/6632e30b-e20e-44d0-848f-90221b0e31f8-kube-api-access-jwblz\") pod \"glance-default-external-api-0\" (UID: \"6632e30b-e20e-44d0-848f-90221b0e31f8\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.660330 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"6632e30b-e20e-44d0-848f-90221b0e31f8\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.703550 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.703678 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-logs\") pod \"glance-default-internal-api-0\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.703751 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.703772 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9v9r6\" (UniqueName: \"kubernetes.io/projected/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-kube-api-access-9v9r6\") pod \"glance-default-internal-api-0\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.703854 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.703911 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 
16:08:09.703948 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.703997 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.705229 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.742554 4811 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-internal-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.745248 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.753309 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.755811 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-logs\") pod \"glance-default-internal-api-0\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.763768 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.769939 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9v9r6\" (UniqueName: \"kubernetes.io/projected/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-kube-api-access-9v9r6\") pod \"glance-default-internal-api-0\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.890871 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") " 
pod="openstack/glance-default-internal-api-0" Jan 28 16:08:09 crc kubenswrapper[4811]: I0128 16:08:09.894799 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:10 crc kubenswrapper[4811]: I0128 16:08:10.011256 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:10 crc kubenswrapper[4811]: I0128 16:08:10.076878 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 28 16:08:10 crc kubenswrapper[4811]: I0128 16:08:10.285165 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"96b236bc-d5d0-4beb-a5cb-a93384cbaca2","Type":"ContainerStarted","Data":"b55d6f3771bfcb18d5fcb592319cef1f0e2e656a40b67dac3a81d940923e94dd"} Jan 28 16:08:10 crc kubenswrapper[4811]: I0128 16:08:10.360752 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="325d2c92-aca2-4660-b328-f0ebf4d03b7d" path="/var/lib/kubelet/pods/325d2c92-aca2-4660-b328-f0ebf4d03b7d/volumes" Jan 28 16:08:10 crc kubenswrapper[4811]: I0128 16:08:10.362671 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="440e4e34-c373-4182-b3e2-ca4f2de8e825" path="/var/lib/kubelet/pods/440e4e34-c373-4182-b3e2-ca4f2de8e825/volumes" Jan 28 16:08:10 crc kubenswrapper[4811]: I0128 16:08:10.636018 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 28 16:08:10 crc kubenswrapper[4811]: I0128 16:08:10.947415 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 28 16:08:10 crc kubenswrapper[4811]: W0128 16:08:10.963815 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0fa9f0f6_82aa_44c9_9f54_33538efb87bf.slice/crio-210ef4ec949a6709eaa49ab9b8fc28730ea98c1ffc69852e9ba68f0d3fd37d8d WatchSource:0}: Error finding container 210ef4ec949a6709eaa49ab9b8fc28730ea98c1ffc69852e9ba68f0d3fd37d8d: Status 404 returned error can't find the container with id 210ef4ec949a6709eaa49ab9b8fc28730ea98c1ffc69852e9ba68f0d3fd37d8d Jan 28 16:08:11 crc kubenswrapper[4811]: I0128 16:08:11.078094 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6b4f5fc4f-j57cr" Jan 28 16:08:11 crc kubenswrapper[4811]: I0128 16:08:11.217611 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8495b76777-md4rx"] Jan 28 16:08:11 crc kubenswrapper[4811]: I0128 16:08:11.217973 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8495b76777-md4rx" podUID="4be24dd0-394a-4835-9cb9-2684c2492079" containerName="dnsmasq-dns" containerID="cri-o://1857aece1c28a54547dcfcd42cea132c99e0a01aebb8d17bd1c22c2f7eca4e0a" gracePeriod=10 Jan 28 16:08:11 crc kubenswrapper[4811]: I0128 16:08:11.394440 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"0fa9f0f6-82aa-44c9-9f54-33538efb87bf","Type":"ContainerStarted","Data":"210ef4ec949a6709eaa49ab9b8fc28730ea98c1ffc69852e9ba68f0d3fd37d8d"} Jan 28 16:08:11 crc kubenswrapper[4811]: I0128 16:08:11.400274 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6632e30b-e20e-44d0-848f-90221b0e31f8","Type":"ContainerStarted","Data":"4e797e9c0a8926cd81ec403d6804cde92c7262e4c20949a113237fdc2d888c0c"} Jan 28 16:08:11 crc kubenswrapper[4811]: I0128 16:08:11.407553 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ff73f2e3-0c2c-4008-bc61-36e65a0ad776","Type":"ContainerStarted","Data":"86828187d9cd31239028cb631f45f10cb80918e0ec369e86d8c2a82f84f3b061"} Jan 28 16:08:12 crc kubenswrapper[4811]: I0128 16:08:12.154894 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-8495b76777-md4rx" podUID="4be24dd0-394a-4835-9cb9-2684c2492079" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.144:5353: connect: connection refused" Jan 28 16:08:12 crc kubenswrapper[4811]: I0128 16:08:12.441011 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ff73f2e3-0c2c-4008-bc61-36e65a0ad776","Type":"ContainerStarted","Data":"d1bc0298c4d4006930b0858e1c0d43af29a7daf4a41eaa0feae8d160b91e3120"} Jan 28 16:08:12 crc kubenswrapper[4811]: I0128 16:08:12.451638 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6632e30b-e20e-44d0-848f-90221b0e31f8","Type":"ContainerStarted","Data":"def92992084d92129bb1bd8e21fe1947d715fc2dae7f5c1c67b1e621cb755121"} Jan 28 16:08:12 crc kubenswrapper[4811]: I0128 16:08:12.480846 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=5.48082021 podStartE2EDuration="5.48082021s" podCreationTimestamp="2026-01-28 16:08:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:08:12.475036034 +0000 UTC m=+1385.229399637" watchObservedRunningTime="2026-01-28 16:08:12.48082021 +0000 UTC m=+1385.235183793" Jan 28 16:08:12 crc kubenswrapper[4811]: I0128 16:08:12.487299 4811 generic.go:334] "Generic (PLEG): container finished" podID="4be24dd0-394a-4835-9cb9-2684c2492079" containerID="1857aece1c28a54547dcfcd42cea132c99e0a01aebb8d17bd1c22c2f7eca4e0a" exitCode=0 Jan 28 16:08:12 crc kubenswrapper[4811]: I0128 16:08:12.487607 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8495b76777-md4rx" event={"ID":"4be24dd0-394a-4835-9cb9-2684c2492079","Type":"ContainerDied","Data":"1857aece1c28a54547dcfcd42cea132c99e0a01aebb8d17bd1c22c2f7eca4e0a"} Jan 28 16:08:12 crc kubenswrapper[4811]: I0128 16:08:12.724542 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Jan 28 16:08:13 crc kubenswrapper[4811]: I0128 16:08:13.512610 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0fa9f0f6-82aa-44c9-9f54-33538efb87bf","Type":"ContainerStarted","Data":"6d2156a391bac47700cf51e01e775fd4a9a5e045eb21e767048c52f19b97ada1"} Jan 28 16:08:15 crc kubenswrapper[4811]: I0128 16:08:15.994053 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:08:17 crc kubenswrapper[4811]: I0128 16:08:17.978384 4811 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Jan 28 16:08:18 crc kubenswrapper[4811]: I0128 16:08:18.516681 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 28 16:08:19 crc kubenswrapper[4811]: I0128 16:08:19.591291 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 28 16:08:19 crc kubenswrapper[4811]: I0128 16:08:19.707374 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-2952-account-create-update-8fmfb"] Jan 28 16:08:19 crc kubenswrapper[4811]: I0128 16:08:19.708778 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-2952-account-create-update-8fmfb" Jan 28 16:08:19 crc kubenswrapper[4811]: I0128 16:08:19.725535 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Jan 28 16:08:19 crc kubenswrapper[4811]: I0128 16:08:19.727620 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-x2dtb"] Jan 28 16:08:19 crc kubenswrapper[4811]: I0128 16:08:19.728914 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-x2dtb" Jan 28 16:08:19 crc kubenswrapper[4811]: I0128 16:08:19.744989 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-x2dtb"] Jan 28 16:08:19 crc kubenswrapper[4811]: I0128 16:08:19.773582 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-2952-account-create-update-8fmfb"] Jan 28 16:08:19 crc kubenswrapper[4811]: I0128 16:08:19.839312 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa06bb82-ce18-4cad-b539-b9ec87700437-operator-scripts\") pod \"nova-api-2952-account-create-update-8fmfb\" (UID: \"fa06bb82-ce18-4cad-b539-b9ec87700437\") " pod="openstack/nova-api-2952-account-create-update-8fmfb" Jan 28 16:08:19 crc kubenswrapper[4811]: I0128 16:08:19.839576 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqc2t\" (UniqueName: \"kubernetes.io/projected/fa06bb82-ce18-4cad-b539-b9ec87700437-kube-api-access-sqc2t\") pod \"nova-api-2952-account-create-update-8fmfb\" (UID: \"fa06bb82-ce18-4cad-b539-b9ec87700437\") " pod="openstack/nova-api-2952-account-create-update-8fmfb" Jan 28 16:08:19 crc kubenswrapper[4811]: I0128 16:08:19.839688 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44b8b\" (UniqueName: \"kubernetes.io/projected/120fd687-33f8-4530-9a07-c0388d53fd4e-kube-api-access-44b8b\") pod \"nova-api-db-create-x2dtb\" (UID: \"120fd687-33f8-4530-9a07-c0388d53fd4e\") " pod="openstack/nova-api-db-create-x2dtb" Jan 28 16:08:19 crc kubenswrapper[4811]: I0128 16:08:19.839756 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/120fd687-33f8-4530-9a07-c0388d53fd4e-operator-scripts\") pod \"nova-api-db-create-x2dtb\" (UID: \"120fd687-33f8-4530-9a07-c0388d53fd4e\") " pod="openstack/nova-api-db-create-x2dtb" Jan 28 16:08:19 crc kubenswrapper[4811]: I0128 16:08:19.870519 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-mdvqk"] Jan 28 16:08:19 crc kubenswrapper[4811]: I0128 
16:08:19.878012 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-mdvqk" Jan 28 16:08:19 crc kubenswrapper[4811]: I0128 16:08:19.890500 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-mdvqk"] Jan 28 16:08:19 crc kubenswrapper[4811]: I0128 16:08:19.928762 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-lb2cx"] Jan 28 16:08:19 crc kubenswrapper[4811]: I0128 16:08:19.929803 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-lb2cx" Jan 28 16:08:19 crc kubenswrapper[4811]: I0128 16:08:19.942681 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2cb09ee7-ca68-4a03-9190-242af91783b1-operator-scripts\") pod \"nova-cell0-db-create-mdvqk\" (UID: \"2cb09ee7-ca68-4a03-9190-242af91783b1\") " pod="openstack/nova-cell0-db-create-mdvqk" Jan 28 16:08:19 crc kubenswrapper[4811]: I0128 16:08:19.942832 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa06bb82-ce18-4cad-b539-b9ec87700437-operator-scripts\") pod \"nova-api-2952-account-create-update-8fmfb\" (UID: \"fa06bb82-ce18-4cad-b539-b9ec87700437\") " pod="openstack/nova-api-2952-account-create-update-8fmfb" Jan 28 16:08:19 crc kubenswrapper[4811]: I0128 16:08:19.942938 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sqc2t\" (UniqueName: \"kubernetes.io/projected/fa06bb82-ce18-4cad-b539-b9ec87700437-kube-api-access-sqc2t\") pod \"nova-api-2952-account-create-update-8fmfb\" (UID: \"fa06bb82-ce18-4cad-b539-b9ec87700437\") " pod="openstack/nova-api-2952-account-create-update-8fmfb" Jan 28 16:08:19 crc kubenswrapper[4811]: I0128 16:08:19.943029 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44b8b\" (UniqueName: \"kubernetes.io/projected/120fd687-33f8-4530-9a07-c0388d53fd4e-kube-api-access-44b8b\") pod \"nova-api-db-create-x2dtb\" (UID: \"120fd687-33f8-4530-9a07-c0388d53fd4e\") " pod="openstack/nova-api-db-create-x2dtb" Jan 28 16:08:19 crc kubenswrapper[4811]: I0128 16:08:19.943075 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqgzp\" (UniqueName: \"kubernetes.io/projected/2cb09ee7-ca68-4a03-9190-242af91783b1-kube-api-access-jqgzp\") pod \"nova-cell0-db-create-mdvqk\" (UID: \"2cb09ee7-ca68-4a03-9190-242af91783b1\") " pod="openstack/nova-cell0-db-create-mdvqk" Jan 28 16:08:19 crc kubenswrapper[4811]: I0128 16:08:19.943113 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/120fd687-33f8-4530-9a07-c0388d53fd4e-operator-scripts\") pod \"nova-api-db-create-x2dtb\" (UID: \"120fd687-33f8-4530-9a07-c0388d53fd4e\") " pod="openstack/nova-api-db-create-x2dtb" Jan 28 16:08:19 crc kubenswrapper[4811]: I0128 16:08:19.944178 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa06bb82-ce18-4cad-b539-b9ec87700437-operator-scripts\") pod \"nova-api-2952-account-create-update-8fmfb\" (UID: \"fa06bb82-ce18-4cad-b539-b9ec87700437\") " pod="openstack/nova-api-2952-account-create-update-8fmfb" Jan 28 16:08:19 crc kubenswrapper[4811]: I0128 
Jan 28 16:08:19 crc kubenswrapper[4811]: I0128 16:08:19.951514 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-99f2-account-create-update-xdmdm"]
Jan 28 16:08:19 crc kubenswrapper[4811]: I0128 16:08:19.952930 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-99f2-account-create-update-xdmdm"
Jan 28 16:08:19 crc kubenswrapper[4811]: I0128 16:08:19.958178 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret"
Jan 28 16:08:19 crc kubenswrapper[4811]: I0128 16:08:19.968414 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-lb2cx"]
Jan 28 16:08:19 crc kubenswrapper[4811]: I0128 16:08:19.984074 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44b8b\" (UniqueName: \"kubernetes.io/projected/120fd687-33f8-4530-9a07-c0388d53fd4e-kube-api-access-44b8b\") pod \"nova-api-db-create-x2dtb\" (UID: \"120fd687-33f8-4530-9a07-c0388d53fd4e\") " pod="openstack/nova-api-db-create-x2dtb"
Jan 28 16:08:19 crc kubenswrapper[4811]: I0128 16:08:19.999391 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-99f2-account-create-update-xdmdm"]
Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.003118 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqc2t\" (UniqueName: \"kubernetes.io/projected/fa06bb82-ce18-4cad-b539-b9ec87700437-kube-api-access-sqc2t\") pod \"nova-api-2952-account-create-update-8fmfb\" (UID: \"fa06bb82-ce18-4cad-b539-b9ec87700437\") " pod="openstack/nova-api-2952-account-create-update-8fmfb"
Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.044423 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqgzp\" (UniqueName: \"kubernetes.io/projected/2cb09ee7-ca68-4a03-9190-242af91783b1-kube-api-access-jqgzp\") pod \"nova-cell0-db-create-mdvqk\" (UID: \"2cb09ee7-ca68-4a03-9190-242af91783b1\") " pod="openstack/nova-cell0-db-create-mdvqk"
Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.044500 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpklj\" (UniqueName: \"kubernetes.io/projected/9dec1ac3-ea25-430b-8934-fda54a06648c-kube-api-access-tpklj\") pod \"nova-cell1-db-create-lb2cx\" (UID: \"9dec1ac3-ea25-430b-8934-fda54a06648c\") " pod="openstack/nova-cell1-db-create-lb2cx"
Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.044519 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgqlz\" (UniqueName: \"kubernetes.io/projected/d853eecf-d953-46ff-bf52-92c60a9213f6-kube-api-access-xgqlz\") pod \"nova-cell0-99f2-account-create-update-xdmdm\" (UID: \"d853eecf-d953-46ff-bf52-92c60a9213f6\") " pod="openstack/nova-cell0-99f2-account-create-update-xdmdm"
Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.044577 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d853eecf-d953-46ff-bf52-92c60a9213f6-operator-scripts\") pod \"nova-cell0-99f2-account-create-update-xdmdm\" (UID: \"d853eecf-d953-46ff-bf52-92c60a9213f6\") " pod="openstack/nova-cell0-99f2-account-create-update-xdmdm"
\"kubernetes.io/configmap/d853eecf-d953-46ff-bf52-92c60a9213f6-operator-scripts\") pod \"nova-cell0-99f2-account-create-update-xdmdm\" (UID: \"d853eecf-d953-46ff-bf52-92c60a9213f6\") " pod="openstack/nova-cell0-99f2-account-create-update-xdmdm" Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.044598 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2cb09ee7-ca68-4a03-9190-242af91783b1-operator-scripts\") pod \"nova-cell0-db-create-mdvqk\" (UID: \"2cb09ee7-ca68-4a03-9190-242af91783b1\") " pod="openstack/nova-cell0-db-create-mdvqk" Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.044640 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9dec1ac3-ea25-430b-8934-fda54a06648c-operator-scripts\") pod \"nova-cell1-db-create-lb2cx\" (UID: \"9dec1ac3-ea25-430b-8934-fda54a06648c\") " pod="openstack/nova-cell1-db-create-lb2cx" Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.044818 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-2952-account-create-update-8fmfb" Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.046059 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2cb09ee7-ca68-4a03-9190-242af91783b1-operator-scripts\") pod \"nova-cell0-db-create-mdvqk\" (UID: \"2cb09ee7-ca68-4a03-9190-242af91783b1\") " pod="openstack/nova-cell0-db-create-mdvqk" Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.054452 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-x2dtb" Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.094417 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqgzp\" (UniqueName: \"kubernetes.io/projected/2cb09ee7-ca68-4a03-9190-242af91783b1-kube-api-access-jqgzp\") pod \"nova-cell0-db-create-mdvqk\" (UID: \"2cb09ee7-ca68-4a03-9190-242af91783b1\") " pod="openstack/nova-cell0-db-create-mdvqk" Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.141177 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-395c-account-create-update-gl7x6"] Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.142925 4811 util.go:30] "No sandbox for pod can be found. 
Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.149056 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret"
Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.156138 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-395c-account-create-update-gl7x6"]
Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.158697 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpklj\" (UniqueName: \"kubernetes.io/projected/9dec1ac3-ea25-430b-8934-fda54a06648c-kube-api-access-tpklj\") pod \"nova-cell1-db-create-lb2cx\" (UID: \"9dec1ac3-ea25-430b-8934-fda54a06648c\") " pod="openstack/nova-cell1-db-create-lb2cx"
Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.158767 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgqlz\" (UniqueName: \"kubernetes.io/projected/d853eecf-d953-46ff-bf52-92c60a9213f6-kube-api-access-xgqlz\") pod \"nova-cell0-99f2-account-create-update-xdmdm\" (UID: \"d853eecf-d953-46ff-bf52-92c60a9213f6\") " pod="openstack/nova-cell0-99f2-account-create-update-xdmdm"
Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.158993 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d853eecf-d953-46ff-bf52-92c60a9213f6-operator-scripts\") pod \"nova-cell0-99f2-account-create-update-xdmdm\" (UID: \"d853eecf-d953-46ff-bf52-92c60a9213f6\") " pod="openstack/nova-cell0-99f2-account-create-update-xdmdm"
Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.159149 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9dec1ac3-ea25-430b-8934-fda54a06648c-operator-scripts\") pod \"nova-cell1-db-create-lb2cx\" (UID: \"9dec1ac3-ea25-430b-8934-fda54a06648c\") " pod="openstack/nova-cell1-db-create-lb2cx"
Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.160154 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d853eecf-d953-46ff-bf52-92c60a9213f6-operator-scripts\") pod \"nova-cell0-99f2-account-create-update-xdmdm\" (UID: \"d853eecf-d953-46ff-bf52-92c60a9213f6\") " pod="openstack/nova-cell0-99f2-account-create-update-xdmdm"
Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.160275 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9dec1ac3-ea25-430b-8934-fda54a06648c-operator-scripts\") pod \"nova-cell1-db-create-lb2cx\" (UID: \"9dec1ac3-ea25-430b-8934-fda54a06648c\") " pod="openstack/nova-cell1-db-create-lb2cx"
Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.200226 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-mdvqk"
Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.215229 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpklj\" (UniqueName: \"kubernetes.io/projected/9dec1ac3-ea25-430b-8934-fda54a06648c-kube-api-access-tpklj\") pod \"nova-cell1-db-create-lb2cx\" (UID: \"9dec1ac3-ea25-430b-8934-fda54a06648c\") " pod="openstack/nova-cell1-db-create-lb2cx"
Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.220726 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xgqlz\" (UniqueName: \"kubernetes.io/projected/d853eecf-d953-46ff-bf52-92c60a9213f6-kube-api-access-xgqlz\") pod \"nova-cell0-99f2-account-create-update-xdmdm\" (UID: \"d853eecf-d953-46ff-bf52-92c60a9213f6\") " pod="openstack/nova-cell0-99f2-account-create-update-xdmdm"
Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.252052 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-lb2cx"
Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.260886 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5f6z\" (UniqueName: \"kubernetes.io/projected/7db6998b-1c77-4473-8656-6e64c5f14688-kube-api-access-k5f6z\") pod \"nova-cell1-395c-account-create-update-gl7x6\" (UID: \"7db6998b-1c77-4473-8656-6e64c5f14688\") " pod="openstack/nova-cell1-395c-account-create-update-gl7x6"
Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.260943 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7db6998b-1c77-4473-8656-6e64c5f14688-operator-scripts\") pod \"nova-cell1-395c-account-create-update-gl7x6\" (UID: \"7db6998b-1c77-4473-8656-6e64c5f14688\") " pod="openstack/nova-cell1-395c-account-create-update-gl7x6"
Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.277898 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-99f2-account-create-update-xdmdm"
Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.363154 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5f6z\" (UniqueName: \"kubernetes.io/projected/7db6998b-1c77-4473-8656-6e64c5f14688-kube-api-access-k5f6z\") pod \"nova-cell1-395c-account-create-update-gl7x6\" (UID: \"7db6998b-1c77-4473-8656-6e64c5f14688\") " pod="openstack/nova-cell1-395c-account-create-update-gl7x6"
Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.363217 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7db6998b-1c77-4473-8656-6e64c5f14688-operator-scripts\") pod \"nova-cell1-395c-account-create-update-gl7x6\" (UID: \"7db6998b-1c77-4473-8656-6e64c5f14688\") " pod="openstack/nova-cell1-395c-account-create-update-gl7x6"
Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.381832 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5f6z\" (UniqueName: \"kubernetes.io/projected/7db6998b-1c77-4473-8656-6e64c5f14688-kube-api-access-k5f6z\") pod \"nova-cell1-395c-account-create-update-gl7x6\" (UID: \"7db6998b-1c77-4473-8656-6e64c5f14688\") " pod="openstack/nova-cell1-395c-account-create-update-gl7x6"
Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.391257 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7db6998b-1c77-4473-8656-6e64c5f14688-operator-scripts\") pod \"nova-cell1-395c-account-create-update-gl7x6\" (UID: \"7db6998b-1c77-4473-8656-6e64c5f14688\") " pod="openstack/nova-cell1-395c-account-create-update-gl7x6"
Jan 28 16:08:20 crc kubenswrapper[4811]: I0128 16:08:20.462592 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-395c-account-create-update-gl7x6"
Jan 28 16:08:21 crc kubenswrapper[4811]: I0128 16:08:21.582812 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8495b76777-md4rx"
Jan 28 16:08:21 crc kubenswrapper[4811]: I0128 16:08:21.651803 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8495b76777-md4rx" event={"ID":"4be24dd0-394a-4835-9cb9-2684c2492079","Type":"ContainerDied","Data":"dc06a1ba9a7f2d61a5c9e81e18cbff2158e49d3195a8bc19e2e12cf91e1a3661"}
Jan 28 16:08:21 crc kubenswrapper[4811]: I0128 16:08:21.651890 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8495b76777-md4rx"
Jan 28 16:08:21 crc kubenswrapper[4811]: I0128 16:08:21.651925 4811 scope.go:117] "RemoveContainer" containerID="1857aece1c28a54547dcfcd42cea132c99e0a01aebb8d17bd1c22c2f7eca4e0a"
Jan 28 16:08:21 crc kubenswrapper[4811]: I0128 16:08:21.700027 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l6gzt\" (UniqueName: \"kubernetes.io/projected/4be24dd0-394a-4835-9cb9-2684c2492079-kube-api-access-l6gzt\") pod \"4be24dd0-394a-4835-9cb9-2684c2492079\" (UID: \"4be24dd0-394a-4835-9cb9-2684c2492079\") "
Jan 28 16:08:21 crc kubenswrapper[4811]: I0128 16:08:21.700253 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4be24dd0-394a-4835-9cb9-2684c2492079-dns-swift-storage-0\") pod \"4be24dd0-394a-4835-9cb9-2684c2492079\" (UID: \"4be24dd0-394a-4835-9cb9-2684c2492079\") "
Jan 28 16:08:21 crc kubenswrapper[4811]: I0128 16:08:21.700365 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4be24dd0-394a-4835-9cb9-2684c2492079-ovsdbserver-sb\") pod \"4be24dd0-394a-4835-9cb9-2684c2492079\" (UID: \"4be24dd0-394a-4835-9cb9-2684c2492079\") "
Jan 28 16:08:21 crc kubenswrapper[4811]: I0128 16:08:21.700401 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4be24dd0-394a-4835-9cb9-2684c2492079-config\") pod \"4be24dd0-394a-4835-9cb9-2684c2492079\" (UID: \"4be24dd0-394a-4835-9cb9-2684c2492079\") "
Jan 28 16:08:21 crc kubenswrapper[4811]: I0128 16:08:21.700481 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4be24dd0-394a-4835-9cb9-2684c2492079-ovsdbserver-nb\") pod \"4be24dd0-394a-4835-9cb9-2684c2492079\" (UID: \"4be24dd0-394a-4835-9cb9-2684c2492079\") "
Jan 28 16:08:21 crc kubenswrapper[4811]: I0128 16:08:21.700624 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4be24dd0-394a-4835-9cb9-2684c2492079-dns-svc\") pod \"4be24dd0-394a-4835-9cb9-2684c2492079\" (UID: \"4be24dd0-394a-4835-9cb9-2684c2492079\") "
Jan 28 16:08:21 crc kubenswrapper[4811]: I0128 16:08:21.715258 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4be24dd0-394a-4835-9cb9-2684c2492079-kube-api-access-l6gzt" (OuterVolumeSpecName: "kube-api-access-l6gzt") pod "4be24dd0-394a-4835-9cb9-2684c2492079" (UID: "4be24dd0-394a-4835-9cb9-2684c2492079"). InnerVolumeSpecName "kube-api-access-l6gzt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 16:08:21 crc kubenswrapper[4811]: I0128 16:08:21.783638 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4be24dd0-394a-4835-9cb9-2684c2492079-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "4be24dd0-394a-4835-9cb9-2684c2492079" (UID: "4be24dd0-394a-4835-9cb9-2684c2492079"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:08:21 crc kubenswrapper[4811]: I0128 16:08:21.792689 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4be24dd0-394a-4835-9cb9-2684c2492079-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4be24dd0-394a-4835-9cb9-2684c2492079" (UID: "4be24dd0-394a-4835-9cb9-2684c2492079"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:08:21 crc kubenswrapper[4811]: I0128 16:08:21.803187 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l6gzt\" (UniqueName: \"kubernetes.io/projected/4be24dd0-394a-4835-9cb9-2684c2492079-kube-api-access-l6gzt\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:21 crc kubenswrapper[4811]: I0128 16:08:21.803236 4811 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4be24dd0-394a-4835-9cb9-2684c2492079-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:21 crc kubenswrapper[4811]: I0128 16:08:21.803246 4811 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4be24dd0-394a-4835-9cb9-2684c2492079-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:21 crc kubenswrapper[4811]: I0128 16:08:21.812413 4811 scope.go:117] "RemoveContainer" containerID="1f6c627b7301448100cbd519abc5845b3e7b0e44a608610675a09d0300128e2d" Jan 28 16:08:21 crc kubenswrapper[4811]: I0128 16:08:21.837600 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4be24dd0-394a-4835-9cb9-2684c2492079-config" (OuterVolumeSpecName: "config") pod "4be24dd0-394a-4835-9cb9-2684c2492079" (UID: "4be24dd0-394a-4835-9cb9-2684c2492079"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:08:21 crc kubenswrapper[4811]: I0128 16:08:21.858396 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4be24dd0-394a-4835-9cb9-2684c2492079-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "4be24dd0-394a-4835-9cb9-2684c2492079" (UID: "4be24dd0-394a-4835-9cb9-2684c2492079"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:08:21 crc kubenswrapper[4811]: I0128 16:08:21.864911 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4be24dd0-394a-4835-9cb9-2684c2492079-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "4be24dd0-394a-4835-9cb9-2684c2492079" (UID: "4be24dd0-394a-4835-9cb9-2684c2492079"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:08:21 crc kubenswrapper[4811]: I0128 16:08:21.905581 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4be24dd0-394a-4835-9cb9-2684c2492079-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:21 crc kubenswrapper[4811]: I0128 16:08:21.905619 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4be24dd0-394a-4835-9cb9-2684c2492079-config\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:21 crc kubenswrapper[4811]: I0128 16:08:21.905631 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4be24dd0-394a-4835-9cb9-2684c2492079-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.090556 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-99f2-account-create-update-xdmdm"] Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.136318 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8495b76777-md4rx"] Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.155967 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-8495b76777-md4rx" podUID="4be24dd0-394a-4835-9cb9-2684c2492079" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.144:5353: i/o timeout" Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.161241 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8495b76777-md4rx"] Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.283407 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-x2dtb"] Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.311466 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-395c-account-create-update-gl7x6"] Jan 28 16:08:22 crc kubenswrapper[4811]: W0128 16:08:22.316649 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7db6998b_1c77_4473_8656_6e64c5f14688.slice/crio-86fa2a08ab2bd6e1bcca213cec8fc197b87aabfd8f128dd6ee9b3e08e55b15af WatchSource:0}: Error finding container 86fa2a08ab2bd6e1bcca213cec8fc197b87aabfd8f128dd6ee9b3e08e55b15af: Status 404 returned error can't find the container with id 86fa2a08ab2bd6e1bcca213cec8fc197b87aabfd8f128dd6ee9b3e08e55b15af Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.372721 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4be24dd0-394a-4835-9cb9-2684c2492079" path="/var/lib/kubelet/pods/4be24dd0-394a-4835-9cb9-2684c2492079/volumes" Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.521669 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-2952-account-create-update-8fmfb"] Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.565314 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-mdvqk"] Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.669046 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-lb2cx"] Jan 28 16:08:22 crc kubenswrapper[4811]: W0128 16:08:22.682417 4811 manager.go:1169] Failed to process watch event {EventType:0 
Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.696988 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-x2dtb" event={"ID":"120fd687-33f8-4530-9a07-c0388d53fd4e","Type":"ContainerStarted","Data":"5339a634a87a740049c83286d07d362f3bfc8b28b45e9db54790227cd799ad11"}
Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.697024 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-x2dtb" event={"ID":"120fd687-33f8-4530-9a07-c0388d53fd4e","Type":"ContainerStarted","Data":"4ad246818f95a000a69385a690c2b5d6e8f87fb7943d54848a663094aa90ad07"}
Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.704306 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0fa9f0f6-82aa-44c9-9f54-33538efb87bf","Type":"ContainerStarted","Data":"9fd10a205629268bc3659634d23bf3762eef4650ca29c8546ea0b24e43642eee"}
Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.704456 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="0fa9f0f6-82aa-44c9-9f54-33538efb87bf" containerName="glance-log" containerID="cri-o://6d2156a391bac47700cf51e01e775fd4a9a5e045eb21e767048c52f19b97ada1" gracePeriod=30
Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.704670 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="0fa9f0f6-82aa-44c9-9f54-33538efb87bf" containerName="glance-httpd" containerID="cri-o://9fd10a205629268bc3659634d23bf3762eef4650ca29c8546ea0b24e43642eee" gracePeriod=30
Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.708757 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="6632e30b-e20e-44d0-848f-90221b0e31f8" containerName="glance-log" containerID="cri-o://def92992084d92129bb1bd8e21fe1947d715fc2dae7f5c1c67b1e621cb755121" gracePeriod=30
Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.708907 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="6632e30b-e20e-44d0-848f-90221b0e31f8" containerName="glance-httpd" containerID="cri-o://7030ec48db46a6003c5b25adb8cdea91892ee1fc6b380538b431e48a179ceb9e" gracePeriod=30
Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.719215 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-x2dtb" podStartSLOduration=3.719187518 podStartE2EDuration="3.719187518s" podCreationTimestamp="2026-01-28 16:08:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:08:22.713172585 +0000 UTC m=+1395.467536168" watchObservedRunningTime="2026-01-28 16:08:22.719187518 +0000 UTC m=+1395.473551101"
Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.721537 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-mdvqk" event={"ID":"2cb09ee7-ca68-4a03-9190-242af91783b1","Type":"ContainerStarted","Data":"fe9e2df7d8097ab0db724e7684196b2c269164548a21934cbba40be052fa838d"}
event={"ID":"2cb09ee7-ca68-4a03-9190-242af91783b1","Type":"ContainerStarted","Data":"fe9e2df7d8097ab0db724e7684196b2c269164548a21934cbba40be052fa838d"} Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.722803 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-2952-account-create-update-8fmfb" event={"ID":"fa06bb82-ce18-4cad-b539-b9ec87700437","Type":"ContainerStarted","Data":"2bb124be14df1b4e0c64dfa23d4d97a6a1a58d514ae68e79d6e9ba43df863941"} Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.730975 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"96b236bc-d5d0-4beb-a5cb-a93384cbaca2","Type":"ContainerStarted","Data":"85f27822cc57fe5cd5364ebf4bc5dbf6901ca7b868ef9d6f5c11e934e8740427"} Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.732172 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="96b236bc-d5d0-4beb-a5cb-a93384cbaca2" containerName="ceilometer-central-agent" containerID="cri-o://1a257c8830ba6170421d2471c2be9db66903601803564b624f1aad7a190187a4" gracePeriod=30 Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.732229 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.732286 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="96b236bc-d5d0-4beb-a5cb-a93384cbaca2" containerName="ceilometer-notification-agent" containerID="cri-o://50c648e420c733a8c5868c472084552bba3edf2a988519678694481205f719e1" gracePeriod=30 Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.732303 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="96b236bc-d5d0-4beb-a5cb-a93384cbaca2" containerName="sg-core" containerID="cri-o://b55d6f3771bfcb18d5fcb592319cef1f0e2e656a40b67dac3a81d940923e94dd" gracePeriod=30 Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.732396 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="96b236bc-d5d0-4beb-a5cb-a93384cbaca2" containerName="proxy-httpd" containerID="cri-o://85f27822cc57fe5cd5364ebf4bc5dbf6901ca7b868ef9d6f5c11e934e8740427" gracePeriod=30 Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.738348 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=13.738327216 podStartE2EDuration="13.738327216s" podCreationTimestamp="2026-01-28 16:08:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:08:22.730274098 +0000 UTC m=+1395.484637681" watchObservedRunningTime="2026-01-28 16:08:22.738327216 +0000 UTC m=+1395.492690799" Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.747075 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-395c-account-create-update-gl7x6" event={"ID":"7db6998b-1c77-4473-8656-6e64c5f14688","Type":"ContainerStarted","Data":"a5491b7008847bdd58a8450727ced7dbf63f1b967864e6d2e88817c2d77ec750"} Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.747131 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-395c-account-create-update-gl7x6" 
event={"ID":"7db6998b-1c77-4473-8656-6e64c5f14688","Type":"ContainerStarted","Data":"86fa2a08ab2bd6e1bcca213cec8fc197b87aabfd8f128dd6ee9b3e08e55b15af"} Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.759707 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-99f2-account-create-update-xdmdm" event={"ID":"d853eecf-d953-46ff-bf52-92c60a9213f6","Type":"ContainerStarted","Data":"8394fb63167b883b169be61e77ba09dc1dc48ea0db4cbe8e5d06a4310c849ea9"} Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.759758 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-99f2-account-create-update-xdmdm" event={"ID":"d853eecf-d953-46ff-bf52-92c60a9213f6","Type":"ContainerStarted","Data":"d4d787ffd76e663f87a71b6e7de943a0f5156348dd716a3812c6096f1c939b75"} Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.760105 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=13.760087994 podStartE2EDuration="13.760087994s" podCreationTimestamp="2026-01-28 16:08:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:08:22.758813731 +0000 UTC m=+1395.513177314" watchObservedRunningTime="2026-01-28 16:08:22.760087994 +0000 UTC m=+1395.514451577" Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.773647 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"e9d8f73c-afb3-4e7e-a69a-e8899bf051cc","Type":"ContainerStarted","Data":"17b26e241f71fd0bba9cd473dd072e8e999cd250ef30a45cea6cacdf8de99886"} Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.791012 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.624376347 podStartE2EDuration="17.790993032s" podCreationTimestamp="2026-01-28 16:08:05 +0000 UTC" firstStartedPulling="2026-01-28 16:08:06.299666469 +0000 UTC m=+1379.054030052" lastFinishedPulling="2026-01-28 16:08:21.466283154 +0000 UTC m=+1394.220646737" observedRunningTime="2026-01-28 16:08:22.787271381 +0000 UTC m=+1395.541634964" watchObservedRunningTime="2026-01-28 16:08:22.790993032 +0000 UTC m=+1395.545356625" Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.816956 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-395c-account-create-update-gl7x6" podStartSLOduration=2.816938013 podStartE2EDuration="2.816938013s" podCreationTimestamp="2026-01-28 16:08:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:08:22.809817401 +0000 UTC m=+1395.564181004" watchObservedRunningTime="2026-01-28 16:08:22.816938013 +0000 UTC m=+1395.571301596" Jan 28 16:08:22 crc kubenswrapper[4811]: I0128 16:08:22.863345 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.704458336 podStartE2EDuration="34.863317088s" podCreationTimestamp="2026-01-28 16:07:48 +0000 UTC" firstStartedPulling="2026-01-28 16:07:49.598136159 +0000 UTC m=+1362.352499742" lastFinishedPulling="2026-01-28 16:08:21.756994911 +0000 UTC m=+1394.511358494" observedRunningTime="2026-01-28 16:08:22.848736434 +0000 UTC m=+1395.603100017" watchObservedRunningTime="2026-01-28 16:08:22.863317088 +0000 UTC m=+1395.617680671" Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 
Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.449700 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-httpd-run\") pod \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") "
Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.450098 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") "
Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.450149 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9v9r6\" (UniqueName: \"kubernetes.io/projected/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-kube-api-access-9v9r6\") pod \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") "
Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.450246 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-internal-tls-certs\") pod \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") "
Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.450304 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-combined-ca-bundle\") pod \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") "
Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.450330 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-logs\") pod \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") "
Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.450378 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-scripts\") pod \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") "
Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.450452 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-config-data\") pod \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\" (UID: \"0fa9f0f6-82aa-44c9-9f54-33538efb87bf\") "
Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.450705 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "0fa9f0f6-82aa-44c9-9f54-33538efb87bf" (UID: "0fa9f0f6-82aa-44c9-9f54-33538efb87bf"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.450995 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-logs" (OuterVolumeSpecName: "logs") pod "0fa9f0f6-82aa-44c9-9f54-33538efb87bf" (UID: "0fa9f0f6-82aa-44c9-9f54-33538efb87bf"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.451342 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-logs\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.451465 4811 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.458197 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-scripts" (OuterVolumeSpecName: "scripts") pod "0fa9f0f6-82aa-44c9-9f54-33538efb87bf" (UID: "0fa9f0f6-82aa-44c9-9f54-33538efb87bf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.458294 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-kube-api-access-9v9r6" (OuterVolumeSpecName: "kube-api-access-9v9r6") pod "0fa9f0f6-82aa-44c9-9f54-33538efb87bf" (UID: "0fa9f0f6-82aa-44c9-9f54-33538efb87bf"). InnerVolumeSpecName "kube-api-access-9v9r6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.461171 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "0fa9f0f6-82aa-44c9-9f54-33538efb87bf" (UID: "0fa9f0f6-82aa-44c9-9f54-33538efb87bf"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.502352 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0fa9f0f6-82aa-44c9-9f54-33538efb87bf" (UID: "0fa9f0f6-82aa-44c9-9f54-33538efb87bf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.518782 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-config-data" (OuterVolumeSpecName: "config-data") pod "0fa9f0f6-82aa-44c9-9f54-33538efb87bf" (UID: "0fa9f0f6-82aa-44c9-9f54-33538efb87bf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.527162 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "0fa9f0f6-82aa-44c9-9f54-33538efb87bf" (UID: "0fa9f0f6-82aa-44c9-9f54-33538efb87bf"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.553228 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.553308 4811 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.553326 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9v9r6\" (UniqueName: \"kubernetes.io/projected/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-kube-api-access-9v9r6\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.553343 4811 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.553355 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.553366 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0fa9f0f6-82aa-44c9-9f54-33538efb87bf-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.577451 4811 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.655295 4811 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.784075 4811 generic.go:334] "Generic (PLEG): container finished" podID="d853eecf-d953-46ff-bf52-92c60a9213f6" containerID="8394fb63167b883b169be61e77ba09dc1dc48ea0db4cbe8e5d06a4310c849ea9" exitCode=0 Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.784137 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-99f2-account-create-update-xdmdm" event={"ID":"d853eecf-d953-46ff-bf52-92c60a9213f6","Type":"ContainerDied","Data":"8394fb63167b883b169be61e77ba09dc1dc48ea0db4cbe8e5d06a4310c849ea9"} Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.785803 4811 generic.go:334] "Generic (PLEG): container finished" podID="120fd687-33f8-4530-9a07-c0388d53fd4e" containerID="5339a634a87a740049c83286d07d362f3bfc8b28b45e9db54790227cd799ad11" exitCode=0 Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.785857 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-x2dtb" event={"ID":"120fd687-33f8-4530-9a07-c0388d53fd4e","Type":"ContainerDied","Data":"5339a634a87a740049c83286d07d362f3bfc8b28b45e9db54790227cd799ad11"} Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.790866 4811 generic.go:334] "Generic (PLEG): container finished" podID="96b236bc-d5d0-4beb-a5cb-a93384cbaca2" containerID="85f27822cc57fe5cd5364ebf4bc5dbf6901ca7b868ef9d6f5c11e934e8740427" exitCode=0 Jan 28 16:08:23 crc 
Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.790895 4811 generic.go:334] "Generic (PLEG): container finished" podID="96b236bc-d5d0-4beb-a5cb-a93384cbaca2" containerID="b55d6f3771bfcb18d5fcb592319cef1f0e2e656a40b67dac3a81d940923e94dd" exitCode=2
Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.790904 4811 generic.go:334] "Generic (PLEG): container finished" podID="96b236bc-d5d0-4beb-a5cb-a93384cbaca2" containerID="1a257c8830ba6170421d2471c2be9db66903601803564b624f1aad7a190187a4" exitCode=0
Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.790947 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"96b236bc-d5d0-4beb-a5cb-a93384cbaca2","Type":"ContainerDied","Data":"85f27822cc57fe5cd5364ebf4bc5dbf6901ca7b868ef9d6f5c11e934e8740427"}
Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.790975 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"96b236bc-d5d0-4beb-a5cb-a93384cbaca2","Type":"ContainerDied","Data":"b55d6f3771bfcb18d5fcb592319cef1f0e2e656a40b67dac3a81d940923e94dd"}
Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.790988 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"96b236bc-d5d0-4beb-a5cb-a93384cbaca2","Type":"ContainerDied","Data":"1a257c8830ba6170421d2471c2be9db66903601803564b624f1aad7a190187a4"}
Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.792257 4811 generic.go:334] "Generic (PLEG): container finished" podID="9dec1ac3-ea25-430b-8934-fda54a06648c" containerID="5f95164c6bc5517fe5021ef5b89f914d6a0e68587b77f5bb6c5c012a938fb36a" exitCode=0
Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.792303 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-lb2cx" event={"ID":"9dec1ac3-ea25-430b-8934-fda54a06648c","Type":"ContainerDied","Data":"5f95164c6bc5517fe5021ef5b89f914d6a0e68587b77f5bb6c5c012a938fb36a"}
Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.792320 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-lb2cx" event={"ID":"9dec1ac3-ea25-430b-8934-fda54a06648c","Type":"ContainerStarted","Data":"4dc4681da8c7653c406200c617befead830f745baf6c433eaf1c23731f5d8a18"}
Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.795494 4811 generic.go:334] "Generic (PLEG): container finished" podID="6632e30b-e20e-44d0-848f-90221b0e31f8" containerID="7030ec48db46a6003c5b25adb8cdea91892ee1fc6b380538b431e48a179ceb9e" exitCode=143
Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.795532 4811 generic.go:334] "Generic (PLEG): container finished" podID="6632e30b-e20e-44d0-848f-90221b0e31f8" containerID="def92992084d92129bb1bd8e21fe1947d715fc2dae7f5c1c67b1e621cb755121" exitCode=143
Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.795578 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6632e30b-e20e-44d0-848f-90221b0e31f8","Type":"ContainerDied","Data":"7030ec48db46a6003c5b25adb8cdea91892ee1fc6b380538b431e48a179ceb9e"}
Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.795608 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6632e30b-e20e-44d0-848f-90221b0e31f8","Type":"ContainerDied","Data":"def92992084d92129bb1bd8e21fe1947d715fc2dae7f5c1c67b1e621cb755121"}
Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.808367 4811 generic.go:334] "Generic (PLEG): container finished" podID="2cb09ee7-ca68-4a03-9190-242af91783b1" containerID="5704b2f39ed016d4b37800c5475f466e0b8a3d7d89c94a7a9f250a40170a8295" exitCode=0
podID="2cb09ee7-ca68-4a03-9190-242af91783b1" containerID="5704b2f39ed016d4b37800c5475f466e0b8a3d7d89c94a7a9f250a40170a8295" exitCode=0 Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.808462 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-mdvqk" event={"ID":"2cb09ee7-ca68-4a03-9190-242af91783b1","Type":"ContainerDied","Data":"5704b2f39ed016d4b37800c5475f466e0b8a3d7d89c94a7a9f250a40170a8295"} Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.810126 4811 generic.go:334] "Generic (PLEG): container finished" podID="7db6998b-1c77-4473-8656-6e64c5f14688" containerID="a5491b7008847bdd58a8450727ced7dbf63f1b967864e6d2e88817c2d77ec750" exitCode=0 Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.810177 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-395c-account-create-update-gl7x6" event={"ID":"7db6998b-1c77-4473-8656-6e64c5f14688","Type":"ContainerDied","Data":"a5491b7008847bdd58a8450727ced7dbf63f1b967864e6d2e88817c2d77ec750"} Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.811402 4811 generic.go:334] "Generic (PLEG): container finished" podID="fa06bb82-ce18-4cad-b539-b9ec87700437" containerID="502008b5e1203cf387522bb1e522bcd6610092f38c7e8f24a175db1f14e4d5a3" exitCode=0 Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.811478 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-2952-account-create-update-8fmfb" event={"ID":"fa06bb82-ce18-4cad-b539-b9ec87700437","Type":"ContainerDied","Data":"502008b5e1203cf387522bb1e522bcd6610092f38c7e8f24a175db1f14e4d5a3"} Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.817481 4811 generic.go:334] "Generic (PLEG): container finished" podID="0fa9f0f6-82aa-44c9-9f54-33538efb87bf" containerID="9fd10a205629268bc3659634d23bf3762eef4650ca29c8546ea0b24e43642eee" exitCode=143 Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.817522 4811 generic.go:334] "Generic (PLEG): container finished" podID="0fa9f0f6-82aa-44c9-9f54-33538efb87bf" containerID="6d2156a391bac47700cf51e01e775fd4a9a5e045eb21e767048c52f19b97ada1" exitCode=143 Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.827414 4811 util.go:48] "No ready sandbox for pod can be found. 
Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.829769 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0fa9f0f6-82aa-44c9-9f54-33538efb87bf","Type":"ContainerDied","Data":"9fd10a205629268bc3659634d23bf3762eef4650ca29c8546ea0b24e43642eee"}
Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.829876 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0fa9f0f6-82aa-44c9-9f54-33538efb87bf","Type":"ContainerDied","Data":"6d2156a391bac47700cf51e01e775fd4a9a5e045eb21e767048c52f19b97ada1"}
Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.829895 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0fa9f0f6-82aa-44c9-9f54-33538efb87bf","Type":"ContainerDied","Data":"210ef4ec949a6709eaa49ab9b8fc28730ea98c1ffc69852e9ba68f0d3fd37d8d"}
Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.829916 4811 scope.go:117] "RemoveContainer" containerID="9fd10a205629268bc3659634d23bf3762eef4650ca29c8546ea0b24e43642eee"
Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.947354 4811 scope.go:117] "RemoveContainer" containerID="6d2156a391bac47700cf51e01e775fd4a9a5e045eb21e767048c52f19b97ada1"
Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.969838 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 28 16:08:23 crc kubenswrapper[4811]: I0128 16:08:23.980495 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:23.999507 4811 scope.go:117] "RemoveContainer" containerID="9fd10a205629268bc3659634d23bf3762eef4650ca29c8546ea0b24e43642eee"
Jan 28 16:08:24 crc kubenswrapper[4811]: E0128 16:08:24.000234 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9fd10a205629268bc3659634d23bf3762eef4650ca29c8546ea0b24e43642eee\": container with ID starting with 9fd10a205629268bc3659634d23bf3762eef4650ca29c8546ea0b24e43642eee not found: ID does not exist" containerID="9fd10a205629268bc3659634d23bf3762eef4650ca29c8546ea0b24e43642eee"
Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.001117 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9fd10a205629268bc3659634d23bf3762eef4650ca29c8546ea0b24e43642eee"} err="failed to get container status \"9fd10a205629268bc3659634d23bf3762eef4650ca29c8546ea0b24e43642eee\": rpc error: code = NotFound desc = could not find container \"9fd10a205629268bc3659634d23bf3762eef4650ca29c8546ea0b24e43642eee\": container with ID starting with 9fd10a205629268bc3659634d23bf3762eef4650ca29c8546ea0b24e43642eee not found: ID does not exist"
Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.001155 4811 scope.go:117] "RemoveContainer" containerID="6d2156a391bac47700cf51e01e775fd4a9a5e045eb21e767048c52f19b97ada1"
Jan 28 16:08:24 crc kubenswrapper[4811]: E0128 16:08:24.002937 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d2156a391bac47700cf51e01e775fd4a9a5e045eb21e767048c52f19b97ada1\": container with ID starting with 6d2156a391bac47700cf51e01e775fd4a9a5e045eb21e767048c52f19b97ada1 not found: ID does not exist" containerID="6d2156a391bac47700cf51e01e775fd4a9a5e045eb21e767048c52f19b97ada1"
containerID="6d2156a391bac47700cf51e01e775fd4a9a5e045eb21e767048c52f19b97ada1" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.002984 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d2156a391bac47700cf51e01e775fd4a9a5e045eb21e767048c52f19b97ada1"} err="failed to get container status \"6d2156a391bac47700cf51e01e775fd4a9a5e045eb21e767048c52f19b97ada1\": rpc error: code = NotFound desc = could not find container \"6d2156a391bac47700cf51e01e775fd4a9a5e045eb21e767048c52f19b97ada1\": container with ID starting with 6d2156a391bac47700cf51e01e775fd4a9a5e045eb21e767048c52f19b97ada1 not found: ID does not exist" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.003008 4811 scope.go:117] "RemoveContainer" containerID="9fd10a205629268bc3659634d23bf3762eef4650ca29c8546ea0b24e43642eee" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.003080 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 28 16:08:24 crc kubenswrapper[4811]: E0128 16:08:24.003641 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fa9f0f6-82aa-44c9-9f54-33538efb87bf" containerName="glance-log" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.003663 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fa9f0f6-82aa-44c9-9f54-33538efb87bf" containerName="glance-log" Jan 28 16:08:24 crc kubenswrapper[4811]: E0128 16:08:24.003682 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4be24dd0-394a-4835-9cb9-2684c2492079" containerName="init" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.003690 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="4be24dd0-394a-4835-9cb9-2684c2492079" containerName="init" Jan 28 16:08:24 crc kubenswrapper[4811]: E0128 16:08:24.003703 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4be24dd0-394a-4835-9cb9-2684c2492079" containerName="dnsmasq-dns" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.003711 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="4be24dd0-394a-4835-9cb9-2684c2492079" containerName="dnsmasq-dns" Jan 28 16:08:24 crc kubenswrapper[4811]: E0128 16:08:24.003728 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fa9f0f6-82aa-44c9-9f54-33538efb87bf" containerName="glance-httpd" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.003736 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fa9f0f6-82aa-44c9-9f54-33538efb87bf" containerName="glance-httpd" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.003991 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fa9f0f6-82aa-44c9-9f54-33538efb87bf" containerName="glance-log" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.004032 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fa9f0f6-82aa-44c9-9f54-33538efb87bf" containerName="glance-httpd" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.004051 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="4be24dd0-394a-4835-9cb9-2684c2492079" containerName="dnsmasq-dns" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.005239 4811 util.go:30] "No sandbox for pod can be found. 
Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.006303 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9fd10a205629268bc3659634d23bf3762eef4650ca29c8546ea0b24e43642eee"} err="failed to get container status \"9fd10a205629268bc3659634d23bf3762eef4650ca29c8546ea0b24e43642eee\": rpc error: code = NotFound desc = could not find container \"9fd10a205629268bc3659634d23bf3762eef4650ca29c8546ea0b24e43642eee\": container with ID starting with 9fd10a205629268bc3659634d23bf3762eef4650ca29c8546ea0b24e43642eee not found: ID does not exist"
Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.006338 4811 scope.go:117] "RemoveContainer" containerID="6d2156a391bac47700cf51e01e775fd4a9a5e045eb21e767048c52f19b97ada1"
Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.006814 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d2156a391bac47700cf51e01e775fd4a9a5e045eb21e767048c52f19b97ada1"} err="failed to get container status \"6d2156a391bac47700cf51e01e775fd4a9a5e045eb21e767048c52f19b97ada1\": rpc error: code = NotFound desc = could not find container \"6d2156a391bac47700cf51e01e775fd4a9a5e045eb21e767048c52f19b97ada1\": container with ID starting with 6d2156a391bac47700cf51e01e775fd4a9a5e045eb21e767048c52f19b97ada1 not found: ID does not exist"
Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.008889 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.009710 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc"
Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.016764 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.063119 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") " pod="openstack/glance-default-internal-api-0"
Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.063210 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35795d5-ffac-4851-914a-00dc84496f91-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") " pod="openstack/glance-default-internal-api-0"
Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.063259 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f35795d5-ffac-4851-914a-00dc84496f91-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") " pod="openstack/glance-default-internal-api-0"
Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.063281 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f35795d5-ffac-4851-914a-00dc84496f91-logs\") pod \"glance-default-internal-api-0\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") " pod="openstack/glance-default-internal-api-0"
16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.063300 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tplp\" (UniqueName: \"kubernetes.io/projected/f35795d5-ffac-4851-914a-00dc84496f91-kube-api-access-7tplp\") pod \"glance-default-internal-api-0\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.063323 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f35795d5-ffac-4851-914a-00dc84496f91-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.063349 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f35795d5-ffac-4851-914a-00dc84496f91-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.063385 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f35795d5-ffac-4851-914a-00dc84496f91-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.170652 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.170733 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35795d5-ffac-4851-914a-00dc84496f91-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.170976 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f35795d5-ffac-4851-914a-00dc84496f91-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.170999 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f35795d5-ffac-4851-914a-00dc84496f91-logs\") pod \"glance-default-internal-api-0\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.171024 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tplp\" (UniqueName: \"kubernetes.io/projected/f35795d5-ffac-4851-914a-00dc84496f91-kube-api-access-7tplp\") pod \"glance-default-internal-api-0\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") " pod="openstack/glance-default-internal-api-0" Jan 28 
16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.171057 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f35795d5-ffac-4851-914a-00dc84496f91-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.171090 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f35795d5-ffac-4851-914a-00dc84496f91-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.171128 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f35795d5-ffac-4851-914a-00dc84496f91-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.171128 4811 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-internal-api-0" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.172198 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f35795d5-ffac-4851-914a-00dc84496f91-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.172982 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f35795d5-ffac-4851-914a-00dc84496f91-logs\") pod \"glance-default-internal-api-0\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.183909 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f35795d5-ffac-4851-914a-00dc84496f91-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.194530 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f35795d5-ffac-4851-914a-00dc84496f91-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.195035 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35795d5-ffac-4851-914a-00dc84496f91-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.195914 4811 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-7tplp\" (UniqueName: \"kubernetes.io/projected/f35795d5-ffac-4851-914a-00dc84496f91-kube-api-access-7tplp\") pod \"glance-default-internal-api-0\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.196991 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f35795d5-ffac-4851-914a-00dc84496f91-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.200508 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") " pod="openstack/glance-default-internal-api-0" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.274993 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-99f2-account-create-update-xdmdm" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.365923 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0fa9f0f6-82aa-44c9-9f54-33538efb87bf" path="/var/lib/kubelet/pods/0fa9f0f6-82aa-44c9-9f54-33538efb87bf/volumes" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.370776 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.375219 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d853eecf-d953-46ff-bf52-92c60a9213f6-operator-scripts\") pod \"d853eecf-d953-46ff-bf52-92c60a9213f6\" (UID: \"d853eecf-d953-46ff-bf52-92c60a9213f6\") " Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.375453 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xgqlz\" (UniqueName: \"kubernetes.io/projected/d853eecf-d953-46ff-bf52-92c60a9213f6-kube-api-access-xgqlz\") pod \"d853eecf-d953-46ff-bf52-92c60a9213f6\" (UID: \"d853eecf-d953-46ff-bf52-92c60a9213f6\") " Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.375706 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d853eecf-d953-46ff-bf52-92c60a9213f6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d853eecf-d953-46ff-bf52-92c60a9213f6" (UID: "d853eecf-d953-46ff-bf52-92c60a9213f6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.376140 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d853eecf-d953-46ff-bf52-92c60a9213f6-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.383118 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d853eecf-d953-46ff-bf52-92c60a9213f6-kube-api-access-xgqlz" (OuterVolumeSpecName: "kube-api-access-xgqlz") pod "d853eecf-d953-46ff-bf52-92c60a9213f6" (UID: "d853eecf-d953-46ff-bf52-92c60a9213f6"). InnerVolumeSpecName "kube-api-access-xgqlz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.444041 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.483664 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6632e30b-e20e-44d0-848f-90221b0e31f8-httpd-run\") pod \"6632e30b-e20e-44d0-848f-90221b0e31f8\" (UID: \"6632e30b-e20e-44d0-848f-90221b0e31f8\") " Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.484223 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"6632e30b-e20e-44d0-848f-90221b0e31f8\" (UID: \"6632e30b-e20e-44d0-848f-90221b0e31f8\") " Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.484341 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6632e30b-e20e-44d0-848f-90221b0e31f8-public-tls-certs\") pod \"6632e30b-e20e-44d0-848f-90221b0e31f8\" (UID: \"6632e30b-e20e-44d0-848f-90221b0e31f8\") " Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.484446 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jwblz\" (UniqueName: \"kubernetes.io/projected/6632e30b-e20e-44d0-848f-90221b0e31f8-kube-api-access-jwblz\") pod \"6632e30b-e20e-44d0-848f-90221b0e31f8\" (UID: \"6632e30b-e20e-44d0-848f-90221b0e31f8\") " Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.484851 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6632e30b-e20e-44d0-848f-90221b0e31f8-scripts\") pod \"6632e30b-e20e-44d0-848f-90221b0e31f8\" (UID: \"6632e30b-e20e-44d0-848f-90221b0e31f8\") " Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.485002 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6632e30b-e20e-44d0-848f-90221b0e31f8-logs\") pod \"6632e30b-e20e-44d0-848f-90221b0e31f8\" (UID: \"6632e30b-e20e-44d0-848f-90221b0e31f8\") " Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.485089 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6632e30b-e20e-44d0-848f-90221b0e31f8-config-data\") pod \"6632e30b-e20e-44d0-848f-90221b0e31f8\" (UID: \"6632e30b-e20e-44d0-848f-90221b0e31f8\") " Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.485178 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6632e30b-e20e-44d0-848f-90221b0e31f8-combined-ca-bundle\") pod \"6632e30b-e20e-44d0-848f-90221b0e31f8\" (UID: \"6632e30b-e20e-44d0-848f-90221b0e31f8\") " Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.485915 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xgqlz\" (UniqueName: \"kubernetes.io/projected/d853eecf-d953-46ff-bf52-92c60a9213f6-kube-api-access-xgqlz\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.484541 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6632e30b-e20e-44d0-848f-90221b0e31f8-httpd-run" (OuterVolumeSpecName: "httpd-run") pod 
"6632e30b-e20e-44d0-848f-90221b0e31f8" (UID: "6632e30b-e20e-44d0-848f-90221b0e31f8"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.487273 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6632e30b-e20e-44d0-848f-90221b0e31f8-logs" (OuterVolumeSpecName: "logs") pod "6632e30b-e20e-44d0-848f-90221b0e31f8" (UID: "6632e30b-e20e-44d0-848f-90221b0e31f8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.491585 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "glance") pod "6632e30b-e20e-44d0-848f-90221b0e31f8" (UID: "6632e30b-e20e-44d0-848f-90221b0e31f8"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.495093 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6632e30b-e20e-44d0-848f-90221b0e31f8-kube-api-access-jwblz" (OuterVolumeSpecName: "kube-api-access-jwblz") pod "6632e30b-e20e-44d0-848f-90221b0e31f8" (UID: "6632e30b-e20e-44d0-848f-90221b0e31f8"). InnerVolumeSpecName "kube-api-access-jwblz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.499877 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6632e30b-e20e-44d0-848f-90221b0e31f8-scripts" (OuterVolumeSpecName: "scripts") pod "6632e30b-e20e-44d0-848f-90221b0e31f8" (UID: "6632e30b-e20e-44d0-848f-90221b0e31f8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.523602 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6632e30b-e20e-44d0-848f-90221b0e31f8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6632e30b-e20e-44d0-848f-90221b0e31f8" (UID: "6632e30b-e20e-44d0-848f-90221b0e31f8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.540566 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6632e30b-e20e-44d0-848f-90221b0e31f8-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "6632e30b-e20e-44d0-848f-90221b0e31f8" (UID: "6632e30b-e20e-44d0-848f-90221b0e31f8"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.543526 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6632e30b-e20e-44d0-848f-90221b0e31f8-config-data" (OuterVolumeSpecName: "config-data") pod "6632e30b-e20e-44d0-848f-90221b0e31f8" (UID: "6632e30b-e20e-44d0-848f-90221b0e31f8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.588518 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6632e30b-e20e-44d0-848f-90221b0e31f8-logs\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.588556 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6632e30b-e20e-44d0-848f-90221b0e31f8-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.588566 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6632e30b-e20e-44d0-848f-90221b0e31f8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.588576 4811 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6632e30b-e20e-44d0-848f-90221b0e31f8-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.588611 4811 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.588621 4811 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6632e30b-e20e-44d0-848f-90221b0e31f8-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.588631 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jwblz\" (UniqueName: \"kubernetes.io/projected/6632e30b-e20e-44d0-848f-90221b0e31f8-kube-api-access-jwblz\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.588639 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6632e30b-e20e-44d0-848f-90221b0e31f8-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.616707 4811 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.690340 4811 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.827911 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6632e30b-e20e-44d0-848f-90221b0e31f8","Type":"ContainerDied","Data":"4e797e9c0a8926cd81ec403d6804cde92c7262e4c20949a113237fdc2d888c0c"} Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.827969 4811 scope.go:117] "RemoveContainer" containerID="7030ec48db46a6003c5b25adb8cdea91892ee1fc6b380538b431e48a179ceb9e" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.827987 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.829778 4811 util.go:48] "No ready sandbox for pod can be found. 
Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.829778 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-99f2-account-create-update-xdmdm"
Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.829787 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-99f2-account-create-update-xdmdm" event={"ID":"d853eecf-d953-46ff-bf52-92c60a9213f6","Type":"ContainerDied","Data":"d4d787ffd76e663f87a71b6e7de943a0f5156348dd716a3812c6096f1c939b75"}
Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.829820 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d4d787ffd76e663f87a71b6e7de943a0f5156348dd716a3812c6096f1c939b75"
Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.833125 4811 generic.go:334] "Generic (PLEG): container finished" podID="96b236bc-d5d0-4beb-a5cb-a93384cbaca2" containerID="50c648e420c733a8c5868c472084552bba3edf2a988519678694481205f719e1" exitCode=0
Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.833181 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"96b236bc-d5d0-4beb-a5cb-a93384cbaca2","Type":"ContainerDied","Data":"50c648e420c733a8c5868c472084552bba3edf2a988519678694481205f719e1"}
Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.855075 4811 scope.go:117] "RemoveContainer" containerID="def92992084d92129bb1bd8e21fe1947d715fc2dae7f5c1c67b1e621cb755121"
Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.910805 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.928747 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.937500 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 28 16:08:24 crc kubenswrapper[4811]: E0128 16:08:24.937903 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d853eecf-d953-46ff-bf52-92c60a9213f6" containerName="mariadb-account-create-update"
Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.937921 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d853eecf-d953-46ff-bf52-92c60a9213f6" containerName="mariadb-account-create-update"
Jan 28 16:08:24 crc kubenswrapper[4811]: E0128 16:08:24.937931 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6632e30b-e20e-44d0-848f-90221b0e31f8" containerName="glance-httpd"
Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.937938 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="6632e30b-e20e-44d0-848f-90221b0e31f8" containerName="glance-httpd"
Jan 28 16:08:24 crc kubenswrapper[4811]: E0128 16:08:24.937964 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6632e30b-e20e-44d0-848f-90221b0e31f8" containerName="glance-log"
Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.937970 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="6632e30b-e20e-44d0-848f-90221b0e31f8" containerName="glance-log"
Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.938131 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d853eecf-d953-46ff-bf52-92c60a9213f6" containerName="mariadb-account-create-update"
Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.938149 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="6632e30b-e20e-44d0-848f-90221b0e31f8" containerName="glance-log"
Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.938159 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="6632e30b-e20e-44d0-848f-90221b0e31f8" containerName="glance-httpd"
Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.939219 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.941885 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc"
Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.942115 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Jan 28 16:08:24 crc kubenswrapper[4811]: I0128 16:08:24.960338 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.097938 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cq6c4\" (UniqueName: \"kubernetes.io/projected/54d8044d-232b-4d32-a2ed-fa2520b6513f-kube-api-access-cq6c4\") pod \"glance-default-external-api-0\" (UID: \"54d8044d-232b-4d32-a2ed-fa2520b6513f\") " pod="openstack/glance-default-external-api-0"
Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.098231 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/54d8044d-232b-4d32-a2ed-fa2520b6513f-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"54d8044d-232b-4d32-a2ed-fa2520b6513f\") " pod="openstack/glance-default-external-api-0"
Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.098252 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"54d8044d-232b-4d32-a2ed-fa2520b6513f\") " pod="openstack/glance-default-external-api-0"
Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.098310 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/54d8044d-232b-4d32-a2ed-fa2520b6513f-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"54d8044d-232b-4d32-a2ed-fa2520b6513f\") " pod="openstack/glance-default-external-api-0"
Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.098344 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54d8044d-232b-4d32-a2ed-fa2520b6513f-scripts\") pod \"glance-default-external-api-0\" (UID: \"54d8044d-232b-4d32-a2ed-fa2520b6513f\") " pod="openstack/glance-default-external-api-0"
Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.098374 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d8044d-232b-4d32-a2ed-fa2520b6513f-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"54d8044d-232b-4d32-a2ed-fa2520b6513f\") " pod="openstack/glance-default-external-api-0"
Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.098400 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54d8044d-232b-4d32-a2ed-fa2520b6513f-config-data\") pod \"glance-default-external-api-0\" (UID: \"54d8044d-232b-4d32-a2ed-fa2520b6513f\") " pod="openstack/glance-default-external-api-0"
Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.098616 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54d8044d-232b-4d32-a2ed-fa2520b6513f-logs\") pod \"glance-default-external-api-0\" (UID: \"54d8044d-232b-4d32-a2ed-fa2520b6513f\") " pod="openstack/glance-default-external-api-0"
Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.200611 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/54d8044d-232b-4d32-a2ed-fa2520b6513f-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"54d8044d-232b-4d32-a2ed-fa2520b6513f\") " pod="openstack/glance-default-external-api-0"
Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.200669 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54d8044d-232b-4d32-a2ed-fa2520b6513f-scripts\") pod \"glance-default-external-api-0\" (UID: \"54d8044d-232b-4d32-a2ed-fa2520b6513f\") " pod="openstack/glance-default-external-api-0"
Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.200706 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d8044d-232b-4d32-a2ed-fa2520b6513f-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"54d8044d-232b-4d32-a2ed-fa2520b6513f\") " pod="openstack/glance-default-external-api-0"
Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.200725 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54d8044d-232b-4d32-a2ed-fa2520b6513f-config-data\") pod \"glance-default-external-api-0\" (UID: \"54d8044d-232b-4d32-a2ed-fa2520b6513f\") " pod="openstack/glance-default-external-api-0"
Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.200774 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54d8044d-232b-4d32-a2ed-fa2520b6513f-logs\") pod \"glance-default-external-api-0\" (UID: \"54d8044d-232b-4d32-a2ed-fa2520b6513f\") " pod="openstack/glance-default-external-api-0"
Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.200801 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cq6c4\" (UniqueName: \"kubernetes.io/projected/54d8044d-232b-4d32-a2ed-fa2520b6513f-kube-api-access-cq6c4\") pod \"glance-default-external-api-0\" (UID: \"54d8044d-232b-4d32-a2ed-fa2520b6513f\") " pod="openstack/glance-default-external-api-0"
Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.200830 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"54d8044d-232b-4d32-a2ed-fa2520b6513f\") " pod="openstack/glance-default-external-api-0"
Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.200846 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/54d8044d-232b-4d32-a2ed-fa2520b6513f-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"54d8044d-232b-4d32-a2ed-fa2520b6513f\") " pod="openstack/glance-default-external-api-0"
Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.201342 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/54d8044d-232b-4d32-a2ed-fa2520b6513f-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"54d8044d-232b-4d32-a2ed-fa2520b6513f\") " pod="openstack/glance-default-external-api-0"
Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.210330 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/54d8044d-232b-4d32-a2ed-fa2520b6513f-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"54d8044d-232b-4d32-a2ed-fa2520b6513f\") " pod="openstack/glance-default-external-api-0"
Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.211049 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54d8044d-232b-4d32-a2ed-fa2520b6513f-logs\") pod \"glance-default-external-api-0\" (UID: \"54d8044d-232b-4d32-a2ed-fa2520b6513f\") " pod="openstack/glance-default-external-api-0"
Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.211662 4811 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"54d8044d-232b-4d32-a2ed-fa2520b6513f\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/glance-default-external-api-0"
Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.213811 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54d8044d-232b-4d32-a2ed-fa2520b6513f-config-data\") pod \"glance-default-external-api-0\" (UID: \"54d8044d-232b-4d32-a2ed-fa2520b6513f\") " pod="openstack/glance-default-external-api-0"
Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.215856 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54d8044d-232b-4d32-a2ed-fa2520b6513f-scripts\") pod \"glance-default-external-api-0\" (UID: \"54d8044d-232b-4d32-a2ed-fa2520b6513f\") " pod="openstack/glance-default-external-api-0"
Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.220650 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d8044d-232b-4d32-a2ed-fa2520b6513f-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"54d8044d-232b-4d32-a2ed-fa2520b6513f\") " pod="openstack/glance-default-external-api-0"
Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.229991 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cq6c4\" (UniqueName: \"kubernetes.io/projected/54d8044d-232b-4d32-a2ed-fa2520b6513f-kube-api-access-cq6c4\") pod \"glance-default-external-api-0\" (UID: \"54d8044d-232b-4d32-a2ed-fa2520b6513f\") " pod="openstack/glance-default-external-api-0"
Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.301464 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
\"54d8044d-232b-4d32-a2ed-fa2520b6513f\") " pod="openstack/glance-default-external-api-0" Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.342040 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-395c-account-create-update-gl7x6" Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.383393 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-mdvqk" Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.388423 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.505492 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5f6z\" (UniqueName: \"kubernetes.io/projected/7db6998b-1c77-4473-8656-6e64c5f14688-kube-api-access-k5f6z\") pod \"7db6998b-1c77-4473-8656-6e64c5f14688\" (UID: \"7db6998b-1c77-4473-8656-6e64c5f14688\") " Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.505603 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jqgzp\" (UniqueName: \"kubernetes.io/projected/2cb09ee7-ca68-4a03-9190-242af91783b1-kube-api-access-jqgzp\") pod \"2cb09ee7-ca68-4a03-9190-242af91783b1\" (UID: \"2cb09ee7-ca68-4a03-9190-242af91783b1\") " Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.505830 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7db6998b-1c77-4473-8656-6e64c5f14688-operator-scripts\") pod \"7db6998b-1c77-4473-8656-6e64c5f14688\" (UID: \"7db6998b-1c77-4473-8656-6e64c5f14688\") " Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.505876 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2cb09ee7-ca68-4a03-9190-242af91783b1-operator-scripts\") pod \"2cb09ee7-ca68-4a03-9190-242af91783b1\" (UID: \"2cb09ee7-ca68-4a03-9190-242af91783b1\") " Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.507049 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2cb09ee7-ca68-4a03-9190-242af91783b1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2cb09ee7-ca68-4a03-9190-242af91783b1" (UID: "2cb09ee7-ca68-4a03-9190-242af91783b1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.510119 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7db6998b-1c77-4473-8656-6e64c5f14688-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7db6998b-1c77-4473-8656-6e64c5f14688" (UID: "7db6998b-1c77-4473-8656-6e64c5f14688"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.512347 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7db6998b-1c77-4473-8656-6e64c5f14688-kube-api-access-k5f6z" (OuterVolumeSpecName: "kube-api-access-k5f6z") pod "7db6998b-1c77-4473-8656-6e64c5f14688" (UID: "7db6998b-1c77-4473-8656-6e64c5f14688"). InnerVolumeSpecName "kube-api-access-k5f6z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.516321 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2cb09ee7-ca68-4a03-9190-242af91783b1-kube-api-access-jqgzp" (OuterVolumeSpecName: "kube-api-access-jqgzp") pod "2cb09ee7-ca68-4a03-9190-242af91783b1" (UID: "2cb09ee7-ca68-4a03-9190-242af91783b1"). InnerVolumeSpecName "kube-api-access-jqgzp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.610589 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7db6998b-1c77-4473-8656-6e64c5f14688-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.610660 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2cb09ee7-ca68-4a03-9190-242af91783b1-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.610676 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5f6z\" (UniqueName: \"kubernetes.io/projected/7db6998b-1c77-4473-8656-6e64c5f14688-kube-api-access-k5f6z\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.610691 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jqgzp\" (UniqueName: \"kubernetes.io/projected/2cb09ee7-ca68-4a03-9190-242af91783b1-kube-api-access-jqgzp\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.631323 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-2952-account-create-update-8fmfb" Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.716134 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sqc2t\" (UniqueName: \"kubernetes.io/projected/fa06bb82-ce18-4cad-b539-b9ec87700437-kube-api-access-sqc2t\") pod \"fa06bb82-ce18-4cad-b539-b9ec87700437\" (UID: \"fa06bb82-ce18-4cad-b539-b9ec87700437\") " Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.716364 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa06bb82-ce18-4cad-b539-b9ec87700437-operator-scripts\") pod \"fa06bb82-ce18-4cad-b539-b9ec87700437\" (UID: \"fa06bb82-ce18-4cad-b539-b9ec87700437\") " Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.724553 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa06bb82-ce18-4cad-b539-b9ec87700437-kube-api-access-sqc2t" (OuterVolumeSpecName: "kube-api-access-sqc2t") pod "fa06bb82-ce18-4cad-b539-b9ec87700437" (UID: "fa06bb82-ce18-4cad-b539-b9ec87700437"). InnerVolumeSpecName "kube-api-access-sqc2t". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.740693 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa06bb82-ce18-4cad-b539-b9ec87700437-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fa06bb82-ce18-4cad-b539-b9ec87700437" (UID: "fa06bb82-ce18-4cad-b539-b9ec87700437"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.822710 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa06bb82-ce18-4cad-b539-b9ec87700437-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.822957 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sqc2t\" (UniqueName: \"kubernetes.io/projected/fa06bb82-ce18-4cad-b539-b9ec87700437-kube-api-access-sqc2t\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.882271 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-395c-account-create-update-gl7x6" event={"ID":"7db6998b-1c77-4473-8656-6e64c5f14688","Type":"ContainerDied","Data":"86fa2a08ab2bd6e1bcca213cec8fc197b87aabfd8f128dd6ee9b3e08e55b15af"} Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.882671 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="86fa2a08ab2bd6e1bcca213cec8fc197b87aabfd8f128dd6ee9b3e08e55b15af" Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.882489 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-395c-account-create-update-gl7x6" Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.896067 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f35795d5-ffac-4851-914a-00dc84496f91","Type":"ContainerStarted","Data":"384d30ad658a64b0ef3eaa69ac0df8c33b198c66d440a43063da0bd02a00c82f"} Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.899772 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-x2dtb" Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.905074 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-2952-account-create-update-8fmfb" Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.905221 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-2952-account-create-update-8fmfb" event={"ID":"fa06bb82-ce18-4cad-b539-b9ec87700437","Type":"ContainerDied","Data":"2bb124be14df1b4e0c64dfa23d4d97a6a1a58d514ae68e79d6e9ba43df863941"} Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.905251 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2bb124be14df1b4e0c64dfa23d4d97a6a1a58d514ae68e79d6e9ba43df863941" Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.905419 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.922707 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-lb2cx" Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.926267 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"96b236bc-d5d0-4beb-a5cb-a93384cbaca2","Type":"ContainerDied","Data":"3a35fd916ec1b60958aa95b6d78e01ef95505ff4a64c72d0785a53a435a13e0d"} Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.926303 4811 scope.go:117] "RemoveContainer" containerID="85f27822cc57fe5cd5364ebf4bc5dbf6901ca7b868ef9d6f5c11e934e8740427" Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.967146 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-mdvqk" event={"ID":"2cb09ee7-ca68-4a03-9190-242af91783b1","Type":"ContainerDied","Data":"fe9e2df7d8097ab0db724e7684196b2c269164548a21934cbba40be052fa838d"} Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.967233 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fe9e2df7d8097ab0db724e7684196b2c269164548a21934cbba40be052fa838d" Jan 28 16:08:25 crc kubenswrapper[4811]: I0128 16:08:25.967556 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-mdvqk" Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.003915 4811 scope.go:117] "RemoveContainer" containerID="b55d6f3771bfcb18d5fcb592319cef1f0e2e656a40b67dac3a81d940923e94dd" Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.028774 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-config-data\") pod \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\" (UID: \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\") " Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.028835 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-sg-core-conf-yaml\") pod \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\" (UID: \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\") " Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.028875 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/120fd687-33f8-4530-9a07-c0388d53fd4e-operator-scripts\") pod \"120fd687-33f8-4530-9a07-c0388d53fd4e\" (UID: \"120fd687-33f8-4530-9a07-c0388d53fd4e\") " Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.028935 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-log-httpd\") pod \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\" (UID: \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\") " Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.029043 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tpklj\" (UniqueName: \"kubernetes.io/projected/9dec1ac3-ea25-430b-8934-fda54a06648c-kube-api-access-tpklj\") pod \"9dec1ac3-ea25-430b-8934-fda54a06648c\" (UID: \"9dec1ac3-ea25-430b-8934-fda54a06648c\") " Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.029098 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-run-httpd\") pod \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\" (UID: \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\") " 
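The "SyncLoop (PLEG)" entries above come from the Pod Lifecycle Event Generator, which relays container state changes it observes from the runtime back into the kubelet's sync loop; generic.go:334 records the exit itself ("container finished ... exitCode=0"), and pod_container_deletor.go:80 notes when a dead ID no longer maps to any container in the pod's cached status, as happens when the ID belongs to the pod sandbox rather than an app container. A trimmed-down mirror of the event shape in those lines; the real types live in kubelet's pleg package, and this copy exists only for illustration:

    package main

    import "fmt"

    type PodLifecycleEventType string

    const (
        ContainerStarted PodLifecycleEventType = "ContainerStarted"
        ContainerDied    PodLifecycleEventType = "ContainerDied"
    )

    // PodLifecycleEvent mirrors the fields serialized in the log's event={...}.
    type PodLifecycleEvent struct {
        ID   string                // pod UID
        Type PodLifecycleEventType // what changed
        Data interface{}           // container or sandbox ID, as in "Data":"3a35fd..."
    }

    func main() {
        // The ceilometer-0 event from the log, re-expressed as a struct.
        ev := PodLifecycleEvent{
            ID:   "96b236bc-d5d0-4beb-a5cb-a93384cbaca2",
            Type: ContainerDied,
            Data: "3a35fd916ec1b60958aa95b6d78e01ef95505ff4a64c72d0785a53a435a13e0d",
        }
        fmt.Printf("SyncLoop (PLEG): event for pod %s: %s %v\n", ev.ID, ev.Type, ev.Data)
    }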
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.029151 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z78sx\" (UniqueName: \"kubernetes.io/projected/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-kube-api-access-z78sx\") pod \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\" (UID: \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\") "
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.029174 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-combined-ca-bundle\") pod \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\" (UID: \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\") "
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.029240 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-44b8b\" (UniqueName: \"kubernetes.io/projected/120fd687-33f8-4530-9a07-c0388d53fd4e-kube-api-access-44b8b\") pod \"120fd687-33f8-4530-9a07-c0388d53fd4e\" (UID: \"120fd687-33f8-4530-9a07-c0388d53fd4e\") "
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.029272 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9dec1ac3-ea25-430b-8934-fda54a06648c-operator-scripts\") pod \"9dec1ac3-ea25-430b-8934-fda54a06648c\" (UID: \"9dec1ac3-ea25-430b-8934-fda54a06648c\") "
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.029301 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-scripts\") pod \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\" (UID: \"96b236bc-d5d0-4beb-a5cb-a93384cbaca2\") "
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.033012 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "96b236bc-d5d0-4beb-a5cb-a93384cbaca2" (UID: "96b236bc-d5d0-4beb-a5cb-a93384cbaca2"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.035562 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-scripts" (OuterVolumeSpecName: "scripts") pod "96b236bc-d5d0-4beb-a5cb-a93384cbaca2" (UID: "96b236bc-d5d0-4beb-a5cb-a93384cbaca2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.035897 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9dec1ac3-ea25-430b-8934-fda54a06648c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9dec1ac3-ea25-430b-8934-fda54a06648c" (UID: "9dec1ac3-ea25-430b-8934-fda54a06648c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.036144 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "96b236bc-d5d0-4beb-a5cb-a93384cbaca2" (UID: "96b236bc-d5d0-4beb-a5cb-a93384cbaca2"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.036299 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/120fd687-33f8-4530-9a07-c0388d53fd4e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "120fd687-33f8-4530-9a07-c0388d53fd4e" (UID: "120fd687-33f8-4530-9a07-c0388d53fd4e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.036304 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9dec1ac3-ea25-430b-8934-fda54a06648c-kube-api-access-tpklj" (OuterVolumeSpecName: "kube-api-access-tpklj") pod "9dec1ac3-ea25-430b-8934-fda54a06648c" (UID: "9dec1ac3-ea25-430b-8934-fda54a06648c"). InnerVolumeSpecName "kube-api-access-tpklj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.038152 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/120fd687-33f8-4530-9a07-c0388d53fd4e-kube-api-access-44b8b" (OuterVolumeSpecName: "kube-api-access-44b8b") pod "120fd687-33f8-4530-9a07-c0388d53fd4e" (UID: "120fd687-33f8-4530-9a07-c0388d53fd4e"). InnerVolumeSpecName "kube-api-access-44b8b". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.039007 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-kube-api-access-z78sx" (OuterVolumeSpecName: "kube-api-access-z78sx") pod "96b236bc-d5d0-4beb-a5cb-a93384cbaca2" (UID: "96b236bc-d5d0-4beb-a5cb-a93384cbaca2"). InnerVolumeSpecName "kube-api-access-z78sx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.092636 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "96b236bc-d5d0-4beb-a5cb-a93384cbaca2" (UID: "96b236bc-d5d0-4beb-a5cb-a93384cbaca2"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.132395 4811 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-log-httpd\") on node \"crc\" DevicePath \"\""
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.132440 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tpklj\" (UniqueName: \"kubernetes.io/projected/9dec1ac3-ea25-430b-8934-fda54a06648c-kube-api-access-tpklj\") on node \"crc\" DevicePath \"\""
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.132454 4811 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-run-httpd\") on node \"crc\" DevicePath \"\""
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.132466 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z78sx\" (UniqueName: \"kubernetes.io/projected/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-kube-api-access-z78sx\") on node \"crc\" DevicePath \"\""
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.132478 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-44b8b\" (UniqueName: \"kubernetes.io/projected/120fd687-33f8-4530-9a07-c0388d53fd4e-kube-api-access-44b8b\") on node \"crc\" DevicePath \"\""
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.132490 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9dec1ac3-ea25-430b-8934-fda54a06648c-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.132500 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-scripts\") on node \"crc\" DevicePath \"\""
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.132511 4811 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.132524 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/120fd687-33f8-4530-9a07-c0388d53fd4e-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.160796 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "96b236bc-d5d0-4beb-a5cb-a93384cbaca2" (UID: "96b236bc-d5d0-4beb-a5cb-a93384cbaca2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.199562 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-config-data" (OuterVolumeSpecName: "config-data") pod "96b236bc-d5d0-4beb-a5cb-a93384cbaca2" (UID: "96b236bc-d5d0-4beb-a5cb-a93384cbaca2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.233804 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.233834 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96b236bc-d5d0-4beb-a5cb-a93384cbaca2-config-data\") on node \"crc\" DevicePath \"\""
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.263616 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.266017 4811 scope.go:117] "RemoveContainer" containerID="50c648e420c733a8c5868c472084552bba3edf2a988519678694481205f719e1"
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.334333 4811 scope.go:117] "RemoveContainer" containerID="1a257c8830ba6170421d2471c2be9db66903601803564b624f1aad7a190187a4"
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.355961 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6632e30b-e20e-44d0-848f-90221b0e31f8" path="/var/lib/kubelet/pods/6632e30b-e20e-44d0-848f-90221b0e31f8/volumes"
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.978901 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.982125 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f35795d5-ffac-4851-914a-00dc84496f91","Type":"ContainerStarted","Data":"84f6ea378d0b165999f2606c252834dba8614670922d49e48387042f20f1cf01"}
Need to start a new one" pod="openstack/nova-cell1-db-create-lb2cx" Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.988537 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-lb2cx" event={"ID":"9dec1ac3-ea25-430b-8934-fda54a06648c","Type":"ContainerDied","Data":"4dc4681da8c7653c406200c617befead830f745baf6c433eaf1c23731f5d8a18"} Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.988633 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4dc4681da8c7653c406200c617befead830f745baf6c433eaf1c23731f5d8a18" Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.993272 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"54d8044d-232b-4d32-a2ed-fa2520b6513f","Type":"ContainerStarted","Data":"039d7a3660d21b25f1bc174e77b3130cc47d2cdf0049929c92da48d3adb0d4b0"} Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.993311 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"54d8044d-232b-4d32-a2ed-fa2520b6513f","Type":"ContainerStarted","Data":"2e2808ccbcdbb41999dd2620b1eff19f3bdb7b5558d3f05fd590eda1d1e42396"} Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.996865 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-x2dtb" event={"ID":"120fd687-33f8-4530-9a07-c0388d53fd4e","Type":"ContainerDied","Data":"4ad246818f95a000a69385a690c2b5d6e8f87fb7943d54848a663094aa90ad07"} Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.996901 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4ad246818f95a000a69385a690c2b5d6e8f87fb7943d54848a663094aa90ad07" Jan 28 16:08:26 crc kubenswrapper[4811]: I0128 16:08:26.997170 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-x2dtb" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.020571 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.033922 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.041802 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:08:27 crc kubenswrapper[4811]: E0128 16:08:27.044078 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cb09ee7-ca68-4a03-9190-242af91783b1" containerName="mariadb-database-create" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.044174 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cb09ee7-ca68-4a03-9190-242af91783b1" containerName="mariadb-database-create" Jan 28 16:08:27 crc kubenswrapper[4811]: E0128 16:08:27.044242 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7db6998b-1c77-4473-8656-6e64c5f14688" containerName="mariadb-account-create-update" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.044336 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="7db6998b-1c77-4473-8656-6e64c5f14688" containerName="mariadb-account-create-update" Jan 28 16:08:27 crc kubenswrapper[4811]: E0128 16:08:27.044446 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9dec1ac3-ea25-430b-8934-fda54a06648c" containerName="mariadb-database-create" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.044533 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="9dec1ac3-ea25-430b-8934-fda54a06648c" containerName="mariadb-database-create" Jan 28 16:08:27 crc kubenswrapper[4811]: E0128 16:08:27.044594 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="120fd687-33f8-4530-9a07-c0388d53fd4e" containerName="mariadb-database-create" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.044652 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="120fd687-33f8-4530-9a07-c0388d53fd4e" containerName="mariadb-database-create" Jan 28 16:08:27 crc kubenswrapper[4811]: E0128 16:08:27.044711 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96b236bc-d5d0-4beb-a5cb-a93384cbaca2" containerName="ceilometer-notification-agent" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.044765 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="96b236bc-d5d0-4beb-a5cb-a93384cbaca2" containerName="ceilometer-notification-agent" Jan 28 16:08:27 crc kubenswrapper[4811]: E0128 16:08:27.044824 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa06bb82-ce18-4cad-b539-b9ec87700437" containerName="mariadb-account-create-update" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.044873 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa06bb82-ce18-4cad-b539-b9ec87700437" containerName="mariadb-account-create-update" Jan 28 16:08:27 crc kubenswrapper[4811]: E0128 16:08:27.044931 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96b236bc-d5d0-4beb-a5cb-a93384cbaca2" containerName="sg-core" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.044981 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="96b236bc-d5d0-4beb-a5cb-a93384cbaca2" containerName="sg-core" Jan 28 16:08:27 crc kubenswrapper[4811]: E0128 16:08:27.045036 4811 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="96b236bc-d5d0-4beb-a5cb-a93384cbaca2" containerName="ceilometer-central-agent" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.045085 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="96b236bc-d5d0-4beb-a5cb-a93384cbaca2" containerName="ceilometer-central-agent" Jan 28 16:08:27 crc kubenswrapper[4811]: E0128 16:08:27.045140 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96b236bc-d5d0-4beb-a5cb-a93384cbaca2" containerName="proxy-httpd" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.045195 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="96b236bc-d5d0-4beb-a5cb-a93384cbaca2" containerName="proxy-httpd" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.045418 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="9dec1ac3-ea25-430b-8934-fda54a06648c" containerName="mariadb-database-create" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.045528 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="96b236bc-d5d0-4beb-a5cb-a93384cbaca2" containerName="proxy-httpd" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.045605 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="96b236bc-d5d0-4beb-a5cb-a93384cbaca2" containerName="ceilometer-central-agent" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.045658 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="120fd687-33f8-4530-9a07-c0388d53fd4e" containerName="mariadb-database-create" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.045712 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="2cb09ee7-ca68-4a03-9190-242af91783b1" containerName="mariadb-database-create" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.045774 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="96b236bc-d5d0-4beb-a5cb-a93384cbaca2" containerName="ceilometer-notification-agent" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.045826 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="7db6998b-1c77-4473-8656-6e64c5f14688" containerName="mariadb-account-create-update" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.045886 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa06bb82-ce18-4cad-b539-b9ec87700437" containerName="mariadb-account-create-update" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.045941 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="96b236bc-d5d0-4beb-a5cb-a93384cbaca2" containerName="sg-core" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.049561 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.052866 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.053030 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.077940 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.152846 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3987ca31-a32f-4709-895b-604c9772c51d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3987ca31-a32f-4709-895b-604c9772c51d\") " pod="openstack/ceilometer-0" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.152891 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3987ca31-a32f-4709-895b-604c9772c51d-config-data\") pod \"ceilometer-0\" (UID: \"3987ca31-a32f-4709-895b-604c9772c51d\") " pod="openstack/ceilometer-0" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.152940 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3987ca31-a32f-4709-895b-604c9772c51d-run-httpd\") pod \"ceilometer-0\" (UID: \"3987ca31-a32f-4709-895b-604c9772c51d\") " pod="openstack/ceilometer-0" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.152991 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3987ca31-a32f-4709-895b-604c9772c51d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3987ca31-a32f-4709-895b-604c9772c51d\") " pod="openstack/ceilometer-0" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.153007 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3987ca31-a32f-4709-895b-604c9772c51d-log-httpd\") pod \"ceilometer-0\" (UID: \"3987ca31-a32f-4709-895b-604c9772c51d\") " pod="openstack/ceilometer-0" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.153049 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qf8bg\" (UniqueName: \"kubernetes.io/projected/3987ca31-a32f-4709-895b-604c9772c51d-kube-api-access-qf8bg\") pod \"ceilometer-0\" (UID: \"3987ca31-a32f-4709-895b-604c9772c51d\") " pod="openstack/ceilometer-0" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.153095 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3987ca31-a32f-4709-895b-604c9772c51d-scripts\") pod \"ceilometer-0\" (UID: \"3987ca31-a32f-4709-895b-604c9772c51d\") " pod="openstack/ceilometer-0" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.254617 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3987ca31-a32f-4709-895b-604c9772c51d-scripts\") pod \"ceilometer-0\" (UID: \"3987ca31-a32f-4709-895b-604c9772c51d\") " pod="openstack/ceilometer-0" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.257057 4811 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3987ca31-a32f-4709-895b-604c9772c51d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3987ca31-a32f-4709-895b-604c9772c51d\") " pod="openstack/ceilometer-0" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.257105 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3987ca31-a32f-4709-895b-604c9772c51d-config-data\") pod \"ceilometer-0\" (UID: \"3987ca31-a32f-4709-895b-604c9772c51d\") " pod="openstack/ceilometer-0" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.257196 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3987ca31-a32f-4709-895b-604c9772c51d-run-httpd\") pod \"ceilometer-0\" (UID: \"3987ca31-a32f-4709-895b-604c9772c51d\") " pod="openstack/ceilometer-0" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.257340 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3987ca31-a32f-4709-895b-604c9772c51d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3987ca31-a32f-4709-895b-604c9772c51d\") " pod="openstack/ceilometer-0" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.257367 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3987ca31-a32f-4709-895b-604c9772c51d-log-httpd\") pod \"ceilometer-0\" (UID: \"3987ca31-a32f-4709-895b-604c9772c51d\") " pod="openstack/ceilometer-0" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.257596 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qf8bg\" (UniqueName: \"kubernetes.io/projected/3987ca31-a32f-4709-895b-604c9772c51d-kube-api-access-qf8bg\") pod \"ceilometer-0\" (UID: \"3987ca31-a32f-4709-895b-604c9772c51d\") " pod="openstack/ceilometer-0" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.260545 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3987ca31-a32f-4709-895b-604c9772c51d-scripts\") pod \"ceilometer-0\" (UID: \"3987ca31-a32f-4709-895b-604c9772c51d\") " pod="openstack/ceilometer-0" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.260881 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3987ca31-a32f-4709-895b-604c9772c51d-run-httpd\") pod \"ceilometer-0\" (UID: \"3987ca31-a32f-4709-895b-604c9772c51d\") " pod="openstack/ceilometer-0" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.261050 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3987ca31-a32f-4709-895b-604c9772c51d-log-httpd\") pod \"ceilometer-0\" (UID: \"3987ca31-a32f-4709-895b-604c9772c51d\") " pod="openstack/ceilometer-0" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.263202 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3987ca31-a32f-4709-895b-604c9772c51d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3987ca31-a32f-4709-895b-604c9772c51d\") " pod="openstack/ceilometer-0" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.264865 4811 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3987ca31-a32f-4709-895b-604c9772c51d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3987ca31-a32f-4709-895b-604c9772c51d\") " pod="openstack/ceilometer-0" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.264932 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3987ca31-a32f-4709-895b-604c9772c51d-config-data\") pod \"ceilometer-0\" (UID: \"3987ca31-a32f-4709-895b-604c9772c51d\") " pod="openstack/ceilometer-0" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.273786 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qf8bg\" (UniqueName: \"kubernetes.io/projected/3987ca31-a32f-4709-895b-604c9772c51d-kube-api-access-qf8bg\") pod \"ceilometer-0\" (UID: \"3987ca31-a32f-4709-895b-604c9772c51d\") " pod="openstack/ceilometer-0" Jan 28 16:08:27 crc kubenswrapper[4811]: I0128 16:08:27.560772 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:08:28 crc kubenswrapper[4811]: I0128 16:08:28.007278 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f35795d5-ffac-4851-914a-00dc84496f91","Type":"ContainerStarted","Data":"a156c42e817f2655b9c4afbf1946c7359670560b8886dbd5e1f1475d498cce7d"} Jan 28 16:08:28 crc kubenswrapper[4811]: I0128 16:08:28.015616 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"54d8044d-232b-4d32-a2ed-fa2520b6513f","Type":"ContainerStarted","Data":"d388772dd1846d95d234d7f0fb94958aae8f34f960a0d56e2efdfefc4e673f8d"} Jan 28 16:08:28 crc kubenswrapper[4811]: I0128 16:08:28.042629 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.042606319 podStartE2EDuration="5.042606319s" podCreationTimestamp="2026-01-28 16:08:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:08:28.037351807 +0000 UTC m=+1400.791715400" watchObservedRunningTime="2026-01-28 16:08:28.042606319 +0000 UTC m=+1400.796969902" Jan 28 16:08:28 crc kubenswrapper[4811]: I0128 16:08:28.082538 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.082518939 podStartE2EDuration="4.082518939s" podCreationTimestamp="2026-01-28 16:08:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:08:28.076027753 +0000 UTC m=+1400.830391336" watchObservedRunningTime="2026-01-28 16:08:28.082518939 +0000 UTC m=+1400.836882522" Jan 28 16:08:28 crc kubenswrapper[4811]: I0128 16:08:28.097368 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:08:28 crc kubenswrapper[4811]: W0128 16:08:28.117058 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3987ca31_a32f_4709_895b_604c9772c51d.slice/crio-9fb7bc44ca06232001b701b89f0b95de000de33539c2d0238b703a763b23085e WatchSource:0}: Error finding container 9fb7bc44ca06232001b701b89f0b95de000de33539c2d0238b703a763b23085e: Status 404 returned error can't find the container with id 
9fb7bc44ca06232001b701b89f0b95de000de33539c2d0238b703a763b23085e Jan 28 16:08:28 crc kubenswrapper[4811]: I0128 16:08:28.350485 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b236bc-d5d0-4beb-a5cb-a93384cbaca2" path="/var/lib/kubelet/pods/96b236bc-d5d0-4beb-a5cb-a93384cbaca2/volumes" Jan 28 16:08:28 crc kubenswrapper[4811]: I0128 16:08:28.815132 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-775d8bf54b-pxqbl" Jan 28 16:08:29 crc kubenswrapper[4811]: I0128 16:08:29.039877 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3987ca31-a32f-4709-895b-604c9772c51d","Type":"ContainerStarted","Data":"9fb7bc44ca06232001b701b89f0b95de000de33539c2d0238b703a763b23085e"} Jan 28 16:08:29 crc kubenswrapper[4811]: I0128 16:08:29.872527 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:08:30 crc kubenswrapper[4811]: I0128 16:08:30.049466 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3987ca31-a32f-4709-895b-604c9772c51d","Type":"ContainerStarted","Data":"62a41ca0f587266064b53ff0c1d1515da28b24385a957528b08b0cc15fd53e5f"} Jan 28 16:08:30 crc kubenswrapper[4811]: I0128 16:08:30.049511 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3987ca31-a32f-4709-895b-604c9772c51d","Type":"ContainerStarted","Data":"d0ff7ed50d130bd94100bf7ab8cdd1f2adc6c3b3019b7aa25b20f64ffff56689"} Jan 28 16:08:30 crc kubenswrapper[4811]: I0128 16:08:30.382267 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-k98kw"] Jan 28 16:08:30 crc kubenswrapper[4811]: I0128 16:08:30.383865 4811 util.go:30] "No sandbox for pod can be found. 
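[editor's note] Every "SyncLoop (PLEG)" entry embeds its event inline as event={"ID":...,"Type":...,"Data":...}, where Data is a container or sandbox ID, so a pod's container lifecycle can be replayed straight from the log. A sketch extracting those triples (same assumptions as above: Python, one entry per line, hypothetical path):

import re

# event={"ID":"<pod UID>","Type":"ContainerStarted|ContainerDied","Data":"<id>"}
PLEG = re.compile(
    r'pod="(?P<pod>[^"]+)" event=\{"ID":"[^"]+",'
    r'"Type":"(?P<type>[^"]+)","Data":"(?P<data>[^"]+)"\}'
)

def pleg_events(lines):
    for line in lines:
        if m := PLEG.search(line):
            yield m.group("pod"), m.group("type"), m.group("data")

with open("kubelet.log") as f:  # path is an assumption
    for pod, etype, data in pleg_events(f):
        print(f"{etype:16s} {pod} {data[:13]}")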
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-k98kw" Jan 28 16:08:30 crc kubenswrapper[4811]: I0128 16:08:30.388891 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Jan 28 16:08:30 crc kubenswrapper[4811]: I0128 16:08:30.390090 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Jan 28 16:08:30 crc kubenswrapper[4811]: I0128 16:08:30.390255 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-tfmpk" Jan 28 16:08:30 crc kubenswrapper[4811]: I0128 16:08:30.403023 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-k98kw"] Jan 28 16:08:30 crc kubenswrapper[4811]: I0128 16:08:30.514332 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/10427a85-4d03-4473-8be9-a246485b3594-scripts\") pod \"nova-cell0-conductor-db-sync-k98kw\" (UID: \"10427a85-4d03-4473-8be9-a246485b3594\") " pod="openstack/nova-cell0-conductor-db-sync-k98kw" Jan 28 16:08:30 crc kubenswrapper[4811]: I0128 16:08:30.514384 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlpsc\" (UniqueName: \"kubernetes.io/projected/10427a85-4d03-4473-8be9-a246485b3594-kube-api-access-mlpsc\") pod \"nova-cell0-conductor-db-sync-k98kw\" (UID: \"10427a85-4d03-4473-8be9-a246485b3594\") " pod="openstack/nova-cell0-conductor-db-sync-k98kw" Jan 28 16:08:30 crc kubenswrapper[4811]: I0128 16:08:30.514460 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10427a85-4d03-4473-8be9-a246485b3594-config-data\") pod \"nova-cell0-conductor-db-sync-k98kw\" (UID: \"10427a85-4d03-4473-8be9-a246485b3594\") " pod="openstack/nova-cell0-conductor-db-sync-k98kw" Jan 28 16:08:30 crc kubenswrapper[4811]: I0128 16:08:30.514736 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10427a85-4d03-4473-8be9-a246485b3594-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-k98kw\" (UID: \"10427a85-4d03-4473-8be9-a246485b3594\") " pod="openstack/nova-cell0-conductor-db-sync-k98kw" Jan 28 16:08:30 crc kubenswrapper[4811]: I0128 16:08:30.616245 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10427a85-4d03-4473-8be9-a246485b3594-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-k98kw\" (UID: \"10427a85-4d03-4473-8be9-a246485b3594\") " pod="openstack/nova-cell0-conductor-db-sync-k98kw" Jan 28 16:08:30 crc kubenswrapper[4811]: I0128 16:08:30.616414 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/10427a85-4d03-4473-8be9-a246485b3594-scripts\") pod \"nova-cell0-conductor-db-sync-k98kw\" (UID: \"10427a85-4d03-4473-8be9-a246485b3594\") " pod="openstack/nova-cell0-conductor-db-sync-k98kw" Jan 28 16:08:30 crc kubenswrapper[4811]: I0128 16:08:30.616449 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlpsc\" (UniqueName: \"kubernetes.io/projected/10427a85-4d03-4473-8be9-a246485b3594-kube-api-access-mlpsc\") pod \"nova-cell0-conductor-db-sync-k98kw\" (UID: 
\"10427a85-4d03-4473-8be9-a246485b3594\") " pod="openstack/nova-cell0-conductor-db-sync-k98kw" Jan 28 16:08:30 crc kubenswrapper[4811]: I0128 16:08:30.616535 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10427a85-4d03-4473-8be9-a246485b3594-config-data\") pod \"nova-cell0-conductor-db-sync-k98kw\" (UID: \"10427a85-4d03-4473-8be9-a246485b3594\") " pod="openstack/nova-cell0-conductor-db-sync-k98kw" Jan 28 16:08:30 crc kubenswrapper[4811]: I0128 16:08:30.621077 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/10427a85-4d03-4473-8be9-a246485b3594-scripts\") pod \"nova-cell0-conductor-db-sync-k98kw\" (UID: \"10427a85-4d03-4473-8be9-a246485b3594\") " pod="openstack/nova-cell0-conductor-db-sync-k98kw" Jan 28 16:08:30 crc kubenswrapper[4811]: I0128 16:08:30.621436 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10427a85-4d03-4473-8be9-a246485b3594-config-data\") pod \"nova-cell0-conductor-db-sync-k98kw\" (UID: \"10427a85-4d03-4473-8be9-a246485b3594\") " pod="openstack/nova-cell0-conductor-db-sync-k98kw" Jan 28 16:08:30 crc kubenswrapper[4811]: I0128 16:08:30.622310 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10427a85-4d03-4473-8be9-a246485b3594-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-k98kw\" (UID: \"10427a85-4d03-4473-8be9-a246485b3594\") " pod="openstack/nova-cell0-conductor-db-sync-k98kw" Jan 28 16:08:30 crc kubenswrapper[4811]: I0128 16:08:30.640555 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlpsc\" (UniqueName: \"kubernetes.io/projected/10427a85-4d03-4473-8be9-a246485b3594-kube-api-access-mlpsc\") pod \"nova-cell0-conductor-db-sync-k98kw\" (UID: \"10427a85-4d03-4473-8be9-a246485b3594\") " pod="openstack/nova-cell0-conductor-db-sync-k98kw" Jan 28 16:08:30 crc kubenswrapper[4811]: I0128 16:08:30.701969 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-k98kw" Jan 28 16:08:31 crc kubenswrapper[4811]: I0128 16:08:31.135629 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3987ca31-a32f-4709-895b-604c9772c51d","Type":"ContainerStarted","Data":"4c59ecf2511c26a5a4735896af0c6edf6e3958516b50bf7b59a85933c152f4fe"} Jan 28 16:08:31 crc kubenswrapper[4811]: W0128 16:08:31.307263 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod10427a85_4d03_4473_8be9_a246485b3594.slice/crio-a302b3559f1b8c1362f907aeb413192a5dcbf5f2d2f0189f42aba098c942a3d8 WatchSource:0}: Error finding container a302b3559f1b8c1362f907aeb413192a5dcbf5f2d2f0189f42aba098c942a3d8: Status 404 returned error can't find the container with id a302b3559f1b8c1362f907aeb413192a5dcbf5f2d2f0189f42aba098c942a3d8 Jan 28 16:08:31 crc kubenswrapper[4811]: I0128 16:08:31.308677 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-k98kw"] Jan 28 16:08:32 crc kubenswrapper[4811]: I0128 16:08:32.153906 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-k98kw" event={"ID":"10427a85-4d03-4473-8be9-a246485b3594","Type":"ContainerStarted","Data":"a302b3559f1b8c1362f907aeb413192a5dcbf5f2d2f0189f42aba098c942a3d8"} Jan 28 16:08:33 crc kubenswrapper[4811]: I0128 16:08:33.164160 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3987ca31-a32f-4709-895b-604c9772c51d","Type":"ContainerStarted","Data":"eca07bf8361eb3a319765fe8c7be9f17f487209ba669bfb92fa8c3b6ea59efa0"} Jan 28 16:08:33 crc kubenswrapper[4811]: I0128 16:08:33.164379 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3987ca31-a32f-4709-895b-604c9772c51d" containerName="ceilometer-central-agent" containerID="cri-o://d0ff7ed50d130bd94100bf7ab8cdd1f2adc6c3b3019b7aa25b20f64ffff56689" gracePeriod=30 Jan 28 16:08:33 crc kubenswrapper[4811]: I0128 16:08:33.175886 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 28 16:08:33 crc kubenswrapper[4811]: I0128 16:08:33.175894 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3987ca31-a32f-4709-895b-604c9772c51d" containerName="ceilometer-notification-agent" containerID="cri-o://62a41ca0f587266064b53ff0c1d1515da28b24385a957528b08b0cc15fd53e5f" gracePeriod=30 Jan 28 16:08:33 crc kubenswrapper[4811]: I0128 16:08:33.175987 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3987ca31-a32f-4709-895b-604c9772c51d" containerName="proxy-httpd" containerID="cri-o://eca07bf8361eb3a319765fe8c7be9f17f487209ba669bfb92fa8c3b6ea59efa0" gracePeriod=30 Jan 28 16:08:33 crc kubenswrapper[4811]: I0128 16:08:33.176074 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3987ca31-a32f-4709-895b-604c9772c51d" containerName="sg-core" containerID="cri-o://4c59ecf2511c26a5a4735896af0c6edf6e3958516b50bf7b59a85933c152f4fe" gracePeriod=30 Jan 28 16:08:33 crc kubenswrapper[4811]: I0128 16:08:33.238629 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.770100345 podStartE2EDuration="6.238612942s" podCreationTimestamp="2026-01-28 16:08:27 +0000 UTC" 
firstStartedPulling="2026-01-28 16:08:28.12172081 +0000 UTC m=+1400.876084383" lastFinishedPulling="2026-01-28 16:08:32.590233397 +0000 UTC m=+1405.344596980" observedRunningTime="2026-01-28 16:08:33.233306549 +0000 UTC m=+1405.987670132" watchObservedRunningTime="2026-01-28 16:08:33.238612942 +0000 UTC m=+1405.992976525" Jan 28 16:08:34 crc kubenswrapper[4811]: I0128 16:08:34.178867 4811 generic.go:334] "Generic (PLEG): container finished" podID="3987ca31-a32f-4709-895b-604c9772c51d" containerID="eca07bf8361eb3a319765fe8c7be9f17f487209ba669bfb92fa8c3b6ea59efa0" exitCode=0 Jan 28 16:08:34 crc kubenswrapper[4811]: I0128 16:08:34.178900 4811 generic.go:334] "Generic (PLEG): container finished" podID="3987ca31-a32f-4709-895b-604c9772c51d" containerID="4c59ecf2511c26a5a4735896af0c6edf6e3958516b50bf7b59a85933c152f4fe" exitCode=2 Jan 28 16:08:34 crc kubenswrapper[4811]: I0128 16:08:34.178909 4811 generic.go:334] "Generic (PLEG): container finished" podID="3987ca31-a32f-4709-895b-604c9772c51d" containerID="62a41ca0f587266064b53ff0c1d1515da28b24385a957528b08b0cc15fd53e5f" exitCode=0 Jan 28 16:08:34 crc kubenswrapper[4811]: I0128 16:08:34.178928 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3987ca31-a32f-4709-895b-604c9772c51d","Type":"ContainerDied","Data":"eca07bf8361eb3a319765fe8c7be9f17f487209ba669bfb92fa8c3b6ea59efa0"} Jan 28 16:08:34 crc kubenswrapper[4811]: I0128 16:08:34.178954 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3987ca31-a32f-4709-895b-604c9772c51d","Type":"ContainerDied","Data":"4c59ecf2511c26a5a4735896af0c6edf6e3958516b50bf7b59a85933c152f4fe"} Jan 28 16:08:34 crc kubenswrapper[4811]: I0128 16:08:34.178965 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3987ca31-a32f-4709-895b-604c9772c51d","Type":"ContainerDied","Data":"62a41ca0f587266064b53ff0c1d1515da28b24385a957528b08b0cc15fd53e5f"} Jan 28 16:08:34 crc kubenswrapper[4811]: I0128 16:08:34.373653 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 28 16:08:34 crc kubenswrapper[4811]: I0128 16:08:34.374019 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 28 16:08:34 crc kubenswrapper[4811]: I0128 16:08:34.407317 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 28 16:08:34 crc kubenswrapper[4811]: I0128 16:08:34.428179 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 28 16:08:35 crc kubenswrapper[4811]: I0128 16:08:35.103504 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-86846dcc5c-hkg4d" Jan 28 16:08:35 crc kubenswrapper[4811]: I0128 16:08:35.190634 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-775d8bf54b-pxqbl"] Jan 28 16:08:35 crc kubenswrapper[4811]: I0128 16:08:35.190853 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-775d8bf54b-pxqbl" podUID="1a4cc403-44d2-4abe-a2fa-c40a29b5bab4" containerName="neutron-api" containerID="cri-o://638bf6a409a2ee8ef91ccc30517ca80008633479d4f2b150788e6d75c6463742" gracePeriod=30 Jan 28 16:08:35 crc kubenswrapper[4811]: I0128 16:08:35.191216 4811 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/neutron-775d8bf54b-pxqbl" podUID="1a4cc403-44d2-4abe-a2fa-c40a29b5bab4" containerName="neutron-httpd" containerID="cri-o://22ebe26a0dcd5c950f597ff3f4709d84054b91215a6052d84aa696ebda1c436f" gracePeriod=30 Jan 28 16:08:35 crc kubenswrapper[4811]: I0128 16:08:35.235268 4811 generic.go:334] "Generic (PLEG): container finished" podID="3987ca31-a32f-4709-895b-604c9772c51d" containerID="d0ff7ed50d130bd94100bf7ab8cdd1f2adc6c3b3019b7aa25b20f64ffff56689" exitCode=0 Jan 28 16:08:35 crc kubenswrapper[4811]: I0128 16:08:35.236866 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3987ca31-a32f-4709-895b-604c9772c51d","Type":"ContainerDied","Data":"d0ff7ed50d130bd94100bf7ab8cdd1f2adc6c3b3019b7aa25b20f64ffff56689"} Jan 28 16:08:35 crc kubenswrapper[4811]: I0128 16:08:35.236917 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 28 16:08:35 crc kubenswrapper[4811]: I0128 16:08:35.237052 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 28 16:08:35 crc kubenswrapper[4811]: I0128 16:08:35.389227 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 28 16:08:35 crc kubenswrapper[4811]: I0128 16:08:35.389588 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 28 16:08:35 crc kubenswrapper[4811]: I0128 16:08:35.437836 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 28 16:08:35 crc kubenswrapper[4811]: I0128 16:08:35.459999 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 28 16:08:36 crc kubenswrapper[4811]: I0128 16:08:36.246064 4811 generic.go:334] "Generic (PLEG): container finished" podID="1a4cc403-44d2-4abe-a2fa-c40a29b5bab4" containerID="22ebe26a0dcd5c950f597ff3f4709d84054b91215a6052d84aa696ebda1c436f" exitCode=0 Jan 28 16:08:36 crc kubenswrapper[4811]: I0128 16:08:36.246133 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-775d8bf54b-pxqbl" event={"ID":"1a4cc403-44d2-4abe-a2fa-c40a29b5bab4","Type":"ContainerDied","Data":"22ebe26a0dcd5c950f597ff3f4709d84054b91215a6052d84aa696ebda1c436f"} Jan 28 16:08:36 crc kubenswrapper[4811]: I0128 16:08:36.246920 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 28 16:08:36 crc kubenswrapper[4811]: I0128 16:08:36.247014 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 28 16:08:37 crc kubenswrapper[4811]: I0128 16:08:37.255385 4811 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 28 16:08:37 crc kubenswrapper[4811]: I0128 16:08:37.255802 4811 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 28 16:08:38 crc kubenswrapper[4811]: I0128 16:08:38.267057 4811 generic.go:334] "Generic (PLEG): container finished" podID="1a4cc403-44d2-4abe-a2fa-c40a29b5bab4" containerID="638bf6a409a2ee8ef91ccc30517ca80008633479d4f2b150788e6d75c6463742" exitCode=0 Jan 28 16:08:38 crc kubenswrapper[4811]: I0128 16:08:38.267129 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-775d8bf54b-pxqbl" 
event={"ID":"1a4cc403-44d2-4abe-a2fa-c40a29b5bab4","Type":"ContainerDied","Data":"638bf6a409a2ee8ef91ccc30517ca80008633479d4f2b150788e6d75c6463742"} Jan 28 16:08:38 crc kubenswrapper[4811]: I0128 16:08:38.392276 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 28 16:08:38 crc kubenswrapper[4811]: I0128 16:08:38.392466 4811 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 28 16:08:38 crc kubenswrapper[4811]: I0128 16:08:38.560969 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 28 16:08:38 crc kubenswrapper[4811]: I0128 16:08:38.931542 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 28 16:08:38 crc kubenswrapper[4811]: I0128 16:08:38.931674 4811 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 28 16:08:39 crc kubenswrapper[4811]: I0128 16:08:39.187978 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.264072 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.336326 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-775d8bf54b-pxqbl" event={"ID":"1a4cc403-44d2-4abe-a2fa-c40a29b5bab4","Type":"ContainerDied","Data":"dfbdd839f9a4cfb2fa6de12d44ff7dab4ce406a72e592fc45fcb39be4607f3ff"} Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.336640 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dfbdd839f9a4cfb2fa6de12d44ff7dab4ce406a72e592fc45fcb39be4607f3ff" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.339003 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3987ca31-a32f-4709-895b-604c9772c51d","Type":"ContainerDied","Data":"9fb7bc44ca06232001b701b89f0b95de000de33539c2d0238b703a763b23085e"} Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.339045 4811 scope.go:117] "RemoveContainer" containerID="eca07bf8361eb3a319765fe8c7be9f17f487209ba669bfb92fa8c3b6ea59efa0" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.339177 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.399975 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qf8bg\" (UniqueName: \"kubernetes.io/projected/3987ca31-a32f-4709-895b-604c9772c51d-kube-api-access-qf8bg\") pod \"3987ca31-a32f-4709-895b-604c9772c51d\" (UID: \"3987ca31-a32f-4709-895b-604c9772c51d\") " Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.400032 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3987ca31-a32f-4709-895b-604c9772c51d-sg-core-conf-yaml\") pod \"3987ca31-a32f-4709-895b-604c9772c51d\" (UID: \"3987ca31-a32f-4709-895b-604c9772c51d\") " Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.400098 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3987ca31-a32f-4709-895b-604c9772c51d-config-data\") pod \"3987ca31-a32f-4709-895b-604c9772c51d\" (UID: \"3987ca31-a32f-4709-895b-604c9772c51d\") " Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.400143 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3987ca31-a32f-4709-895b-604c9772c51d-run-httpd\") pod \"3987ca31-a32f-4709-895b-604c9772c51d\" (UID: \"3987ca31-a32f-4709-895b-604c9772c51d\") " Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.400207 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3987ca31-a32f-4709-895b-604c9772c51d-scripts\") pod \"3987ca31-a32f-4709-895b-604c9772c51d\" (UID: \"3987ca31-a32f-4709-895b-604c9772c51d\") " Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.400241 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3987ca31-a32f-4709-895b-604c9772c51d-log-httpd\") pod \"3987ca31-a32f-4709-895b-604c9772c51d\" (UID: \"3987ca31-a32f-4709-895b-604c9772c51d\") " Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.400391 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3987ca31-a32f-4709-895b-604c9772c51d-combined-ca-bundle\") pod \"3987ca31-a32f-4709-895b-604c9772c51d\" (UID: \"3987ca31-a32f-4709-895b-604c9772c51d\") " Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.401329 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3987ca31-a32f-4709-895b-604c9772c51d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "3987ca31-a32f-4709-895b-604c9772c51d" (UID: "3987ca31-a32f-4709-895b-604c9772c51d"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.401543 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3987ca31-a32f-4709-895b-604c9772c51d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "3987ca31-a32f-4709-895b-604c9772c51d" (UID: "3987ca31-a32f-4709-895b-604c9772c51d"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.405216 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3987ca31-a32f-4709-895b-604c9772c51d-scripts" (OuterVolumeSpecName: "scripts") pod "3987ca31-a32f-4709-895b-604c9772c51d" (UID: "3987ca31-a32f-4709-895b-604c9772c51d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.405886 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3987ca31-a32f-4709-895b-604c9772c51d-kube-api-access-qf8bg" (OuterVolumeSpecName: "kube-api-access-qf8bg") pod "3987ca31-a32f-4709-895b-604c9772c51d" (UID: "3987ca31-a32f-4709-895b-604c9772c51d"). InnerVolumeSpecName "kube-api-access-qf8bg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.415390 4811 scope.go:117] "RemoveContainer" containerID="4c59ecf2511c26a5a4735896af0c6edf6e3958516b50bf7b59a85933c152f4fe" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.436507 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3987ca31-a32f-4709-895b-604c9772c51d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "3987ca31-a32f-4709-895b-604c9772c51d" (UID: "3987ca31-a32f-4709-895b-604c9772c51d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.499677 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3987ca31-a32f-4709-895b-604c9772c51d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3987ca31-a32f-4709-895b-604c9772c51d" (UID: "3987ca31-a32f-4709-895b-604c9772c51d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.502904 4811 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3987ca31-a32f-4709-895b-604c9772c51d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.502933 4811 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3987ca31-a32f-4709-895b-604c9772c51d-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.502942 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3987ca31-a32f-4709-895b-604c9772c51d-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.502953 4811 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3987ca31-a32f-4709-895b-604c9772c51d-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.502961 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3987ca31-a32f-4709-895b-604c9772c51d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.502971 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qf8bg\" (UniqueName: \"kubernetes.io/projected/3987ca31-a32f-4709-895b-604c9772c51d-kube-api-access-qf8bg\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.517809 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3987ca31-a32f-4709-895b-604c9772c51d-config-data" (OuterVolumeSpecName: "config-data") pod "3987ca31-a32f-4709-895b-604c9772c51d" (UID: "3987ca31-a32f-4709-895b-604c9772c51d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.547123 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-775d8bf54b-pxqbl" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.558157 4811 scope.go:117] "RemoveContainer" containerID="62a41ca0f587266064b53ff0c1d1515da28b24385a957528b08b0cc15fd53e5f" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.596751 4811 scope.go:117] "RemoveContainer" containerID="d0ff7ed50d130bd94100bf7ab8cdd1f2adc6c3b3019b7aa25b20f64ffff56689" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.606765 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3987ca31-a32f-4709-895b-604c9772c51d-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.676578 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.686009 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.699719 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:08:42 crc kubenswrapper[4811]: E0128 16:08:42.700139 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3987ca31-a32f-4709-895b-604c9772c51d" containerName="ceilometer-central-agent" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.700158 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="3987ca31-a32f-4709-895b-604c9772c51d" containerName="ceilometer-central-agent" Jan 28 16:08:42 crc kubenswrapper[4811]: E0128 16:08:42.700168 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3987ca31-a32f-4709-895b-604c9772c51d" containerName="sg-core" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.700174 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="3987ca31-a32f-4709-895b-604c9772c51d" containerName="sg-core" Jan 28 16:08:42 crc kubenswrapper[4811]: E0128 16:08:42.700190 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a4cc403-44d2-4abe-a2fa-c40a29b5bab4" containerName="neutron-api" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.700198 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a4cc403-44d2-4abe-a2fa-c40a29b5bab4" containerName="neutron-api" Jan 28 16:08:42 crc kubenswrapper[4811]: E0128 16:08:42.700220 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3987ca31-a32f-4709-895b-604c9772c51d" containerName="ceilometer-notification-agent" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.700227 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="3987ca31-a32f-4709-895b-604c9772c51d" containerName="ceilometer-notification-agent" Jan 28 16:08:42 crc kubenswrapper[4811]: E0128 16:08:42.700246 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3987ca31-a32f-4709-895b-604c9772c51d" containerName="proxy-httpd" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.700252 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="3987ca31-a32f-4709-895b-604c9772c51d" containerName="proxy-httpd" Jan 28 16:08:42 crc kubenswrapper[4811]: E0128 16:08:42.700268 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a4cc403-44d2-4abe-a2fa-c40a29b5bab4" containerName="neutron-httpd" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.700275 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a4cc403-44d2-4abe-a2fa-c40a29b5bab4" containerName="neutron-httpd" Jan 28 16:08:42 crc 
kubenswrapper[4811]: I0128 16:08:42.700486 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a4cc403-44d2-4abe-a2fa-c40a29b5bab4" containerName="neutron-api" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.700503 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="3987ca31-a32f-4709-895b-604c9772c51d" containerName="sg-core" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.700516 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="3987ca31-a32f-4709-895b-604c9772c51d" containerName="proxy-httpd" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.700527 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="3987ca31-a32f-4709-895b-604c9772c51d" containerName="ceilometer-central-agent" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.700536 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a4cc403-44d2-4abe-a2fa-c40a29b5bab4" containerName="neutron-httpd" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.700549 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="3987ca31-a32f-4709-895b-604c9772c51d" containerName="ceilometer-notification-agent" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.702538 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.705310 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.707549 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a4cc403-44d2-4abe-a2fa-c40a29b5bab4-ovndb-tls-certs\") pod \"1a4cc403-44d2-4abe-a2fa-c40a29b5bab4\" (UID: \"1a4cc403-44d2-4abe-a2fa-c40a29b5bab4\") " Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.707717 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jpggq\" (UniqueName: \"kubernetes.io/projected/1a4cc403-44d2-4abe-a2fa-c40a29b5bab4-kube-api-access-jpggq\") pod \"1a4cc403-44d2-4abe-a2fa-c40a29b5bab4\" (UID: \"1a4cc403-44d2-4abe-a2fa-c40a29b5bab4\") " Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.707829 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1a4cc403-44d2-4abe-a2fa-c40a29b5bab4-config\") pod \"1a4cc403-44d2-4abe-a2fa-c40a29b5bab4\" (UID: \"1a4cc403-44d2-4abe-a2fa-c40a29b5bab4\") " Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.707936 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/1a4cc403-44d2-4abe-a2fa-c40a29b5bab4-httpd-config\") pod \"1a4cc403-44d2-4abe-a2fa-c40a29b5bab4\" (UID: \"1a4cc403-44d2-4abe-a2fa-c40a29b5bab4\") " Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.708013 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a4cc403-44d2-4abe-a2fa-c40a29b5bab4-combined-ca-bundle\") pod \"1a4cc403-44d2-4abe-a2fa-c40a29b5bab4\" (UID: \"1a4cc403-44d2-4abe-a2fa-c40a29b5bab4\") " Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.711841 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.716172 4811 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a4cc403-44d2-4abe-a2fa-c40a29b5bab4-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "1a4cc403-44d2-4abe-a2fa-c40a29b5bab4" (UID: "1a4cc403-44d2-4abe-a2fa-c40a29b5bab4"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.717895 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.723688 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a4cc403-44d2-4abe-a2fa-c40a29b5bab4-kube-api-access-jpggq" (OuterVolumeSpecName: "kube-api-access-jpggq") pod "1a4cc403-44d2-4abe-a2fa-c40a29b5bab4" (UID: "1a4cc403-44d2-4abe-a2fa-c40a29b5bab4"). InnerVolumeSpecName "kube-api-access-jpggq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.765418 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a4cc403-44d2-4abe-a2fa-c40a29b5bab4-config" (OuterVolumeSpecName: "config") pod "1a4cc403-44d2-4abe-a2fa-c40a29b5bab4" (UID: "1a4cc403-44d2-4abe-a2fa-c40a29b5bab4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.780800 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a4cc403-44d2-4abe-a2fa-c40a29b5bab4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1a4cc403-44d2-4abe-a2fa-c40a29b5bab4" (UID: "1a4cc403-44d2-4abe-a2fa-c40a29b5bab4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.798626 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a4cc403-44d2-4abe-a2fa-c40a29b5bab4-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "1a4cc403-44d2-4abe-a2fa-c40a29b5bab4" (UID: "1a4cc403-44d2-4abe-a2fa-c40a29b5bab4"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.809779 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxsx7\" (UniqueName: \"kubernetes.io/projected/dd4f2684-a734-4d83-a620-6373fbb5e129-kube-api-access-pxsx7\") pod \"ceilometer-0\" (UID: \"dd4f2684-a734-4d83-a620-6373fbb5e129\") " pod="openstack/ceilometer-0" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.809841 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd4f2684-a734-4d83-a620-6373fbb5e129-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dd4f2684-a734-4d83-a620-6373fbb5e129\") " pod="openstack/ceilometer-0" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.809867 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd4f2684-a734-4d83-a620-6373fbb5e129-run-httpd\") pod \"ceilometer-0\" (UID: \"dd4f2684-a734-4d83-a620-6373fbb5e129\") " pod="openstack/ceilometer-0" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.809913 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dd4f2684-a734-4d83-a620-6373fbb5e129-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dd4f2684-a734-4d83-a620-6373fbb5e129\") " pod="openstack/ceilometer-0" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.809939 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd4f2684-a734-4d83-a620-6373fbb5e129-scripts\") pod \"ceilometer-0\" (UID: \"dd4f2684-a734-4d83-a620-6373fbb5e129\") " pod="openstack/ceilometer-0" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.810045 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd4f2684-a734-4d83-a620-6373fbb5e129-config-data\") pod \"ceilometer-0\" (UID: \"dd4f2684-a734-4d83-a620-6373fbb5e129\") " pod="openstack/ceilometer-0" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.810075 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd4f2684-a734-4d83-a620-6373fbb5e129-log-httpd\") pod \"ceilometer-0\" (UID: \"dd4f2684-a734-4d83-a620-6373fbb5e129\") " pod="openstack/ceilometer-0" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.810163 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a4cc403-44d2-4abe-a2fa-c40a29b5bab4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.810176 4811 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a4cc403-44d2-4abe-a2fa-c40a29b5bab4-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.810186 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jpggq\" (UniqueName: \"kubernetes.io/projected/1a4cc403-44d2-4abe-a2fa-c40a29b5bab4-kube-api-access-jpggq\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.810226 4811 
reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/1a4cc403-44d2-4abe-a2fa-c40a29b5bab4-config\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.810259 4811 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/1a4cc403-44d2-4abe-a2fa-c40a29b5bab4-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.911806 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxsx7\" (UniqueName: \"kubernetes.io/projected/dd4f2684-a734-4d83-a620-6373fbb5e129-kube-api-access-pxsx7\") pod \"ceilometer-0\" (UID: \"dd4f2684-a734-4d83-a620-6373fbb5e129\") " pod="openstack/ceilometer-0" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.911869 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd4f2684-a734-4d83-a620-6373fbb5e129-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dd4f2684-a734-4d83-a620-6373fbb5e129\") " pod="openstack/ceilometer-0" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.912467 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd4f2684-a734-4d83-a620-6373fbb5e129-run-httpd\") pod \"ceilometer-0\" (UID: \"dd4f2684-a734-4d83-a620-6373fbb5e129\") " pod="openstack/ceilometer-0" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.912501 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd4f2684-a734-4d83-a620-6373fbb5e129-run-httpd\") pod \"ceilometer-0\" (UID: \"dd4f2684-a734-4d83-a620-6373fbb5e129\") " pod="openstack/ceilometer-0" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.912544 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dd4f2684-a734-4d83-a620-6373fbb5e129-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dd4f2684-a734-4d83-a620-6373fbb5e129\") " pod="openstack/ceilometer-0" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.912570 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd4f2684-a734-4d83-a620-6373fbb5e129-scripts\") pod \"ceilometer-0\" (UID: \"dd4f2684-a734-4d83-a620-6373fbb5e129\") " pod="openstack/ceilometer-0" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.912628 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd4f2684-a734-4d83-a620-6373fbb5e129-config-data\") pod \"ceilometer-0\" (UID: \"dd4f2684-a734-4d83-a620-6373fbb5e129\") " pod="openstack/ceilometer-0" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.912664 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd4f2684-a734-4d83-a620-6373fbb5e129-log-httpd\") pod \"ceilometer-0\" (UID: \"dd4f2684-a734-4d83-a620-6373fbb5e129\") " pod="openstack/ceilometer-0" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.913105 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd4f2684-a734-4d83-a620-6373fbb5e129-log-httpd\") pod \"ceilometer-0\" (UID: 
\"dd4f2684-a734-4d83-a620-6373fbb5e129\") " pod="openstack/ceilometer-0" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.915500 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd4f2684-a734-4d83-a620-6373fbb5e129-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dd4f2684-a734-4d83-a620-6373fbb5e129\") " pod="openstack/ceilometer-0" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.917590 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd4f2684-a734-4d83-a620-6373fbb5e129-config-data\") pod \"ceilometer-0\" (UID: \"dd4f2684-a734-4d83-a620-6373fbb5e129\") " pod="openstack/ceilometer-0" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.919361 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dd4f2684-a734-4d83-a620-6373fbb5e129-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dd4f2684-a734-4d83-a620-6373fbb5e129\") " pod="openstack/ceilometer-0" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.919976 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd4f2684-a734-4d83-a620-6373fbb5e129-scripts\") pod \"ceilometer-0\" (UID: \"dd4f2684-a734-4d83-a620-6373fbb5e129\") " pod="openstack/ceilometer-0" Jan 28 16:08:42 crc kubenswrapper[4811]: I0128 16:08:42.928789 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxsx7\" (UniqueName: \"kubernetes.io/projected/dd4f2684-a734-4d83-a620-6373fbb5e129-kube-api-access-pxsx7\") pod \"ceilometer-0\" (UID: \"dd4f2684-a734-4d83-a620-6373fbb5e129\") " pod="openstack/ceilometer-0" Jan 28 16:08:43 crc kubenswrapper[4811]: I0128 16:08:43.023844 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:08:43 crc kubenswrapper[4811]: I0128 16:08:43.356267 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-k98kw" event={"ID":"10427a85-4d03-4473-8be9-a246485b3594","Type":"ContainerStarted","Data":"0f49c1f408b006371b9d24324b32ed0babd4053c0669c50182a9c6cec0d5e3b3"} Jan 28 16:08:43 crc kubenswrapper[4811]: I0128 16:08:43.358054 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-775d8bf54b-pxqbl" Jan 28 16:08:43 crc kubenswrapper[4811]: I0128 16:08:43.385174 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-k98kw" podStartSLOduration=2.454674948 podStartE2EDuration="13.385148665s" podCreationTimestamp="2026-01-28 16:08:30 +0000 UTC" firstStartedPulling="2026-01-28 16:08:31.310158168 +0000 UTC m=+1404.064521751" lastFinishedPulling="2026-01-28 16:08:42.240631885 +0000 UTC m=+1414.994995468" observedRunningTime="2026-01-28 16:08:43.37385118 +0000 UTC m=+1416.128214773" watchObservedRunningTime="2026-01-28 16:08:43.385148665 +0000 UTC m=+1416.139512248" Jan 28 16:08:43 crc kubenswrapper[4811]: I0128 16:08:43.431232 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-775d8bf54b-pxqbl"] Jan 28 16:08:43 crc kubenswrapper[4811]: I0128 16:08:43.438989 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-775d8bf54b-pxqbl"] Jan 28 16:08:43 crc kubenswrapper[4811]: I0128 16:08:43.483618 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:08:44 crc kubenswrapper[4811]: I0128 16:08:44.349317 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a4cc403-44d2-4abe-a2fa-c40a29b5bab4" path="/var/lib/kubelet/pods/1a4cc403-44d2-4abe-a2fa-c40a29b5bab4/volumes" Jan 28 16:08:44 crc kubenswrapper[4811]: I0128 16:08:44.350423 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3987ca31-a32f-4709-895b-604c9772c51d" path="/var/lib/kubelet/pods/3987ca31-a32f-4709-895b-604c9772c51d/volumes" Jan 28 16:08:44 crc kubenswrapper[4811]: I0128 16:08:44.369069 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd4f2684-a734-4d83-a620-6373fbb5e129","Type":"ContainerStarted","Data":"fb81a922839078743f8f406998c6a36c789a7a2c9f9a7fb23bbdd8efce7ec8a8"} Jan 28 16:08:47 crc kubenswrapper[4811]: I0128 16:08:47.413285 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd4f2684-a734-4d83-a620-6373fbb5e129","Type":"ContainerStarted","Data":"8e7c45be7c94bf0644688d878a181c14bcbf647125f4a872f3404048b940ce6f"} Jan 28 16:08:48 crc kubenswrapper[4811]: I0128 16:08:48.428354 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd4f2684-a734-4d83-a620-6373fbb5e129","Type":"ContainerStarted","Data":"d8d112b915b6d114b41c0bedfb0587154ee171b8d840019b370c5bdbbf1c0e54"} Jan 28 16:08:48 crc kubenswrapper[4811]: I0128 16:08:48.428724 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd4f2684-a734-4d83-a620-6373fbb5e129","Type":"ContainerStarted","Data":"d53f663a8a63febf5b49bc6a973feec14dad33ddbc2bad306292200a817ae77f"} Jan 28 16:08:50 crc kubenswrapper[4811]: I0128 16:08:50.458837 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd4f2684-a734-4d83-a620-6373fbb5e129","Type":"ContainerStarted","Data":"2bf4a19b313e3309c9a1d23244cfb17b0801fdbcdda8f0aa8d6015ac3567fae5"} Jan 28 16:08:50 crc kubenswrapper[4811]: I0128 16:08:50.459524 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 28 16:08:50 crc kubenswrapper[4811]: I0128 16:08:50.487205 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.00582956 podStartE2EDuration="8.487183065s" 
podCreationTimestamp="2026-01-28 16:08:42 +0000 UTC" firstStartedPulling="2026-01-28 16:08:43.498758429 +0000 UTC m=+1416.253122012" lastFinishedPulling="2026-01-28 16:08:49.980111934 +0000 UTC m=+1422.734475517" observedRunningTime="2026-01-28 16:08:50.485990773 +0000 UTC m=+1423.240354366" watchObservedRunningTime="2026-01-28 16:08:50.487183065 +0000 UTC m=+1423.241546648" Jan 28 16:08:54 crc kubenswrapper[4811]: I0128 16:08:54.923687 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:08:54 crc kubenswrapper[4811]: I0128 16:08:54.924751 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dd4f2684-a734-4d83-a620-6373fbb5e129" containerName="proxy-httpd" containerID="cri-o://2bf4a19b313e3309c9a1d23244cfb17b0801fdbcdda8f0aa8d6015ac3567fae5" gracePeriod=30 Jan 28 16:08:54 crc kubenswrapper[4811]: I0128 16:08:54.924941 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dd4f2684-a734-4d83-a620-6373fbb5e129" containerName="sg-core" containerID="cri-o://d8d112b915b6d114b41c0bedfb0587154ee171b8d840019b370c5bdbbf1c0e54" gracePeriod=30 Jan 28 16:08:54 crc kubenswrapper[4811]: I0128 16:08:54.925007 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dd4f2684-a734-4d83-a620-6373fbb5e129" containerName="ceilometer-notification-agent" containerID="cri-o://d53f663a8a63febf5b49bc6a973feec14dad33ddbc2bad306292200a817ae77f" gracePeriod=30 Jan 28 16:08:54 crc kubenswrapper[4811]: I0128 16:08:54.924691 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dd4f2684-a734-4d83-a620-6373fbb5e129" containerName="ceilometer-central-agent" containerID="cri-o://8e7c45be7c94bf0644688d878a181c14bcbf647125f4a872f3404048b940ce6f" gracePeriod=30 Jan 28 16:08:55 crc kubenswrapper[4811]: I0128 16:08:55.515625 4811 generic.go:334] "Generic (PLEG): container finished" podID="dd4f2684-a734-4d83-a620-6373fbb5e129" containerID="2bf4a19b313e3309c9a1d23244cfb17b0801fdbcdda8f0aa8d6015ac3567fae5" exitCode=0 Jan 28 16:08:55 crc kubenswrapper[4811]: I0128 16:08:55.515966 4811 generic.go:334] "Generic (PLEG): container finished" podID="dd4f2684-a734-4d83-a620-6373fbb5e129" containerID="d8d112b915b6d114b41c0bedfb0587154ee171b8d840019b370c5bdbbf1c0e54" exitCode=2 Jan 28 16:08:55 crc kubenswrapper[4811]: I0128 16:08:55.515985 4811 generic.go:334] "Generic (PLEG): container finished" podID="dd4f2684-a734-4d83-a620-6373fbb5e129" containerID="d53f663a8a63febf5b49bc6a973feec14dad33ddbc2bad306292200a817ae77f" exitCode=0 Jan 28 16:08:55 crc kubenswrapper[4811]: I0128 16:08:55.515723 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd4f2684-a734-4d83-a620-6373fbb5e129","Type":"ContainerDied","Data":"2bf4a19b313e3309c9a1d23244cfb17b0801fdbcdda8f0aa8d6015ac3567fae5"} Jan 28 16:08:55 crc kubenswrapper[4811]: I0128 16:08:55.516028 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd4f2684-a734-4d83-a620-6373fbb5e129","Type":"ContainerDied","Data":"d8d112b915b6d114b41c0bedfb0587154ee171b8d840019b370c5bdbbf1c0e54"} Jan 28 16:08:55 crc kubenswrapper[4811]: I0128 16:08:55.516081 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"dd4f2684-a734-4d83-a620-6373fbb5e129","Type":"ContainerDied","Data":"d53f663a8a63febf5b49bc6a973feec14dad33ddbc2bad306292200a817ae77f"} Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.003515 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.099340 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd4f2684-a734-4d83-a620-6373fbb5e129-run-httpd\") pod \"dd4f2684-a734-4d83-a620-6373fbb5e129\" (UID: \"dd4f2684-a734-4d83-a620-6373fbb5e129\") " Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.099425 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pxsx7\" (UniqueName: \"kubernetes.io/projected/dd4f2684-a734-4d83-a620-6373fbb5e129-kube-api-access-pxsx7\") pod \"dd4f2684-a734-4d83-a620-6373fbb5e129\" (UID: \"dd4f2684-a734-4d83-a620-6373fbb5e129\") " Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.099572 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd4f2684-a734-4d83-a620-6373fbb5e129-scripts\") pod \"dd4f2684-a734-4d83-a620-6373fbb5e129\" (UID: \"dd4f2684-a734-4d83-a620-6373fbb5e129\") " Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.100007 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd4f2684-a734-4d83-a620-6373fbb5e129-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "dd4f2684-a734-4d83-a620-6373fbb5e129" (UID: "dd4f2684-a734-4d83-a620-6373fbb5e129"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.100157 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd4f2684-a734-4d83-a620-6373fbb5e129-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "dd4f2684-a734-4d83-a620-6373fbb5e129" (UID: "dd4f2684-a734-4d83-a620-6373fbb5e129"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.099702 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd4f2684-a734-4d83-a620-6373fbb5e129-log-httpd\") pod \"dd4f2684-a734-4d83-a620-6373fbb5e129\" (UID: \"dd4f2684-a734-4d83-a620-6373fbb5e129\") " Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.100222 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd4f2684-a734-4d83-a620-6373fbb5e129-combined-ca-bundle\") pod \"dd4f2684-a734-4d83-a620-6373fbb5e129\" (UID: \"dd4f2684-a734-4d83-a620-6373fbb5e129\") " Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.100245 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dd4f2684-a734-4d83-a620-6373fbb5e129-sg-core-conf-yaml\") pod \"dd4f2684-a734-4d83-a620-6373fbb5e129\" (UID: \"dd4f2684-a734-4d83-a620-6373fbb5e129\") " Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.100616 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd4f2684-a734-4d83-a620-6373fbb5e129-config-data\") pod \"dd4f2684-a734-4d83-a620-6373fbb5e129\" (UID: \"dd4f2684-a734-4d83-a620-6373fbb5e129\") " Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.101277 4811 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd4f2684-a734-4d83-a620-6373fbb5e129-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.101297 4811 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd4f2684-a734-4d83-a620-6373fbb5e129-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.104959 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd4f2684-a734-4d83-a620-6373fbb5e129-kube-api-access-pxsx7" (OuterVolumeSpecName: "kube-api-access-pxsx7") pod "dd4f2684-a734-4d83-a620-6373fbb5e129" (UID: "dd4f2684-a734-4d83-a620-6373fbb5e129"). InnerVolumeSpecName "kube-api-access-pxsx7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.113635 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd4f2684-a734-4d83-a620-6373fbb5e129-scripts" (OuterVolumeSpecName: "scripts") pod "dd4f2684-a734-4d83-a620-6373fbb5e129" (UID: "dd4f2684-a734-4d83-a620-6373fbb5e129"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.142778 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd4f2684-a734-4d83-a620-6373fbb5e129-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "dd4f2684-a734-4d83-a620-6373fbb5e129" (UID: "dd4f2684-a734-4d83-a620-6373fbb5e129"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.184732 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd4f2684-a734-4d83-a620-6373fbb5e129-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dd4f2684-a734-4d83-a620-6373fbb5e129" (UID: "dd4f2684-a734-4d83-a620-6373fbb5e129"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.202836 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd4f2684-a734-4d83-a620-6373fbb5e129-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.202881 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd4f2684-a734-4d83-a620-6373fbb5e129-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.202891 4811 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dd4f2684-a734-4d83-a620-6373fbb5e129-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.202899 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pxsx7\" (UniqueName: \"kubernetes.io/projected/dd4f2684-a734-4d83-a620-6373fbb5e129-kube-api-access-pxsx7\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.214278 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd4f2684-a734-4d83-a620-6373fbb5e129-config-data" (OuterVolumeSpecName: "config-data") pod "dd4f2684-a734-4d83-a620-6373fbb5e129" (UID: "dd4f2684-a734-4d83-a620-6373fbb5e129"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.304264 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd4f2684-a734-4d83-a620-6373fbb5e129-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.561018 4811 generic.go:334] "Generic (PLEG): container finished" podID="dd4f2684-a734-4d83-a620-6373fbb5e129" containerID="8e7c45be7c94bf0644688d878a181c14bcbf647125f4a872f3404048b940ce6f" exitCode=0 Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.561136 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.561154 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd4f2684-a734-4d83-a620-6373fbb5e129","Type":"ContainerDied","Data":"8e7c45be7c94bf0644688d878a181c14bcbf647125f4a872f3404048b940ce6f"} Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.561256 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd4f2684-a734-4d83-a620-6373fbb5e129","Type":"ContainerDied","Data":"fb81a922839078743f8f406998c6a36c789a7a2c9f9a7fb23bbdd8efce7ec8a8"} Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.561315 4811 scope.go:117] "RemoveContainer" containerID="2bf4a19b313e3309c9a1d23244cfb17b0801fdbcdda8f0aa8d6015ac3567fae5" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.584204 4811 scope.go:117] "RemoveContainer" containerID="d8d112b915b6d114b41c0bedfb0587154ee171b8d840019b370c5bdbbf1c0e54" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.593500 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.602996 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.614863 4811 scope.go:117] "RemoveContainer" containerID="d53f663a8a63febf5b49bc6a973feec14dad33ddbc2bad306292200a817ae77f" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.615160 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:08:58 crc kubenswrapper[4811]: E0128 16:08:58.615607 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd4f2684-a734-4d83-a620-6373fbb5e129" containerName="sg-core" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.615628 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd4f2684-a734-4d83-a620-6373fbb5e129" containerName="sg-core" Jan 28 16:08:58 crc kubenswrapper[4811]: E0128 16:08:58.615651 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd4f2684-a734-4d83-a620-6373fbb5e129" containerName="ceilometer-notification-agent" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.615658 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd4f2684-a734-4d83-a620-6373fbb5e129" containerName="ceilometer-notification-agent" Jan 28 16:08:58 crc kubenswrapper[4811]: E0128 16:08:58.615670 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd4f2684-a734-4d83-a620-6373fbb5e129" containerName="ceilometer-central-agent" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.615678 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd4f2684-a734-4d83-a620-6373fbb5e129" containerName="ceilometer-central-agent" Jan 28 16:08:58 crc kubenswrapper[4811]: E0128 16:08:58.615691 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd4f2684-a734-4d83-a620-6373fbb5e129" containerName="proxy-httpd" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.615698 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd4f2684-a734-4d83-a620-6373fbb5e129" containerName="proxy-httpd" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.615905 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd4f2684-a734-4d83-a620-6373fbb5e129" containerName="ceilometer-central-agent" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.615924 4811 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="dd4f2684-a734-4d83-a620-6373fbb5e129" containerName="proxy-httpd" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.615934 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd4f2684-a734-4d83-a620-6373fbb5e129" containerName="ceilometer-notification-agent" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.615945 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd4f2684-a734-4d83-a620-6373fbb5e129" containerName="sg-core" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.617816 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.620882 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.621546 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.639487 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.651389 4811 scope.go:117] "RemoveContainer" containerID="8e7c45be7c94bf0644688d878a181c14bcbf647125f4a872f3404048b940ce6f" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.712877 4811 scope.go:117] "RemoveContainer" containerID="2bf4a19b313e3309c9a1d23244cfb17b0801fdbcdda8f0aa8d6015ac3567fae5" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.713803 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/726e91cb-f20b-4176-bd40-0d2253bb1707-run-httpd\") pod \"ceilometer-0\" (UID: \"726e91cb-f20b-4176-bd40-0d2253bb1707\") " pod="openstack/ceilometer-0" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.713851 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/726e91cb-f20b-4176-bd40-0d2253bb1707-scripts\") pod \"ceilometer-0\" (UID: \"726e91cb-f20b-4176-bd40-0d2253bb1707\") " pod="openstack/ceilometer-0" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.713928 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7549x\" (UniqueName: \"kubernetes.io/projected/726e91cb-f20b-4176-bd40-0d2253bb1707-kube-api-access-7549x\") pod \"ceilometer-0\" (UID: \"726e91cb-f20b-4176-bd40-0d2253bb1707\") " pod="openstack/ceilometer-0" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.713960 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/726e91cb-f20b-4176-bd40-0d2253bb1707-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"726e91cb-f20b-4176-bd40-0d2253bb1707\") " pod="openstack/ceilometer-0" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.713995 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/726e91cb-f20b-4176-bd40-0d2253bb1707-log-httpd\") pod \"ceilometer-0\" (UID: \"726e91cb-f20b-4176-bd40-0d2253bb1707\") " pod="openstack/ceilometer-0" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.714114 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" 
(UniqueName: \"kubernetes.io/secret/726e91cb-f20b-4176-bd40-0d2253bb1707-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"726e91cb-f20b-4176-bd40-0d2253bb1707\") " pod="openstack/ceilometer-0" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.714173 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/726e91cb-f20b-4176-bd40-0d2253bb1707-config-data\") pod \"ceilometer-0\" (UID: \"726e91cb-f20b-4176-bd40-0d2253bb1707\") " pod="openstack/ceilometer-0" Jan 28 16:08:58 crc kubenswrapper[4811]: E0128 16:08:58.714764 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2bf4a19b313e3309c9a1d23244cfb17b0801fdbcdda8f0aa8d6015ac3567fae5\": container with ID starting with 2bf4a19b313e3309c9a1d23244cfb17b0801fdbcdda8f0aa8d6015ac3567fae5 not found: ID does not exist" containerID="2bf4a19b313e3309c9a1d23244cfb17b0801fdbcdda8f0aa8d6015ac3567fae5" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.714806 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2bf4a19b313e3309c9a1d23244cfb17b0801fdbcdda8f0aa8d6015ac3567fae5"} err="failed to get container status \"2bf4a19b313e3309c9a1d23244cfb17b0801fdbcdda8f0aa8d6015ac3567fae5\": rpc error: code = NotFound desc = could not find container \"2bf4a19b313e3309c9a1d23244cfb17b0801fdbcdda8f0aa8d6015ac3567fae5\": container with ID starting with 2bf4a19b313e3309c9a1d23244cfb17b0801fdbcdda8f0aa8d6015ac3567fae5 not found: ID does not exist" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.714834 4811 scope.go:117] "RemoveContainer" containerID="d8d112b915b6d114b41c0bedfb0587154ee171b8d840019b370c5bdbbf1c0e54" Jan 28 16:08:58 crc kubenswrapper[4811]: E0128 16:08:58.715158 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8d112b915b6d114b41c0bedfb0587154ee171b8d840019b370c5bdbbf1c0e54\": container with ID starting with d8d112b915b6d114b41c0bedfb0587154ee171b8d840019b370c5bdbbf1c0e54 not found: ID does not exist" containerID="d8d112b915b6d114b41c0bedfb0587154ee171b8d840019b370c5bdbbf1c0e54" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.715187 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8d112b915b6d114b41c0bedfb0587154ee171b8d840019b370c5bdbbf1c0e54"} err="failed to get container status \"d8d112b915b6d114b41c0bedfb0587154ee171b8d840019b370c5bdbbf1c0e54\": rpc error: code = NotFound desc = could not find container \"d8d112b915b6d114b41c0bedfb0587154ee171b8d840019b370c5bdbbf1c0e54\": container with ID starting with d8d112b915b6d114b41c0bedfb0587154ee171b8d840019b370c5bdbbf1c0e54 not found: ID does not exist" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.715211 4811 scope.go:117] "RemoveContainer" containerID="d53f663a8a63febf5b49bc6a973feec14dad33ddbc2bad306292200a817ae77f" Jan 28 16:08:58 crc kubenswrapper[4811]: E0128 16:08:58.715556 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d53f663a8a63febf5b49bc6a973feec14dad33ddbc2bad306292200a817ae77f\": container with ID starting with d53f663a8a63febf5b49bc6a973feec14dad33ddbc2bad306292200a817ae77f not found: ID does not exist" containerID="d53f663a8a63febf5b49bc6a973feec14dad33ddbc2bad306292200a817ae77f" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 
16:08:58.715583 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d53f663a8a63febf5b49bc6a973feec14dad33ddbc2bad306292200a817ae77f"} err="failed to get container status \"d53f663a8a63febf5b49bc6a973feec14dad33ddbc2bad306292200a817ae77f\": rpc error: code = NotFound desc = could not find container \"d53f663a8a63febf5b49bc6a973feec14dad33ddbc2bad306292200a817ae77f\": container with ID starting with d53f663a8a63febf5b49bc6a973feec14dad33ddbc2bad306292200a817ae77f not found: ID does not exist" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.715598 4811 scope.go:117] "RemoveContainer" containerID="8e7c45be7c94bf0644688d878a181c14bcbf647125f4a872f3404048b940ce6f" Jan 28 16:08:58 crc kubenswrapper[4811]: E0128 16:08:58.715795 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e7c45be7c94bf0644688d878a181c14bcbf647125f4a872f3404048b940ce6f\": container with ID starting with 8e7c45be7c94bf0644688d878a181c14bcbf647125f4a872f3404048b940ce6f not found: ID does not exist" containerID="8e7c45be7c94bf0644688d878a181c14bcbf647125f4a872f3404048b940ce6f" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.715825 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e7c45be7c94bf0644688d878a181c14bcbf647125f4a872f3404048b940ce6f"} err="failed to get container status \"8e7c45be7c94bf0644688d878a181c14bcbf647125f4a872f3404048b940ce6f\": rpc error: code = NotFound desc = could not find container \"8e7c45be7c94bf0644688d878a181c14bcbf647125f4a872f3404048b940ce6f\": container with ID starting with 8e7c45be7c94bf0644688d878a181c14bcbf647125f4a872f3404048b940ce6f not found: ID does not exist" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.815887 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/726e91cb-f20b-4176-bd40-0d2253bb1707-run-httpd\") pod \"ceilometer-0\" (UID: \"726e91cb-f20b-4176-bd40-0d2253bb1707\") " pod="openstack/ceilometer-0" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.815933 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/726e91cb-f20b-4176-bd40-0d2253bb1707-scripts\") pod \"ceilometer-0\" (UID: \"726e91cb-f20b-4176-bd40-0d2253bb1707\") " pod="openstack/ceilometer-0" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.815984 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7549x\" (UniqueName: \"kubernetes.io/projected/726e91cb-f20b-4176-bd40-0d2253bb1707-kube-api-access-7549x\") pod \"ceilometer-0\" (UID: \"726e91cb-f20b-4176-bd40-0d2253bb1707\") " pod="openstack/ceilometer-0" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.816011 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/726e91cb-f20b-4176-bd40-0d2253bb1707-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"726e91cb-f20b-4176-bd40-0d2253bb1707\") " pod="openstack/ceilometer-0" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.816027 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/726e91cb-f20b-4176-bd40-0d2253bb1707-log-httpd\") pod \"ceilometer-0\" (UID: \"726e91cb-f20b-4176-bd40-0d2253bb1707\") " pod="openstack/ceilometer-0" Jan 
28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.816104 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/726e91cb-f20b-4176-bd40-0d2253bb1707-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"726e91cb-f20b-4176-bd40-0d2253bb1707\") " pod="openstack/ceilometer-0" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.816132 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/726e91cb-f20b-4176-bd40-0d2253bb1707-config-data\") pod \"ceilometer-0\" (UID: \"726e91cb-f20b-4176-bd40-0d2253bb1707\") " pod="openstack/ceilometer-0" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.816793 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/726e91cb-f20b-4176-bd40-0d2253bb1707-log-httpd\") pod \"ceilometer-0\" (UID: \"726e91cb-f20b-4176-bd40-0d2253bb1707\") " pod="openstack/ceilometer-0" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.817491 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/726e91cb-f20b-4176-bd40-0d2253bb1707-run-httpd\") pod \"ceilometer-0\" (UID: \"726e91cb-f20b-4176-bd40-0d2253bb1707\") " pod="openstack/ceilometer-0" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.819936 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/726e91cb-f20b-4176-bd40-0d2253bb1707-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"726e91cb-f20b-4176-bd40-0d2253bb1707\") " pod="openstack/ceilometer-0" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.821037 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/726e91cb-f20b-4176-bd40-0d2253bb1707-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"726e91cb-f20b-4176-bd40-0d2253bb1707\") " pod="openstack/ceilometer-0" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.821101 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/726e91cb-f20b-4176-bd40-0d2253bb1707-scripts\") pod \"ceilometer-0\" (UID: \"726e91cb-f20b-4176-bd40-0d2253bb1707\") " pod="openstack/ceilometer-0" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.821857 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/726e91cb-f20b-4176-bd40-0d2253bb1707-config-data\") pod \"ceilometer-0\" (UID: \"726e91cb-f20b-4176-bd40-0d2253bb1707\") " pod="openstack/ceilometer-0" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.834144 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7549x\" (UniqueName: \"kubernetes.io/projected/726e91cb-f20b-4176-bd40-0d2253bb1707-kube-api-access-7549x\") pod \"ceilometer-0\" (UID: \"726e91cb-f20b-4176-bd40-0d2253bb1707\") " pod="openstack/ceilometer-0" Jan 28 16:08:58 crc kubenswrapper[4811]: I0128 16:08:58.986949 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:08:59 crc kubenswrapper[4811]: I0128 16:08:59.476375 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:08:59 crc kubenswrapper[4811]: I0128 16:08:59.570619 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"726e91cb-f20b-4176-bd40-0d2253bb1707","Type":"ContainerStarted","Data":"605ca69fba1dfafb9e19f1ac554d4b94e8804341a000c30a65783368b1edab87"} Jan 28 16:09:00 crc kubenswrapper[4811]: I0128 16:09:00.348709 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd4f2684-a734-4d83-a620-6373fbb5e129" path="/var/lib/kubelet/pods/dd4f2684-a734-4d83-a620-6373fbb5e129/volumes" Jan 28 16:09:00 crc kubenswrapper[4811]: I0128 16:09:00.581054 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"726e91cb-f20b-4176-bd40-0d2253bb1707","Type":"ContainerStarted","Data":"86a9ce1ee6a598f91fa5ddd10125ca218183e8e2571c9c6f19c1ef7c5ca8b01a"} Jan 28 16:09:01 crc kubenswrapper[4811]: I0128 16:09:01.591085 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"726e91cb-f20b-4176-bd40-0d2253bb1707","Type":"ContainerStarted","Data":"b54595b460d03992d41e0f94bc574753ac7714f0e15342526fcdf6512a200762"} Jan 28 16:09:01 crc kubenswrapper[4811]: I0128 16:09:01.591355 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"726e91cb-f20b-4176-bd40-0d2253bb1707","Type":"ContainerStarted","Data":"61d4fb1b858b16fac9b00cd9b6b475e592a7228779167af789965f5956469f50"} Jan 28 16:09:01 crc kubenswrapper[4811]: I0128 16:09:01.592749 4811 generic.go:334] "Generic (PLEG): container finished" podID="10427a85-4d03-4473-8be9-a246485b3594" containerID="0f49c1f408b006371b9d24324b32ed0babd4053c0669c50182a9c6cec0d5e3b3" exitCode=0 Jan 28 16:09:01 crc kubenswrapper[4811]: I0128 16:09:01.592791 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-k98kw" event={"ID":"10427a85-4d03-4473-8be9-a246485b3594","Type":"ContainerDied","Data":"0f49c1f408b006371b9d24324b32ed0babd4053c0669c50182a9c6cec0d5e3b3"} Jan 28 16:09:02 crc kubenswrapper[4811]: I0128 16:09:02.948629 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-k98kw" Jan 28 16:09:02 crc kubenswrapper[4811]: I0128 16:09:02.993307 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10427a85-4d03-4473-8be9-a246485b3594-config-data\") pod \"10427a85-4d03-4473-8be9-a246485b3594\" (UID: \"10427a85-4d03-4473-8be9-a246485b3594\") " Jan 28 16:09:02 crc kubenswrapper[4811]: I0128 16:09:02.993406 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mlpsc\" (UniqueName: \"kubernetes.io/projected/10427a85-4d03-4473-8be9-a246485b3594-kube-api-access-mlpsc\") pod \"10427a85-4d03-4473-8be9-a246485b3594\" (UID: \"10427a85-4d03-4473-8be9-a246485b3594\") " Jan 28 16:09:02 crc kubenswrapper[4811]: I0128 16:09:02.993565 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10427a85-4d03-4473-8be9-a246485b3594-combined-ca-bundle\") pod \"10427a85-4d03-4473-8be9-a246485b3594\" (UID: \"10427a85-4d03-4473-8be9-a246485b3594\") " Jan 28 16:09:02 crc kubenswrapper[4811]: I0128 16:09:02.993608 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/10427a85-4d03-4473-8be9-a246485b3594-scripts\") pod \"10427a85-4d03-4473-8be9-a246485b3594\" (UID: \"10427a85-4d03-4473-8be9-a246485b3594\") " Jan 28 16:09:02 crc kubenswrapper[4811]: I0128 16:09:02.999775 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10427a85-4d03-4473-8be9-a246485b3594-scripts" (OuterVolumeSpecName: "scripts") pod "10427a85-4d03-4473-8be9-a246485b3594" (UID: "10427a85-4d03-4473-8be9-a246485b3594"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:09:03 crc kubenswrapper[4811]: I0128 16:09:03.001289 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10427a85-4d03-4473-8be9-a246485b3594-kube-api-access-mlpsc" (OuterVolumeSpecName: "kube-api-access-mlpsc") pod "10427a85-4d03-4473-8be9-a246485b3594" (UID: "10427a85-4d03-4473-8be9-a246485b3594"). InnerVolumeSpecName "kube-api-access-mlpsc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:09:03 crc kubenswrapper[4811]: E0128 16:09:03.019496 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/10427a85-4d03-4473-8be9-a246485b3594-config-data podName:10427a85-4d03-4473-8be9-a246485b3594 nodeName:}" failed. No retries permitted until 2026-01-28 16:09:03.519465958 +0000 UTC m=+1436.273829541 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "config-data" (UniqueName: "kubernetes.io/secret/10427a85-4d03-4473-8be9-a246485b3594-config-data") pod "10427a85-4d03-4473-8be9-a246485b3594" (UID: "10427a85-4d03-4473-8be9-a246485b3594") : error deleting /var/lib/kubelet/pods/10427a85-4d03-4473-8be9-a246485b3594/volume-subpaths: remove /var/lib/kubelet/pods/10427a85-4d03-4473-8be9-a246485b3594/volume-subpaths: no such file or directory Jan 28 16:09:03 crc kubenswrapper[4811]: I0128 16:09:03.023500 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10427a85-4d03-4473-8be9-a246485b3594-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "10427a85-4d03-4473-8be9-a246485b3594" (UID: "10427a85-4d03-4473-8be9-a246485b3594"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:09:03 crc kubenswrapper[4811]: I0128 16:09:03.087601 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 16:09:03 crc kubenswrapper[4811]: I0128 16:09:03.087659 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 16:09:03 crc kubenswrapper[4811]: I0128 16:09:03.095912 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10427a85-4d03-4473-8be9-a246485b3594-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:03 crc kubenswrapper[4811]: I0128 16:09:03.095942 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/10427a85-4d03-4473-8be9-a246485b3594-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:03 crc kubenswrapper[4811]: I0128 16:09:03.095954 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mlpsc\" (UniqueName: \"kubernetes.io/projected/10427a85-4d03-4473-8be9-a246485b3594-kube-api-access-mlpsc\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:03 crc kubenswrapper[4811]: I0128 16:09:03.606037 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10427a85-4d03-4473-8be9-a246485b3594-config-data\") pod \"10427a85-4d03-4473-8be9-a246485b3594\" (UID: \"10427a85-4d03-4473-8be9-a246485b3594\") " Jan 28 16:09:03 crc kubenswrapper[4811]: I0128 16:09:03.615389 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-k98kw" event={"ID":"10427a85-4d03-4473-8be9-a246485b3594","Type":"ContainerDied","Data":"a302b3559f1b8c1362f907aeb413192a5dcbf5f2d2f0189f42aba098c942a3d8"} Jan 28 16:09:03 crc kubenswrapper[4811]: I0128 16:09:03.615457 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a302b3559f1b8c1362f907aeb413192a5dcbf5f2d2f0189f42aba098c942a3d8" Jan 28 16:09:03 crc kubenswrapper[4811]: I0128 16:09:03.615527 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-k98kw" Jan 28 16:09:03 crc kubenswrapper[4811]: I0128 16:09:03.622118 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10427a85-4d03-4473-8be9-a246485b3594-config-data" (OuterVolumeSpecName: "config-data") pod "10427a85-4d03-4473-8be9-a246485b3594" (UID: "10427a85-4d03-4473-8be9-a246485b3594"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:09:03 crc kubenswrapper[4811]: I0128 16:09:03.708108 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10427a85-4d03-4473-8be9-a246485b3594-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:03 crc kubenswrapper[4811]: I0128 16:09:03.753997 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 28 16:09:03 crc kubenswrapper[4811]: E0128 16:09:03.754891 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10427a85-4d03-4473-8be9-a246485b3594" containerName="nova-cell0-conductor-db-sync" Jan 28 16:09:03 crc kubenswrapper[4811]: I0128 16:09:03.754917 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="10427a85-4d03-4473-8be9-a246485b3594" containerName="nova-cell0-conductor-db-sync" Jan 28 16:09:03 crc kubenswrapper[4811]: I0128 16:09:03.755647 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="10427a85-4d03-4473-8be9-a246485b3594" containerName="nova-cell0-conductor-db-sync" Jan 28 16:09:03 crc kubenswrapper[4811]: I0128 16:09:03.757003 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 28 16:09:03 crc kubenswrapper[4811]: I0128 16:09:03.771955 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 28 16:09:03 crc kubenswrapper[4811]: I0128 16:09:03.809981 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpvfr\" (UniqueName: \"kubernetes.io/projected/f76f6c0a-7254-4c76-86c3-e1004cf38303-kube-api-access-gpvfr\") pod \"nova-cell0-conductor-0\" (UID: \"f76f6c0a-7254-4c76-86c3-e1004cf38303\") " pod="openstack/nova-cell0-conductor-0" Jan 28 16:09:03 crc kubenswrapper[4811]: I0128 16:09:03.810130 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f76f6c0a-7254-4c76-86c3-e1004cf38303-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"f76f6c0a-7254-4c76-86c3-e1004cf38303\") " pod="openstack/nova-cell0-conductor-0" Jan 28 16:09:03 crc kubenswrapper[4811]: I0128 16:09:03.810226 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f76f6c0a-7254-4c76-86c3-e1004cf38303-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"f76f6c0a-7254-4c76-86c3-e1004cf38303\") " pod="openstack/nova-cell0-conductor-0" Jan 28 16:09:03 crc kubenswrapper[4811]: I0128 16:09:03.912251 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f76f6c0a-7254-4c76-86c3-e1004cf38303-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"f76f6c0a-7254-4c76-86c3-e1004cf38303\") " pod="openstack/nova-cell0-conductor-0" Jan 28 16:09:03 crc kubenswrapper[4811]: I0128 16:09:03.912684 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f76f6c0a-7254-4c76-86c3-e1004cf38303-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"f76f6c0a-7254-4c76-86c3-e1004cf38303\") " pod="openstack/nova-cell0-conductor-0" Jan 28 16:09:03 crc kubenswrapper[4811]: I0128 16:09:03.912801 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-gpvfr\" (UniqueName: \"kubernetes.io/projected/f76f6c0a-7254-4c76-86c3-e1004cf38303-kube-api-access-gpvfr\") pod \"nova-cell0-conductor-0\" (UID: \"f76f6c0a-7254-4c76-86c3-e1004cf38303\") " pod="openstack/nova-cell0-conductor-0" Jan 28 16:09:03 crc kubenswrapper[4811]: I0128 16:09:03.920216 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f76f6c0a-7254-4c76-86c3-e1004cf38303-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"f76f6c0a-7254-4c76-86c3-e1004cf38303\") " pod="openstack/nova-cell0-conductor-0" Jan 28 16:09:03 crc kubenswrapper[4811]: I0128 16:09:03.921268 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f76f6c0a-7254-4c76-86c3-e1004cf38303-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"f76f6c0a-7254-4c76-86c3-e1004cf38303\") " pod="openstack/nova-cell0-conductor-0" Jan 28 16:09:03 crc kubenswrapper[4811]: I0128 16:09:03.934273 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpvfr\" (UniqueName: \"kubernetes.io/projected/f76f6c0a-7254-4c76-86c3-e1004cf38303-kube-api-access-gpvfr\") pod \"nova-cell0-conductor-0\" (UID: \"f76f6c0a-7254-4c76-86c3-e1004cf38303\") " pod="openstack/nova-cell0-conductor-0" Jan 28 16:09:04 crc kubenswrapper[4811]: I0128 16:09:04.130956 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 28 16:09:04 crc kubenswrapper[4811]: I0128 16:09:04.626849 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"726e91cb-f20b-4176-bd40-0d2253bb1707","Type":"ContainerStarted","Data":"db4969e7eae14d649bae46a40da595e17d04d84ff57158f7ede3e074c7f88727"} Jan 28 16:09:04 crc kubenswrapper[4811]: I0128 16:09:04.628135 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 28 16:09:04 crc kubenswrapper[4811]: I0128 16:09:04.654847 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 28 16:09:04 crc kubenswrapper[4811]: W0128 16:09:04.660383 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf76f6c0a_7254_4c76_86c3_e1004cf38303.slice/crio-9161aabcf2a1884c4fdb16c359c2594471405799187bda220493499b15fcb2c3 WatchSource:0}: Error finding container 9161aabcf2a1884c4fdb16c359c2594471405799187bda220493499b15fcb2c3: Status 404 returned error can't find the container with id 9161aabcf2a1884c4fdb16c359c2594471405799187bda220493499b15fcb2c3 Jan 28 16:09:04 crc kubenswrapper[4811]: I0128 16:09:04.666721 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.497967799 podStartE2EDuration="6.666695705s" podCreationTimestamp="2026-01-28 16:08:58 +0000 UTC" firstStartedPulling="2026-01-28 16:08:59.468475604 +0000 UTC m=+1432.222839187" lastFinishedPulling="2026-01-28 16:09:03.63720351 +0000 UTC m=+1436.391567093" observedRunningTime="2026-01-28 16:09:04.656802807 +0000 UTC m=+1437.411166390" watchObservedRunningTime="2026-01-28 16:09:04.666695705 +0000 UTC m=+1437.421059288" Jan 28 16:09:05 crc kubenswrapper[4811]: I0128 16:09:05.636384 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" 
event={"ID":"f76f6c0a-7254-4c76-86c3-e1004cf38303","Type":"ContainerStarted","Data":"8e97f5379d59be0d7d51607bfdc713017cc666c439edf2b55720cc1ee7a94ec6"} Jan 28 16:09:05 crc kubenswrapper[4811]: I0128 16:09:05.636723 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"f76f6c0a-7254-4c76-86c3-e1004cf38303","Type":"ContainerStarted","Data":"9161aabcf2a1884c4fdb16c359c2594471405799187bda220493499b15fcb2c3"} Jan 28 16:09:05 crc kubenswrapper[4811]: I0128 16:09:05.637142 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Jan 28 16:09:05 crc kubenswrapper[4811]: I0128 16:09:05.657692 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.657674694 podStartE2EDuration="2.657674694s" podCreationTimestamp="2026-01-28 16:09:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:09:05.649150223 +0000 UTC m=+1438.403513806" watchObservedRunningTime="2026-01-28 16:09:05.657674694 +0000 UTC m=+1438.412038277" Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.155025 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.659484 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-vmzwm"] Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.661061 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-vmzwm" Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.666987 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.667145 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.672392 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-vmzwm"] Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.722507 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00b71d9-55f0-4ce4-9c27-1471372ef467-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-vmzwm\" (UID: \"d00b71d9-55f0-4ce4-9c27-1471372ef467\") " pod="openstack/nova-cell0-cell-mapping-vmzwm" Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.722575 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d00b71d9-55f0-4ce4-9c27-1471372ef467-scripts\") pod \"nova-cell0-cell-mapping-vmzwm\" (UID: \"d00b71d9-55f0-4ce4-9c27-1471372ef467\") " pod="openstack/nova-cell0-cell-mapping-vmzwm" Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.722618 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d00b71d9-55f0-4ce4-9c27-1471372ef467-config-data\") pod \"nova-cell0-cell-mapping-vmzwm\" (UID: \"d00b71d9-55f0-4ce4-9c27-1471372ef467\") " pod="openstack/nova-cell0-cell-mapping-vmzwm" Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.722780 4811 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t92rw\" (UniqueName: \"kubernetes.io/projected/d00b71d9-55f0-4ce4-9c27-1471372ef467-kube-api-access-t92rw\") pod \"nova-cell0-cell-mapping-vmzwm\" (UID: \"d00b71d9-55f0-4ce4-9c27-1471372ef467\") " pod="openstack/nova-cell0-cell-mapping-vmzwm" Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.824719 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t92rw\" (UniqueName: \"kubernetes.io/projected/d00b71d9-55f0-4ce4-9c27-1471372ef467-kube-api-access-t92rw\") pod \"nova-cell0-cell-mapping-vmzwm\" (UID: \"d00b71d9-55f0-4ce4-9c27-1471372ef467\") " pod="openstack/nova-cell0-cell-mapping-vmzwm" Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.824781 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00b71d9-55f0-4ce4-9c27-1471372ef467-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-vmzwm\" (UID: \"d00b71d9-55f0-4ce4-9c27-1471372ef467\") " pod="openstack/nova-cell0-cell-mapping-vmzwm" Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.824810 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d00b71d9-55f0-4ce4-9c27-1471372ef467-scripts\") pod \"nova-cell0-cell-mapping-vmzwm\" (UID: \"d00b71d9-55f0-4ce4-9c27-1471372ef467\") " pod="openstack/nova-cell0-cell-mapping-vmzwm" Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.824837 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d00b71d9-55f0-4ce4-9c27-1471372ef467-config-data\") pod \"nova-cell0-cell-mapping-vmzwm\" (UID: \"d00b71d9-55f0-4ce4-9c27-1471372ef467\") " pod="openstack/nova-cell0-cell-mapping-vmzwm" Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.832464 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d00b71d9-55f0-4ce4-9c27-1471372ef467-config-data\") pod \"nova-cell0-cell-mapping-vmzwm\" (UID: \"d00b71d9-55f0-4ce4-9c27-1471372ef467\") " pod="openstack/nova-cell0-cell-mapping-vmzwm" Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.841096 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00b71d9-55f0-4ce4-9c27-1471372ef467-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-vmzwm\" (UID: \"d00b71d9-55f0-4ce4-9c27-1471372ef467\") " pod="openstack/nova-cell0-cell-mapping-vmzwm" Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.845830 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t92rw\" (UniqueName: \"kubernetes.io/projected/d00b71d9-55f0-4ce4-9c27-1471372ef467-kube-api-access-t92rw\") pod \"nova-cell0-cell-mapping-vmzwm\" (UID: \"d00b71d9-55f0-4ce4-9c27-1471372ef467\") " pod="openstack/nova-cell0-cell-mapping-vmzwm" Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.846979 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d00b71d9-55f0-4ce4-9c27-1471372ef467-scripts\") pod \"nova-cell0-cell-mapping-vmzwm\" (UID: \"d00b71d9-55f0-4ce4-9c27-1471372ef467\") " pod="openstack/nova-cell0-cell-mapping-vmzwm" Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.878157 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] 
Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.880312 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.883745 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.885994 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.887346 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.891871 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.924617 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.927506 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e0bf01f-9db1-413a-800b-feaf02565788-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"4e0bf01f-9db1-413a-800b-feaf02565788\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.927583 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfdtn\" (UniqueName: \"kubernetes.io/projected/4e0bf01f-9db1-413a-800b-feaf02565788-kube-api-access-lfdtn\") pod \"nova-cell1-novncproxy-0\" (UID: \"4e0bf01f-9db1-413a-800b-feaf02565788\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.927610 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmnqb\" (UniqueName: \"kubernetes.io/projected/e62bbb8a-9beb-41da-96c2-c81d6ba94f80-kube-api-access-kmnqb\") pod \"nova-api-0\" (UID: \"e62bbb8a-9beb-41da-96c2-c81d6ba94f80\") " pod="openstack/nova-api-0"
Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.927638 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e62bbb8a-9beb-41da-96c2-c81d6ba94f80-config-data\") pod \"nova-api-0\" (UID: \"e62bbb8a-9beb-41da-96c2-c81d6ba94f80\") " pod="openstack/nova-api-0"
Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.927655 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e62bbb8a-9beb-41da-96c2-c81d6ba94f80-logs\") pod \"nova-api-0\" (UID: \"e62bbb8a-9beb-41da-96c2-c81d6ba94f80\") " pod="openstack/nova-api-0"
Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.927697 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e0bf01f-9db1-413a-800b-feaf02565788-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"4e0bf01f-9db1-413a-800b-feaf02565788\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.927775 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e62bbb8a-9beb-41da-96c2-c81d6ba94f80-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e62bbb8a-9beb-41da-96c2-c81d6ba94f80\") " pod="openstack/nova-api-0"
Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.944698 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.980284 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.983733 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.987405 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-vmzwm"
Jan 28 16:09:09 crc kubenswrapper[4811]: I0128 16:09:09.990785 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.038405 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e0bf01f-9db1-413a-800b-feaf02565788-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"4e0bf01f-9db1-413a-800b-feaf02565788\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.038494 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfdtn\" (UniqueName: \"kubernetes.io/projected/4e0bf01f-9db1-413a-800b-feaf02565788-kube-api-access-lfdtn\") pod \"nova-cell1-novncproxy-0\" (UID: \"4e0bf01f-9db1-413a-800b-feaf02565788\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.038527 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmnqb\" (UniqueName: \"kubernetes.io/projected/e62bbb8a-9beb-41da-96c2-c81d6ba94f80-kube-api-access-kmnqb\") pod \"nova-api-0\" (UID: \"e62bbb8a-9beb-41da-96c2-c81d6ba94f80\") " pod="openstack/nova-api-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.038556 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e62bbb8a-9beb-41da-96c2-c81d6ba94f80-config-data\") pod \"nova-api-0\" (UID: \"e62bbb8a-9beb-41da-96c2-c81d6ba94f80\") " pod="openstack/nova-api-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.038582 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e62bbb8a-9beb-41da-96c2-c81d6ba94f80-logs\") pod \"nova-api-0\" (UID: \"e62bbb8a-9beb-41da-96c2-c81d6ba94f80\") " pod="openstack/nova-api-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.038630 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e0bf01f-9db1-413a-800b-feaf02565788-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"4e0bf01f-9db1-413a-800b-feaf02565788\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.038714 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ba207bf-701f-4044-bd7e-5cd668e1dcf6-config-data\") pod \"nova-metadata-0\" (UID: \"8ba207bf-701f-4044-bd7e-5cd668e1dcf6\") " pod="openstack/nova-metadata-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.038768 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e62bbb8a-9beb-41da-96c2-c81d6ba94f80-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e62bbb8a-9beb-41da-96c2-c81d6ba94f80\") " pod="openstack/nova-api-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.038859 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ba207bf-701f-4044-bd7e-5cd668e1dcf6-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8ba207bf-701f-4044-bd7e-5cd668e1dcf6\") " pod="openstack/nova-metadata-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.038900 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2sn9\" (UniqueName: \"kubernetes.io/projected/8ba207bf-701f-4044-bd7e-5cd668e1dcf6-kube-api-access-f2sn9\") pod \"nova-metadata-0\" (UID: \"8ba207bf-701f-4044-bd7e-5cd668e1dcf6\") " pod="openstack/nova-metadata-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.038932 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ba207bf-701f-4044-bd7e-5cd668e1dcf6-logs\") pod \"nova-metadata-0\" (UID: \"8ba207bf-701f-4044-bd7e-5cd668e1dcf6\") " pod="openstack/nova-metadata-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.083392 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.088202 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e62bbb8a-9beb-41da-96c2-c81d6ba94f80-logs\") pod \"nova-api-0\" (UID: \"e62bbb8a-9beb-41da-96c2-c81d6ba94f80\") " pod="openstack/nova-api-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.090679 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e0bf01f-9db1-413a-800b-feaf02565788-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"4e0bf01f-9db1-413a-800b-feaf02565788\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.095632 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e0bf01f-9db1-413a-800b-feaf02565788-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"4e0bf01f-9db1-413a-800b-feaf02565788\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.098412 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmnqb\" (UniqueName: \"kubernetes.io/projected/e62bbb8a-9beb-41da-96c2-c81d6ba94f80-kube-api-access-kmnqb\") pod \"nova-api-0\" (UID: \"e62bbb8a-9beb-41da-96c2-c81d6ba94f80\") " pod="openstack/nova-api-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.098871 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e62bbb8a-9beb-41da-96c2-c81d6ba94f80-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e62bbb8a-9beb-41da-96c2-c81d6ba94f80\") " pod="openstack/nova-api-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.108207 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e62bbb8a-9beb-41da-96c2-c81d6ba94f80-config-data\") pod \"nova-api-0\" (UID: \"e62bbb8a-9beb-41da-96c2-c81d6ba94f80\") " pod="openstack/nova-api-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.122133 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfdtn\" (UniqueName: \"kubernetes.io/projected/4e0bf01f-9db1-413a-800b-feaf02565788-kube-api-access-lfdtn\") pod \"nova-cell1-novncproxy-0\" (UID: \"4e0bf01f-9db1-413a-800b-feaf02565788\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.181376 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ba207bf-701f-4044-bd7e-5cd668e1dcf6-config-data\") pod \"nova-metadata-0\" (UID: \"8ba207bf-701f-4044-bd7e-5cd668e1dcf6\") " pod="openstack/nova-metadata-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.181561 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ba207bf-701f-4044-bd7e-5cd668e1dcf6-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8ba207bf-701f-4044-bd7e-5cd668e1dcf6\") " pod="openstack/nova-metadata-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.181609 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2sn9\" (UniqueName: \"kubernetes.io/projected/8ba207bf-701f-4044-bd7e-5cd668e1dcf6-kube-api-access-f2sn9\") pod \"nova-metadata-0\" (UID: \"8ba207bf-701f-4044-bd7e-5cd668e1dcf6\") " pod="openstack/nova-metadata-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.181633 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ba207bf-701f-4044-bd7e-5cd668e1dcf6-logs\") pod \"nova-metadata-0\" (UID: \"8ba207bf-701f-4044-bd7e-5cd668e1dcf6\") " pod="openstack/nova-metadata-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.182482 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ba207bf-701f-4044-bd7e-5cd668e1dcf6-logs\") pod \"nova-metadata-0\" (UID: \"8ba207bf-701f-4044-bd7e-5cd668e1dcf6\") " pod="openstack/nova-metadata-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.198353 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ba207bf-701f-4044-bd7e-5cd668e1dcf6-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8ba207bf-701f-4044-bd7e-5cd668e1dcf6\") " pod="openstack/nova-metadata-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.198887 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ba207bf-701f-4044-bd7e-5cd668e1dcf6-config-data\") pod \"nova-metadata-0\" (UID: \"8ba207bf-701f-4044-bd7e-5cd668e1dcf6\") " pod="openstack/nova-metadata-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.238381 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.240033 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.257655 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2sn9\" (UniqueName: \"kubernetes.io/projected/8ba207bf-701f-4044-bd7e-5cd668e1dcf6-kube-api-access-f2sn9\") pod \"nova-metadata-0\" (UID: \"8ba207bf-701f-4044-bd7e-5cd668e1dcf6\") " pod="openstack/nova-metadata-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.264629 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.275029 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.287693 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.288195 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.389499 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8qqht\" (UniqueName: \"kubernetes.io/projected/8088f76a-3481-4874-9ec2-b392869e4600-kube-api-access-8qqht\") pod \"nova-scheduler-0\" (UID: \"8088f76a-3481-4874-9ec2-b392869e4600\") " pod="openstack/nova-scheduler-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.409603 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8088f76a-3481-4874-9ec2-b392869e4600-config-data\") pod \"nova-scheduler-0\" (UID: \"8088f76a-3481-4874-9ec2-b392869e4600\") " pod="openstack/nova-scheduler-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.409904 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8088f76a-3481-4874-9ec2-b392869e4600-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8088f76a-3481-4874-9ec2-b392869e4600\") " pod="openstack/nova-scheduler-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.399734 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5bfb54f9b5-4f9jb"]
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.419194 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bfb54f9b5-4f9jb"]
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.419585 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bfb54f9b5-4f9jb"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.511949 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8088f76a-3481-4874-9ec2-b392869e4600-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8088f76a-3481-4874-9ec2-b392869e4600\") " pod="openstack/nova-scheduler-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.512465 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8qqht\" (UniqueName: \"kubernetes.io/projected/8088f76a-3481-4874-9ec2-b392869e4600-kube-api-access-8qqht\") pod \"nova-scheduler-0\" (UID: \"8088f76a-3481-4874-9ec2-b392869e4600\") " pod="openstack/nova-scheduler-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.512584 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8088f76a-3481-4874-9ec2-b392869e4600-config-data\") pod \"nova-scheduler-0\" (UID: \"8088f76a-3481-4874-9ec2-b392869e4600\") " pod="openstack/nova-scheduler-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.518570 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8088f76a-3481-4874-9ec2-b392869e4600-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8088f76a-3481-4874-9ec2-b392869e4600\") " pod="openstack/nova-scheduler-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.519922 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8088f76a-3481-4874-9ec2-b392869e4600-config-data\") pod \"nova-scheduler-0\" (UID: \"8088f76a-3481-4874-9ec2-b392869e4600\") " pod="openstack/nova-scheduler-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.521675 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.540138 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8qqht\" (UniqueName: \"kubernetes.io/projected/8088f76a-3481-4874-9ec2-b392869e4600-kube-api-access-8qqht\") pod \"nova-scheduler-0\" (UID: \"8088f76a-3481-4874-9ec2-b392869e4600\") " pod="openstack/nova-scheduler-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.600680 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.614936 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4a4b149-cbd4-4b98-8b5d-9585a386f681-config\") pod \"dnsmasq-dns-5bfb54f9b5-4f9jb\" (UID: \"c4a4b149-cbd4-4b98-8b5d-9585a386f681\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-4f9jb"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.615337 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c4a4b149-cbd4-4b98-8b5d-9585a386f681-dns-swift-storage-0\") pod \"dnsmasq-dns-5bfb54f9b5-4f9jb\" (UID: \"c4a4b149-cbd4-4b98-8b5d-9585a386f681\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-4f9jb"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.615471 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c4a4b149-cbd4-4b98-8b5d-9585a386f681-ovsdbserver-nb\") pod \"dnsmasq-dns-5bfb54f9b5-4f9jb\" (UID: \"c4a4b149-cbd4-4b98-8b5d-9585a386f681\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-4f9jb"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.615497 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c4a4b149-cbd4-4b98-8b5d-9585a386f681-ovsdbserver-sb\") pod \"dnsmasq-dns-5bfb54f9b5-4f9jb\" (UID: \"c4a4b149-cbd4-4b98-8b5d-9585a386f681\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-4f9jb"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.615512 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6x9ss\" (UniqueName: \"kubernetes.io/projected/c4a4b149-cbd4-4b98-8b5d-9585a386f681-kube-api-access-6x9ss\") pod \"dnsmasq-dns-5bfb54f9b5-4f9jb\" (UID: \"c4a4b149-cbd4-4b98-8b5d-9585a386f681\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-4f9jb"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.615652 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4a4b149-cbd4-4b98-8b5d-9585a386f681-dns-svc\") pod \"dnsmasq-dns-5bfb54f9b5-4f9jb\" (UID: \"c4a4b149-cbd4-4b98-8b5d-9585a386f681\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-4f9jb"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.719515 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4a4b149-cbd4-4b98-8b5d-9585a386f681-dns-svc\") pod \"dnsmasq-dns-5bfb54f9b5-4f9jb\" (UID: \"c4a4b149-cbd4-4b98-8b5d-9585a386f681\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-4f9jb"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.719579 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4a4b149-cbd4-4b98-8b5d-9585a386f681-config\") pod \"dnsmasq-dns-5bfb54f9b5-4f9jb\" (UID: \"c4a4b149-cbd4-4b98-8b5d-9585a386f681\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-4f9jb"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.719610 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c4a4b149-cbd4-4b98-8b5d-9585a386f681-dns-swift-storage-0\") pod \"dnsmasq-dns-5bfb54f9b5-4f9jb\" (UID: \"c4a4b149-cbd4-4b98-8b5d-9585a386f681\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-4f9jb"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.719639 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c4a4b149-cbd4-4b98-8b5d-9585a386f681-ovsdbserver-nb\") pod \"dnsmasq-dns-5bfb54f9b5-4f9jb\" (UID: \"c4a4b149-cbd4-4b98-8b5d-9585a386f681\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-4f9jb"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.719658 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c4a4b149-cbd4-4b98-8b5d-9585a386f681-ovsdbserver-sb\") pod \"dnsmasq-dns-5bfb54f9b5-4f9jb\" (UID: \"c4a4b149-cbd4-4b98-8b5d-9585a386f681\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-4f9jb"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.719674 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6x9ss\" (UniqueName: \"kubernetes.io/projected/c4a4b149-cbd4-4b98-8b5d-9585a386f681-kube-api-access-6x9ss\") pod \"dnsmasq-dns-5bfb54f9b5-4f9jb\" (UID: \"c4a4b149-cbd4-4b98-8b5d-9585a386f681\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-4f9jb"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.720738 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c4a4b149-cbd4-4b98-8b5d-9585a386f681-ovsdbserver-nb\") pod \"dnsmasq-dns-5bfb54f9b5-4f9jb\" (UID: \"c4a4b149-cbd4-4b98-8b5d-9585a386f681\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-4f9jb"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.720810 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c4a4b149-cbd4-4b98-8b5d-9585a386f681-dns-swift-storage-0\") pod \"dnsmasq-dns-5bfb54f9b5-4f9jb\" (UID: \"c4a4b149-cbd4-4b98-8b5d-9585a386f681\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-4f9jb"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.721329 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4a4b149-cbd4-4b98-8b5d-9585a386f681-dns-svc\") pod \"dnsmasq-dns-5bfb54f9b5-4f9jb\" (UID: \"c4a4b149-cbd4-4b98-8b5d-9585a386f681\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-4f9jb"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.721359 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c4a4b149-cbd4-4b98-8b5d-9585a386f681-ovsdbserver-sb\") pod \"dnsmasq-dns-5bfb54f9b5-4f9jb\" (UID: \"c4a4b149-cbd4-4b98-8b5d-9585a386f681\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-4f9jb"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.722358 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4a4b149-cbd4-4b98-8b5d-9585a386f681-config\") pod \"dnsmasq-dns-5bfb54f9b5-4f9jb\" (UID: \"c4a4b149-cbd4-4b98-8b5d-9585a386f681\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-4f9jb"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.748202 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6x9ss\" (UniqueName: \"kubernetes.io/projected/c4a4b149-cbd4-4b98-8b5d-9585a386f681-kube-api-access-6x9ss\") pod \"dnsmasq-dns-5bfb54f9b5-4f9jb\" (UID: \"c4a4b149-cbd4-4b98-8b5d-9585a386f681\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-4f9jb"
Jan 28 16:09:10 crc kubenswrapper[4811]: W0128 16:09:10.770075 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd00b71d9_55f0_4ce4_9c27_1471372ef467.slice/crio-cbdfd6494b8b3c8e1df95a53c8c809c47d3af989bd91517b26ffa30fc0173a74 WatchSource:0}: Error finding container cbdfd6494b8b3c8e1df95a53c8c809c47d3af989bd91517b26ffa30fc0173a74: Status 404 returned error can't find the container with id cbdfd6494b8b3c8e1df95a53c8c809c47d3af989bd91517b26ffa30fc0173a74
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.771191 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-vmzwm"]
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.809169 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bfb54f9b5-4f9jb"
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.968761 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Jan 28 16:09:10 crc kubenswrapper[4811]: I0128 16:09:10.996500 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Jan 28 16:09:11 crc kubenswrapper[4811]: I0128 16:09:11.179307 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Jan 28 16:09:11 crc kubenswrapper[4811]: I0128 16:09:11.195579 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-7kgb2"]
Jan 28 16:09:11 crc kubenswrapper[4811]: I0128 16:09:11.196930 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-7kgb2"
Jan 28 16:09:11 crc kubenswrapper[4811]: I0128 16:09:11.199971 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Jan 28 16:09:11 crc kubenswrapper[4811]: I0128 16:09:11.200339 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts"
Jan 28 16:09:11 crc kubenswrapper[4811]: I0128 16:09:11.224159 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-7kgb2"]
Jan 28 16:09:11 crc kubenswrapper[4811]: I0128 16:09:11.230058 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d41fbd0a-fb75-4597-a9ed-0dff56b7b635-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-7kgb2\" (UID: \"d41fbd0a-fb75-4597-a9ed-0dff56b7b635\") " pod="openstack/nova-cell1-conductor-db-sync-7kgb2"
Jan 28 16:09:11 crc kubenswrapper[4811]: I0128 16:09:11.230161 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d41fbd0a-fb75-4597-a9ed-0dff56b7b635-scripts\") pod \"nova-cell1-conductor-db-sync-7kgb2\" (UID: \"d41fbd0a-fb75-4597-a9ed-0dff56b7b635\") " pod="openstack/nova-cell1-conductor-db-sync-7kgb2"
Jan 28 16:09:11 crc kubenswrapper[4811]: I0128 16:09:11.230270 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnkh7\" (UniqueName: \"kubernetes.io/projected/d41fbd0a-fb75-4597-a9ed-0dff56b7b635-kube-api-access-xnkh7\") pod \"nova-cell1-conductor-db-sync-7kgb2\" (UID: \"d41fbd0a-fb75-4597-a9ed-0dff56b7b635\") " pod="openstack/nova-cell1-conductor-db-sync-7kgb2"
Jan 28 16:09:11 crc kubenswrapper[4811]: I0128 16:09:11.230337 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d41fbd0a-fb75-4597-a9ed-0dff56b7b635-config-data\") pod \"nova-cell1-conductor-db-sync-7kgb2\" (UID: \"d41fbd0a-fb75-4597-a9ed-0dff56b7b635\") " pod="openstack/nova-cell1-conductor-db-sync-7kgb2"
Jan 28 16:09:11 crc kubenswrapper[4811]: I0128 16:09:11.331974 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d41fbd0a-fb75-4597-a9ed-0dff56b7b635-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-7kgb2\" (UID: \"d41fbd0a-fb75-4597-a9ed-0dff56b7b635\") " pod="openstack/nova-cell1-conductor-db-sync-7kgb2"
Jan 28 16:09:11 crc kubenswrapper[4811]: I0128 16:09:11.332294 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d41fbd0a-fb75-4597-a9ed-0dff56b7b635-scripts\") pod \"nova-cell1-conductor-db-sync-7kgb2\" (UID: \"d41fbd0a-fb75-4597-a9ed-0dff56b7b635\") " pod="openstack/nova-cell1-conductor-db-sync-7kgb2"
Jan 28 16:09:11 crc kubenswrapper[4811]: I0128 16:09:11.332453 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnkh7\" (UniqueName: \"kubernetes.io/projected/d41fbd0a-fb75-4597-a9ed-0dff56b7b635-kube-api-access-xnkh7\") pod \"nova-cell1-conductor-db-sync-7kgb2\" (UID: \"d41fbd0a-fb75-4597-a9ed-0dff56b7b635\") " pod="openstack/nova-cell1-conductor-db-sync-7kgb2"
Jan 28 16:09:11 crc kubenswrapper[4811]: I0128 16:09:11.332575 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d41fbd0a-fb75-4597-a9ed-0dff56b7b635-config-data\") pod \"nova-cell1-conductor-db-sync-7kgb2\" (UID: \"d41fbd0a-fb75-4597-a9ed-0dff56b7b635\") " pod="openstack/nova-cell1-conductor-db-sync-7kgb2"
Jan 28 16:09:11 crc kubenswrapper[4811]: I0128 16:09:11.337486 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d41fbd0a-fb75-4597-a9ed-0dff56b7b635-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-7kgb2\" (UID: \"d41fbd0a-fb75-4597-a9ed-0dff56b7b635\") " pod="openstack/nova-cell1-conductor-db-sync-7kgb2"
Jan 28 16:09:11 crc kubenswrapper[4811]: I0128 16:09:11.337558 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d41fbd0a-fb75-4597-a9ed-0dff56b7b635-config-data\") pod \"nova-cell1-conductor-db-sync-7kgb2\" (UID: \"d41fbd0a-fb75-4597-a9ed-0dff56b7b635\") " pod="openstack/nova-cell1-conductor-db-sync-7kgb2"
Jan 28 16:09:11 crc kubenswrapper[4811]: I0128 16:09:11.337879 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d41fbd0a-fb75-4597-a9ed-0dff56b7b635-scripts\") pod \"nova-cell1-conductor-db-sync-7kgb2\" (UID: \"d41fbd0a-fb75-4597-a9ed-0dff56b7b635\") " pod="openstack/nova-cell1-conductor-db-sync-7kgb2"
Jan 28 16:09:11 crc kubenswrapper[4811]: W0128 16:09:11.352772 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8088f76a_3481_4874_9ec2_b392869e4600.slice/crio-ec5801da1a666e108dbb8d55df3f3691c3ad2811c5246c2acc79978182258498 WatchSource:0}: Error finding container ec5801da1a666e108dbb8d55df3f3691c3ad2811c5246c2acc79978182258498: Status 404 returned error can't find the container with id ec5801da1a666e108dbb8d55df3f3691c3ad2811c5246c2acc79978182258498
Jan 28 16:09:11 crc kubenswrapper[4811]: I0128 16:09:11.352894 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnkh7\" (UniqueName: \"kubernetes.io/projected/d41fbd0a-fb75-4597-a9ed-0dff56b7b635-kube-api-access-xnkh7\") pod \"nova-cell1-conductor-db-sync-7kgb2\" (UID: \"d41fbd0a-fb75-4597-a9ed-0dff56b7b635\") " pod="openstack/nova-cell1-conductor-db-sync-7kgb2"
Jan 28 16:09:11 crc kubenswrapper[4811]: I0128 16:09:11.357162 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 28 16:09:11 crc kubenswrapper[4811]: I0128 16:09:11.457343 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bfb54f9b5-4f9jb"]
Jan 28 16:09:11 crc kubenswrapper[4811]: W0128 16:09:11.457484 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc4a4b149_cbd4_4b98_8b5d_9585a386f681.slice/crio-4f13d314dbb6a1201dc356662b9e6ce4d36f72cfa561dec434ce84ae9a7e91d9 WatchSource:0}: Error finding container 4f13d314dbb6a1201dc356662b9e6ce4d36f72cfa561dec434ce84ae9a7e91d9: Status 404 returned error can't find the container with id 4f13d314dbb6a1201dc356662b9e6ce4d36f72cfa561dec434ce84ae9a7e91d9
Jan 28 16:09:11 crc kubenswrapper[4811]: I0128 16:09:11.528231 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-7kgb2"
Jan 28 16:09:11 crc kubenswrapper[4811]: I0128 16:09:11.705713 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8088f76a-3481-4874-9ec2-b392869e4600","Type":"ContainerStarted","Data":"ec5801da1a666e108dbb8d55df3f3691c3ad2811c5246c2acc79978182258498"}
Jan 28 16:09:11 crc kubenswrapper[4811]: I0128 16:09:11.707896 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e62bbb8a-9beb-41da-96c2-c81d6ba94f80","Type":"ContainerStarted","Data":"3d3c45c12394211285145829171a6969ac2651ba3a423161684251fce9e61126"}
Jan 28 16:09:11 crc kubenswrapper[4811]: I0128 16:09:11.709441 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bfb54f9b5-4f9jb" event={"ID":"c4a4b149-cbd4-4b98-8b5d-9585a386f681","Type":"ContainerStarted","Data":"4f13d314dbb6a1201dc356662b9e6ce4d36f72cfa561dec434ce84ae9a7e91d9"}
Jan 28 16:09:11 crc kubenswrapper[4811]: I0128 16:09:11.712379 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"4e0bf01f-9db1-413a-800b-feaf02565788","Type":"ContainerStarted","Data":"88a23539acfe786aa17bcf851ec8b99b374dd171026ecefd4edf52054f2431ff"}
Jan 28 16:09:11 crc kubenswrapper[4811]: I0128 16:09:11.714214 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8ba207bf-701f-4044-bd7e-5cd668e1dcf6","Type":"ContainerStarted","Data":"1d3f17766a2d374ff164c2ee74f333d0a27d7bf629a8eedcc5d05aea203131bc"}
Jan 28 16:09:11 crc kubenswrapper[4811]: I0128 16:09:11.728862 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-vmzwm" event={"ID":"d00b71d9-55f0-4ce4-9c27-1471372ef467","Type":"ContainerStarted","Data":"e82d4770dbd8df1af3c60b19065079eef075e181597a6fe329d2bfc69a97a55f"}
Jan 28 16:09:11 crc kubenswrapper[4811]: I0128 16:09:11.728904 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-vmzwm" event={"ID":"d00b71d9-55f0-4ce4-9c27-1471372ef467","Type":"ContainerStarted","Data":"cbdfd6494b8b3c8e1df95a53c8c809c47d3af989bd91517b26ffa30fc0173a74"}
Jan 28 16:09:11 crc kubenswrapper[4811]: I0128 16:09:11.756815 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-vmzwm" podStartSLOduration=2.756797519 podStartE2EDuration="2.756797519s" podCreationTimestamp="2026-01-28 16:09:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:09:11.744134905 +0000 UTC m=+1444.498498498" watchObservedRunningTime="2026-01-28 16:09:11.756797519 +0000 UTC m=+1444.511161102"
Jan 28 16:09:12 crc kubenswrapper[4811]: I0128 16:09:12.031929 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-7kgb2"]
Jan 28 16:09:12 crc kubenswrapper[4811]: I0128 16:09:12.741568 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-7kgb2" event={"ID":"d41fbd0a-fb75-4597-a9ed-0dff56b7b635","Type":"ContainerStarted","Data":"fa157babbe500c1563c46cb52c30c82654096cb6b82bef1d0c2006eedc63c1d2"}
Jan 28 16:09:12 crc kubenswrapper[4811]: I0128 16:09:12.741870 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-7kgb2" event={"ID":"d41fbd0a-fb75-4597-a9ed-0dff56b7b635","Type":"ContainerStarted","Data":"a3cb3a2f97729347201d3151feb774dd86e493c3ffcc847999e5f7bf1b9a885e"}
Jan 28 16:09:12 crc kubenswrapper[4811]: I0128 16:09:12.749019 4811 generic.go:334] "Generic (PLEG): container finished" podID="c4a4b149-cbd4-4b98-8b5d-9585a386f681" containerID="60fc5e97ee0333cff729231505c0e54f88165f1cba7b16d4d350d4b4f2cd7f7c" exitCode=0
Jan 28 16:09:12 crc kubenswrapper[4811]: I0128 16:09:12.749885 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bfb54f9b5-4f9jb" event={"ID":"c4a4b149-cbd4-4b98-8b5d-9585a386f681","Type":"ContainerDied","Data":"60fc5e97ee0333cff729231505c0e54f88165f1cba7b16d4d350d4b4f2cd7f7c"}
Jan 28 16:09:12 crc kubenswrapper[4811]: I0128 16:09:12.759895 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-7kgb2" podStartSLOduration=1.7598761779999998 podStartE2EDuration="1.759876178s" podCreationTimestamp="2026-01-28 16:09:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:09:12.755482999 +0000 UTC m=+1445.509846602" watchObservedRunningTime="2026-01-28 16:09:12.759876178 +0000 UTC m=+1445.514239761"
Jan 28 16:09:14 crc kubenswrapper[4811]: I0128 16:09:14.170996 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Jan 28 16:09:14 crc kubenswrapper[4811]: I0128 16:09:14.225638 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Jan 28 16:09:15 crc kubenswrapper[4811]: I0128 16:09:15.788681 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e62bbb8a-9beb-41da-96c2-c81d6ba94f80","Type":"ContainerStarted","Data":"466c51b7bb6fc30544cfa4399863dbce99fa58138fb893df3c8ee97107a5df7a"}
Jan 28 16:09:15 crc kubenswrapper[4811]: I0128 16:09:15.788989 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e62bbb8a-9beb-41da-96c2-c81d6ba94f80","Type":"ContainerStarted","Data":"924c4f9f7ba58acdd29e7b25cdb7f32265686632fa646637292277c257c24b5f"}
Jan 28 16:09:15 crc kubenswrapper[4811]: I0128 16:09:15.792164 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bfb54f9b5-4f9jb" event={"ID":"c4a4b149-cbd4-4b98-8b5d-9585a386f681","Type":"ContainerStarted","Data":"666b3a6eca458a563f70b41aeac57d36deb8775e0080957e620a6808c3007d9e"}
Jan 28 16:09:15 crc kubenswrapper[4811]: I0128 16:09:15.792288 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5bfb54f9b5-4f9jb"
Jan 28 16:09:15 crc kubenswrapper[4811]: I0128 16:09:15.793655 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"4e0bf01f-9db1-413a-800b-feaf02565788","Type":"ContainerStarted","Data":"6881afdd1d1b6c2c0c7b0e98a13a618820c92b266030f68596d0970ff79a4858"}
Jan 28 16:09:15 crc kubenswrapper[4811]: I0128 16:09:15.793705 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="4e0bf01f-9db1-413a-800b-feaf02565788" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://6881afdd1d1b6c2c0c7b0e98a13a618820c92b266030f68596d0970ff79a4858" gracePeriod=30
Jan 28 16:09:15 crc kubenswrapper[4811]: I0128 16:09:15.795615 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8ba207bf-701f-4044-bd7e-5cd668e1dcf6","Type":"ContainerStarted","Data":"f7d6963f4297169d3e9fa0c3ad09ecb03aca074b609378ee8d1f65cc9733b670"}
Jan 28 16:09:15 crc kubenswrapper[4811]: I0128 16:09:15.795651 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8ba207bf-701f-4044-bd7e-5cd668e1dcf6","Type":"ContainerStarted","Data":"201e01b2dc8dbb9cae2617fec4527fcaa0acd9370d8d87f676d69ba27e5880bf"}
Jan 28 16:09:15 crc kubenswrapper[4811]: I0128 16:09:15.795746 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="8ba207bf-701f-4044-bd7e-5cd668e1dcf6" containerName="nova-metadata-log" containerID="cri-o://201e01b2dc8dbb9cae2617fec4527fcaa0acd9370d8d87f676d69ba27e5880bf" gracePeriod=30
Jan 28 16:09:15 crc kubenswrapper[4811]: I0128 16:09:15.795837 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="8ba207bf-701f-4044-bd7e-5cd668e1dcf6" containerName="nova-metadata-metadata" containerID="cri-o://f7d6963f4297169d3e9fa0c3ad09ecb03aca074b609378ee8d1f65cc9733b670" gracePeriod=30
Jan 28 16:09:15 crc kubenswrapper[4811]: I0128 16:09:15.801388 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8088f76a-3481-4874-9ec2-b392869e4600","Type":"ContainerStarted","Data":"e5392e216b02d50f10511c2e690d721240953f7e9305e191bff7e44bbcfb76fc"}
Jan 28 16:09:15 crc kubenswrapper[4811]: I0128 16:09:15.826926 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.033857637 podStartE2EDuration="6.826901099s" podCreationTimestamp="2026-01-28 16:09:09 +0000 UTC" firstStartedPulling="2026-01-28 16:09:10.982303804 +0000 UTC m=+1443.736667387" lastFinishedPulling="2026-01-28 16:09:14.775347266 +0000 UTC m=+1447.529710849" observedRunningTime="2026-01-28 16:09:15.816675192 +0000 UTC m=+1448.571038785" watchObservedRunningTime="2026-01-28 16:09:15.826901099 +0000 UTC m=+1448.581264682"
Jan 28 16:09:15 crc kubenswrapper[4811]: I0128 16:09:15.864829 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.093399163 podStartE2EDuration="6.864805028s" podCreationTimestamp="2026-01-28 16:09:09 +0000 UTC" firstStartedPulling="2026-01-28 16:09:11.000367974 +0000 UTC m=+1443.754731597" lastFinishedPulling="2026-01-28 16:09:14.771773869 +0000 UTC m=+1447.526137462" observedRunningTime="2026-01-28 16:09:15.857998564 +0000 UTC m=+1448.612362147" watchObservedRunningTime="2026-01-28 16:09:15.864805028 +0000 UTC m=+1448.619168621" Jan 28 16:09:15 crc kubenswrapper[4811]: I0128 16:09:15.920762 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.31734315 podStartE2EDuration="6.920734295s" podCreationTimestamp="2026-01-28 16:09:09 +0000 UTC" firstStartedPulling="2026-01-28 16:09:11.178897419 +0000 UTC m=+1443.933261002" lastFinishedPulling="2026-01-28 16:09:14.782288564 +0000 UTC m=+1447.536652147" observedRunningTime="2026-01-28 16:09:15.879944199 +0000 UTC m=+1448.634307782" watchObservedRunningTime="2026-01-28 16:09:15.920734295 +0000 UTC m=+1448.675097868" Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.004312 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5bfb54f9b5-4f9jb" podStartSLOduration=6.004287423 podStartE2EDuration="6.004287423s" podCreationTimestamp="2026-01-28 16:09:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:09:15.949763893 +0000 UTC m=+1448.704127476" watchObservedRunningTime="2026-01-28 16:09:16.004287423 +0000 UTC m=+1448.758651006" Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.007626 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.588468825 podStartE2EDuration="6.007610062s" podCreationTimestamp="2026-01-28 16:09:10 +0000 UTC" firstStartedPulling="2026-01-28 16:09:11.354732069 +0000 UTC m=+1444.109095652" lastFinishedPulling="2026-01-28 16:09:14.773873296 +0000 UTC m=+1447.528236889" observedRunningTime="2026-01-28 16:09:15.984858235 +0000 UTC m=+1448.739221818" watchObservedRunningTime="2026-01-28 16:09:16.007610062 +0000 UTC m=+1448.761973635" Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.591278 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.641869 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ba207bf-701f-4044-bd7e-5cd668e1dcf6-config-data\") pod \"8ba207bf-701f-4044-bd7e-5cd668e1dcf6\" (UID: \"8ba207bf-701f-4044-bd7e-5cd668e1dcf6\") " Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.641929 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ba207bf-701f-4044-bd7e-5cd668e1dcf6-logs\") pod \"8ba207bf-701f-4044-bd7e-5cd668e1dcf6\" (UID: \"8ba207bf-701f-4044-bd7e-5cd668e1dcf6\") " Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.642120 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ba207bf-701f-4044-bd7e-5cd668e1dcf6-combined-ca-bundle\") pod \"8ba207bf-701f-4044-bd7e-5cd668e1dcf6\" (UID: \"8ba207bf-701f-4044-bd7e-5cd668e1dcf6\") " Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.642147 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f2sn9\" (UniqueName: \"kubernetes.io/projected/8ba207bf-701f-4044-bd7e-5cd668e1dcf6-kube-api-access-f2sn9\") pod \"8ba207bf-701f-4044-bd7e-5cd668e1dcf6\" (UID: \"8ba207bf-701f-4044-bd7e-5cd668e1dcf6\") " Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.642520 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ba207bf-701f-4044-bd7e-5cd668e1dcf6-logs" (OuterVolumeSpecName: "logs") pod "8ba207bf-701f-4044-bd7e-5cd668e1dcf6" (UID: "8ba207bf-701f-4044-bd7e-5cd668e1dcf6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.647834 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ba207bf-701f-4044-bd7e-5cd668e1dcf6-kube-api-access-f2sn9" (OuterVolumeSpecName: "kube-api-access-f2sn9") pod "8ba207bf-701f-4044-bd7e-5cd668e1dcf6" (UID: "8ba207bf-701f-4044-bd7e-5cd668e1dcf6"). InnerVolumeSpecName "kube-api-access-f2sn9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.681718 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ba207bf-701f-4044-bd7e-5cd668e1dcf6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8ba207bf-701f-4044-bd7e-5cd668e1dcf6" (UID: "8ba207bf-701f-4044-bd7e-5cd668e1dcf6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.697845 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ba207bf-701f-4044-bd7e-5cd668e1dcf6-config-data" (OuterVolumeSpecName: "config-data") pod "8ba207bf-701f-4044-bd7e-5cd668e1dcf6" (UID: "8ba207bf-701f-4044-bd7e-5cd668e1dcf6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.744215 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ba207bf-701f-4044-bd7e-5cd668e1dcf6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.744487 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f2sn9\" (UniqueName: \"kubernetes.io/projected/8ba207bf-701f-4044-bd7e-5cd668e1dcf6-kube-api-access-f2sn9\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.744570 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ba207bf-701f-4044-bd7e-5cd668e1dcf6-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.744657 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ba207bf-701f-4044-bd7e-5cd668e1dcf6-logs\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.811694 4811 generic.go:334] "Generic (PLEG): container finished" podID="8ba207bf-701f-4044-bd7e-5cd668e1dcf6" containerID="f7d6963f4297169d3e9fa0c3ad09ecb03aca074b609378ee8d1f65cc9733b670" exitCode=0 Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.811982 4811 generic.go:334] "Generic (PLEG): container finished" podID="8ba207bf-701f-4044-bd7e-5cd668e1dcf6" containerID="201e01b2dc8dbb9cae2617fec4527fcaa0acd9370d8d87f676d69ba27e5880bf" exitCode=143 Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.812879 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.814637 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8ba207bf-701f-4044-bd7e-5cd668e1dcf6","Type":"ContainerDied","Data":"f7d6963f4297169d3e9fa0c3ad09ecb03aca074b609378ee8d1f65cc9733b670"} Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.814864 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8ba207bf-701f-4044-bd7e-5cd668e1dcf6","Type":"ContainerDied","Data":"201e01b2dc8dbb9cae2617fec4527fcaa0acd9370d8d87f676d69ba27e5880bf"} Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.814965 4811 scope.go:117] "RemoveContainer" containerID="f7d6963f4297169d3e9fa0c3ad09ecb03aca074b609378ee8d1f65cc9733b670" Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.814993 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8ba207bf-701f-4044-bd7e-5cd668e1dcf6","Type":"ContainerDied","Data":"1d3f17766a2d374ff164c2ee74f333d0a27d7bf629a8eedcc5d05aea203131bc"} Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.848481 4811 scope.go:117] "RemoveContainer" containerID="201e01b2dc8dbb9cae2617fec4527fcaa0acd9370d8d87f676d69ba27e5880bf" Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.857258 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.882612 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.885719 4811 scope.go:117] "RemoveContainer" containerID="f7d6963f4297169d3e9fa0c3ad09ecb03aca074b609378ee8d1f65cc9733b670" Jan 28 
Jan 28 16:09:16 crc kubenswrapper[4811]: E0128 16:09:16.915616 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7d6963f4297169d3e9fa0c3ad09ecb03aca074b609378ee8d1f65cc9733b670\": container with ID starting with f7d6963f4297169d3e9fa0c3ad09ecb03aca074b609378ee8d1f65cc9733b670 not found: ID does not exist" containerID="f7d6963f4297169d3e9fa0c3ad09ecb03aca074b609378ee8d1f65cc9733b670"
Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.915674 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7d6963f4297169d3e9fa0c3ad09ecb03aca074b609378ee8d1f65cc9733b670"} err="failed to get container status \"f7d6963f4297169d3e9fa0c3ad09ecb03aca074b609378ee8d1f65cc9733b670\": rpc error: code = NotFound desc = could not find container \"f7d6963f4297169d3e9fa0c3ad09ecb03aca074b609378ee8d1f65cc9733b670\": container with ID starting with f7d6963f4297169d3e9fa0c3ad09ecb03aca074b609378ee8d1f65cc9733b670 not found: ID does not exist"
Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.915710 4811 scope.go:117] "RemoveContainer" containerID="201e01b2dc8dbb9cae2617fec4527fcaa0acd9370d8d87f676d69ba27e5880bf"
Jan 28 16:09:16 crc kubenswrapper[4811]: E0128 16:09:16.919139 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"201e01b2dc8dbb9cae2617fec4527fcaa0acd9370d8d87f676d69ba27e5880bf\": container with ID starting with 201e01b2dc8dbb9cae2617fec4527fcaa0acd9370d8d87f676d69ba27e5880bf not found: ID does not exist" containerID="201e01b2dc8dbb9cae2617fec4527fcaa0acd9370d8d87f676d69ba27e5880bf"
Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.919205 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"201e01b2dc8dbb9cae2617fec4527fcaa0acd9370d8d87f676d69ba27e5880bf"} err="failed to get container status \"201e01b2dc8dbb9cae2617fec4527fcaa0acd9370d8d87f676d69ba27e5880bf\": rpc error: code = NotFound desc = could not find container \"201e01b2dc8dbb9cae2617fec4527fcaa0acd9370d8d87f676d69ba27e5880bf\": container with ID starting with 201e01b2dc8dbb9cae2617fec4527fcaa0acd9370d8d87f676d69ba27e5880bf not found: ID does not exist"
Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.919233 4811 scope.go:117] "RemoveContainer" containerID="f7d6963f4297169d3e9fa0c3ad09ecb03aca074b609378ee8d1f65cc9733b670"
Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.920862 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7d6963f4297169d3e9fa0c3ad09ecb03aca074b609378ee8d1f65cc9733b670"} err="failed to get container status \"f7d6963f4297169d3e9fa0c3ad09ecb03aca074b609378ee8d1f65cc9733b670\": rpc error: code = NotFound desc = could not find container \"f7d6963f4297169d3e9fa0c3ad09ecb03aca074b609378ee8d1f65cc9733b670\": container with ID starting with f7d6963f4297169d3e9fa0c3ad09ecb03aca074b609378ee8d1f65cc9733b670 not found: ID does not exist"
Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.920907 4811 scope.go:117] "RemoveContainer" containerID="201e01b2dc8dbb9cae2617fec4527fcaa0acd9370d8d87f676d69ba27e5880bf"
Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.921242 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"201e01b2dc8dbb9cae2617fec4527fcaa0acd9370d8d87f676d69ba27e5880bf"} err="failed to get container status \"201e01b2dc8dbb9cae2617fec4527fcaa0acd9370d8d87f676d69ba27e5880bf\": rpc error: code = NotFound desc = could not find container \"201e01b2dc8dbb9cae2617fec4527fcaa0acd9370d8d87f676d69ba27e5880bf\": container with ID starting with 201e01b2dc8dbb9cae2617fec4527fcaa0acd9370d8d87f676d69ba27e5880bf not found: ID does not exist"
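The repeated "ContainerStatus from runtime service failed" / "DeleteContainer returned error" pairs above are benign: the containers were already removed, so the CRI runtime answers each status lookup with gRPC NotFound and the kubelet records that and moves on. A sketch of that idempotent-cleanup pattern follows; removeContainer is a hypothetical helper, while the status/codes handling is the real google.golang.org/grpc API.

    package main

    import (
        "fmt"

        "google.golang.org/grpc/codes"
        "google.golang.org/grpc/status"
    )

    // removeContainer treats a NotFound from the runtime as success, so a
    // retried cleanup of an already-deleted container cannot fail.
    func removeContainer(id string, statusErr error) error {
        if status.Code(statusErr) == codes.NotFound {
            fmt.Printf("container %q not found: ID does not exist (already removed)\n", id)
            return nil
        }
        return statusErr
    }

    func main() {
        err := status.Error(codes.NotFound, "could not find container")
        _ = removeContainer("f7d6963f4297...", err) // truncated ID, illustration only
    }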
\"201e01b2dc8dbb9cae2617fec4527fcaa0acd9370d8d87f676d69ba27e5880bf\": rpc error: code = NotFound desc = could not find container \"201e01b2dc8dbb9cae2617fec4527fcaa0acd9370d8d87f676d69ba27e5880bf\": container with ID starting with 201e01b2dc8dbb9cae2617fec4527fcaa0acd9370d8d87f676d69ba27e5880bf not found: ID does not exist" Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.936516 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 28 16:09:16 crc kubenswrapper[4811]: E0128 16:09:16.937031 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ba207bf-701f-4044-bd7e-5cd668e1dcf6" containerName="nova-metadata-log" Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.937055 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ba207bf-701f-4044-bd7e-5cd668e1dcf6" containerName="nova-metadata-log" Jan 28 16:09:16 crc kubenswrapper[4811]: E0128 16:09:16.937104 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ba207bf-701f-4044-bd7e-5cd668e1dcf6" containerName="nova-metadata-metadata" Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.937113 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ba207bf-701f-4044-bd7e-5cd668e1dcf6" containerName="nova-metadata-metadata" Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.937342 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ba207bf-701f-4044-bd7e-5cd668e1dcf6" containerName="nova-metadata-metadata" Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.937378 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ba207bf-701f-4044-bd7e-5cd668e1dcf6" containerName="nova-metadata-log" Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.938400 4811 util.go:30] "No sandbox for pod can be found. 
Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.943108 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.943152 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Jan 28 16:09:16 crc kubenswrapper[4811]: I0128 16:09:16.947349 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Jan 28 16:09:17 crc kubenswrapper[4811]: I0128 16:09:17.049153 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3e729db-bb8d-4a95-92f0-0061630e1967-logs\") pod \"nova-metadata-0\" (UID: \"a3e729db-bb8d-4a95-92f0-0061630e1967\") " pod="openstack/nova-metadata-0"
Jan 28 16:09:17 crc kubenswrapper[4811]: I0128 16:09:17.049207 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3e729db-bb8d-4a95-92f0-0061630e1967-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a3e729db-bb8d-4a95-92f0-0061630e1967\") " pod="openstack/nova-metadata-0"
Jan 28 16:09:17 crc kubenswrapper[4811]: I0128 16:09:17.049558 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzp46\" (UniqueName: \"kubernetes.io/projected/a3e729db-bb8d-4a95-92f0-0061630e1967-kube-api-access-wzp46\") pod \"nova-metadata-0\" (UID: \"a3e729db-bb8d-4a95-92f0-0061630e1967\") " pod="openstack/nova-metadata-0"
Jan 28 16:09:17 crc kubenswrapper[4811]: I0128 16:09:17.049615 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3e729db-bb8d-4a95-92f0-0061630e1967-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a3e729db-bb8d-4a95-92f0-0061630e1967\") " pod="openstack/nova-metadata-0"
Jan 28 16:09:17 crc kubenswrapper[4811]: I0128 16:09:17.049761 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3e729db-bb8d-4a95-92f0-0061630e1967-config-data\") pod \"nova-metadata-0\" (UID: \"a3e729db-bb8d-4a95-92f0-0061630e1967\") " pod="openstack/nova-metadata-0"
Jan 28 16:09:17 crc kubenswrapper[4811]: I0128 16:09:17.151758 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3e729db-bb8d-4a95-92f0-0061630e1967-config-data\") pod \"nova-metadata-0\" (UID: \"a3e729db-bb8d-4a95-92f0-0061630e1967\") " pod="openstack/nova-metadata-0"
Jan 28 16:09:17 crc kubenswrapper[4811]: I0128 16:09:17.151856 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3e729db-bb8d-4a95-92f0-0061630e1967-logs\") pod \"nova-metadata-0\" (UID: \"a3e729db-bb8d-4a95-92f0-0061630e1967\") " pod="openstack/nova-metadata-0"
Jan 28 16:09:17 crc kubenswrapper[4811]: I0128 16:09:17.151886 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3e729db-bb8d-4a95-92f0-0061630e1967-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a3e729db-bb8d-4a95-92f0-0061630e1967\") " pod="openstack/nova-metadata-0"
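Here the replacement nova-metadata-0 pod (UID a3e729db-bb8d-4a95-92f0-0061630e1967) has its volumes verified as attached and then mounted one by one. Below is a hedged reconstruction of the volume stanza that would produce these mounts: the secret names nova-metadata-config-data and cert-nova-metadata-internal-svc come from the reflector lines above, while the combined-ca-bundle secret name is an assumption, and the kube-api-access-wzp46 projected token volume is injected by the service-account admission plugin rather than written by hand.

    package main

    import (
        "fmt"

        corev1 "k8s.io/api/core/v1"
    )

    func main() {
        // Sketch of the pod's volumes; not read from the live cluster.
        vols := []corev1.Volume{
            {Name: "logs", VolumeSource: corev1.VolumeSource{
                EmptyDir: &corev1.EmptyDirVolumeSource{}}},
            {Name: "config-data", VolumeSource: corev1.VolumeSource{
                Secret: &corev1.SecretVolumeSource{SecretName: "nova-metadata-config-data"}}},
            {Name: "nova-metadata-tls-certs", VolumeSource: corev1.VolumeSource{
                Secret: &corev1.SecretVolumeSource{SecretName: "cert-nova-metadata-internal-svc"}}},
            {Name: "combined-ca-bundle", VolumeSource: corev1.VolumeSource{
                Secret: &corev1.SecretVolumeSource{SecretName: "combined-ca-bundle"}}}, // assumed name
        }
        for _, v := range vols {
            fmt.Println("volume:", v.Name)
        }
    }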
Jan 28 16:09:17 crc kubenswrapper[4811]: I0128 16:09:17.152000 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzp46\" (UniqueName: \"kubernetes.io/projected/a3e729db-bb8d-4a95-92f0-0061630e1967-kube-api-access-wzp46\") pod \"nova-metadata-0\" (UID: \"a3e729db-bb8d-4a95-92f0-0061630e1967\") " pod="openstack/nova-metadata-0"
Jan 28 16:09:17 crc kubenswrapper[4811]: I0128 16:09:17.152030 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3e729db-bb8d-4a95-92f0-0061630e1967-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a3e729db-bb8d-4a95-92f0-0061630e1967\") " pod="openstack/nova-metadata-0"
Jan 28 16:09:17 crc kubenswrapper[4811]: I0128 16:09:17.152347 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3e729db-bb8d-4a95-92f0-0061630e1967-logs\") pod \"nova-metadata-0\" (UID: \"a3e729db-bb8d-4a95-92f0-0061630e1967\") " pod="openstack/nova-metadata-0"
Jan 28 16:09:17 crc kubenswrapper[4811]: I0128 16:09:17.156761 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3e729db-bb8d-4a95-92f0-0061630e1967-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a3e729db-bb8d-4a95-92f0-0061630e1967\") " pod="openstack/nova-metadata-0"
Jan 28 16:09:17 crc kubenswrapper[4811]: I0128 16:09:17.157248 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3e729db-bb8d-4a95-92f0-0061630e1967-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a3e729db-bb8d-4a95-92f0-0061630e1967\") " pod="openstack/nova-metadata-0"
Jan 28 16:09:17 crc kubenswrapper[4811]: I0128 16:09:17.157616 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3e729db-bb8d-4a95-92f0-0061630e1967-config-data\") pod \"nova-metadata-0\" (UID: \"a3e729db-bb8d-4a95-92f0-0061630e1967\") " pod="openstack/nova-metadata-0"
Jan 28 16:09:17 crc kubenswrapper[4811]: I0128 16:09:17.173217 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzp46\" (UniqueName: \"kubernetes.io/projected/a3e729db-bb8d-4a95-92f0-0061630e1967-kube-api-access-wzp46\") pod \"nova-metadata-0\" (UID: \"a3e729db-bb8d-4a95-92f0-0061630e1967\") " pod="openstack/nova-metadata-0"
Jan 28 16:09:17 crc kubenswrapper[4811]: I0128 16:09:17.256383 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 28 16:09:17 crc kubenswrapper[4811]: W0128 16:09:17.712655 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3e729db_bb8d_4a95_92f0_0061630e1967.slice/crio-1fc9a107a46cb976a9e04c18a4b79bb897cae549d0947eee6aca8a7b3da1d498 WatchSource:0}: Error finding container 1fc9a107a46cb976a9e04c18a4b79bb897cae549d0947eee6aca8a7b3da1d498: Status 404 returned error can't find the container with id 1fc9a107a46cb976a9e04c18a4b79bb897cae549d0947eee6aca8a7b3da1d498
Jan 28 16:09:17 crc kubenswrapper[4811]: I0128 16:09:17.714209 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Jan 28 16:09:17 crc kubenswrapper[4811]: I0128 16:09:17.834014 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a3e729db-bb8d-4a95-92f0-0061630e1967","Type":"ContainerStarted","Data":"1fc9a107a46cb976a9e04c18a4b79bb897cae549d0947eee6aca8a7b3da1d498"}
Jan 28 16:09:18 crc kubenswrapper[4811]: I0128 16:09:18.352625 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ba207bf-701f-4044-bd7e-5cd668e1dcf6" path="/var/lib/kubelet/pods/8ba207bf-701f-4044-bd7e-5cd668e1dcf6/volumes"
Jan 28 16:09:18 crc kubenswrapper[4811]: I0128 16:09:18.844107 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a3e729db-bb8d-4a95-92f0-0061630e1967","Type":"ContainerStarted","Data":"f17382d3afa91970aa3598b87d709c286f305f80ce80d542da7e90a480f491d8"}
Jan 28 16:09:18 crc kubenswrapper[4811]: I0128 16:09:18.844450 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a3e729db-bb8d-4a95-92f0-0061630e1967","Type":"ContainerStarted","Data":"b0843cebaca0372f82d1a769af9c87a8e04a38ef5a6b76924ef3d3869ca2d1d3"}
Jan 28 16:09:19 crc kubenswrapper[4811]: I0128 16:09:19.853783 4811 generic.go:334] "Generic (PLEG): container finished" podID="d00b71d9-55f0-4ce4-9c27-1471372ef467" containerID="e82d4770dbd8df1af3c60b19065079eef075e181597a6fe329d2bfc69a97a55f" exitCode=0
Jan 28 16:09:19 crc kubenswrapper[4811]: I0128 16:09:19.853855 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-vmzwm" event={"ID":"d00b71d9-55f0-4ce4-9c27-1471372ef467","Type":"ContainerDied","Data":"e82d4770dbd8df1af3c60b19065079eef075e181597a6fe329d2bfc69a97a55f"}
Jan 28 16:09:19 crc kubenswrapper[4811]: I0128 16:09:19.883317 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.883298028 podStartE2EDuration="3.883298028s" podCreationTimestamp="2026-01-28 16:09:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:09:18.869166019 +0000 UTC m=+1451.623529612" watchObservedRunningTime="2026-01-28 16:09:19.883298028 +0000 UTC m=+1452.637661611"
Jan 28 16:09:20 crc kubenswrapper[4811]: I0128 16:09:20.265782 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Jan 28 16:09:20 crc kubenswrapper[4811]: I0128 16:09:20.265858 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Jan 28 16:09:20 crc kubenswrapper[4811]: I0128 16:09:20.288891 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
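The "SyncLoop (probe)" lines around this point are the kubelet reacting to startup and readiness probe results for the nova pods; nova-api-0 stays "unhealthy" because its HTTP startup probe against port 8774 keeps timing out (see the "Probe failed" lines below). A sketch of a probe of that shape follows; the period and failure threshold are placeholders, not values read from the real pod spec.

    package main

    import (
        "fmt"

        corev1 "k8s.io/api/core/v1"
        "k8s.io/apimachinery/pkg/util/intstr"
    )

    func main() {
        // An HTTP GET startup probe like the one behind the
        // "Get \"http://10.217.0.188:8774/\"" failures in the log.
        startup := &corev1.Probe{
            ProbeHandler: corev1.ProbeHandler{
                HTTPGet: &corev1.HTTPGetAction{Path: "/", Port: intstr.FromInt(8774)},
            },
            PeriodSeconds:    5,  // placeholder
            FailureThreshold: 30, // placeholder: pod reads "unhealthy" until it passes
        }
        fmt.Printf("startup probe: GET %s on port %s\n",
            startup.HTTPGet.Path, startup.HTTPGet.Port.String())
    }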
Jan 28 16:09:20 crc kubenswrapper[4811]: I0128 16:09:20.602059 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Jan 28 16:09:20 crc kubenswrapper[4811]: I0128 16:09:20.602376 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Jan 28 16:09:20 crc kubenswrapper[4811]: I0128 16:09:20.631274 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Jan 28 16:09:20 crc kubenswrapper[4811]: I0128 16:09:20.810932 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5bfb54f9b5-4f9jb"
Jan 28 16:09:20 crc kubenswrapper[4811]: I0128 16:09:20.874714 4811 generic.go:334] "Generic (PLEG): container finished" podID="d41fbd0a-fb75-4597-a9ed-0dff56b7b635" containerID="fa157babbe500c1563c46cb52c30c82654096cb6b82bef1d0c2006eedc63c1d2" exitCode=0
Jan 28 16:09:20 crc kubenswrapper[4811]: I0128 16:09:20.875621 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-7kgb2" event={"ID":"d41fbd0a-fb75-4597-a9ed-0dff56b7b635","Type":"ContainerDied","Data":"fa157babbe500c1563c46cb52c30c82654096cb6b82bef1d0c2006eedc63c1d2"}
Jan 28 16:09:20 crc kubenswrapper[4811]: I0128 16:09:20.905230 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b4f5fc4f-j57cr"]
Jan 28 16:09:20 crc kubenswrapper[4811]: I0128 16:09:20.905570 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6b4f5fc4f-j57cr" podUID="224a0600-f44e-482b-a6f0-c96aeb7c4e60" containerName="dnsmasq-dns" containerID="cri-o://d12160213921a2bdab4e2b7e544e5b930e90d86ffb8a833a1ca72f00afaa8778" gracePeriod=10
Jan 28 16:09:20 crc kubenswrapper[4811]: I0128 16:09:20.929782 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.061820 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6b4f5fc4f-j57cr" podUID="224a0600-f44e-482b-a6f0-c96aeb7c4e60" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.166:5353: connect: connection refused"
Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.347676 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="e62bbb8a-9beb-41da-96c2-c81d6ba94f80" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.188:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.348271 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="e62bbb8a-9beb-41da-96c2-c81d6ba94f80" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.188:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.465844 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-vmzwm"
Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.471046 4811 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/dnsmasq-dns-6b4f5fc4f-j57cr" Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.638718 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/224a0600-f44e-482b-a6f0-c96aeb7c4e60-dns-swift-storage-0\") pod \"224a0600-f44e-482b-a6f0-c96aeb7c4e60\" (UID: \"224a0600-f44e-482b-a6f0-c96aeb7c4e60\") " Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.638797 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t92rw\" (UniqueName: \"kubernetes.io/projected/d00b71d9-55f0-4ce4-9c27-1471372ef467-kube-api-access-t92rw\") pod \"d00b71d9-55f0-4ce4-9c27-1471372ef467\" (UID: \"d00b71d9-55f0-4ce4-9c27-1471372ef467\") " Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.638836 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/224a0600-f44e-482b-a6f0-c96aeb7c4e60-ovsdbserver-sb\") pod \"224a0600-f44e-482b-a6f0-c96aeb7c4e60\" (UID: \"224a0600-f44e-482b-a6f0-c96aeb7c4e60\") " Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.638866 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwb4g\" (UniqueName: \"kubernetes.io/projected/224a0600-f44e-482b-a6f0-c96aeb7c4e60-kube-api-access-fwb4g\") pod \"224a0600-f44e-482b-a6f0-c96aeb7c4e60\" (UID: \"224a0600-f44e-482b-a6f0-c96aeb7c4e60\") " Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.638917 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00b71d9-55f0-4ce4-9c27-1471372ef467-combined-ca-bundle\") pod \"d00b71d9-55f0-4ce4-9c27-1471372ef467\" (UID: \"d00b71d9-55f0-4ce4-9c27-1471372ef467\") " Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.638944 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/224a0600-f44e-482b-a6f0-c96aeb7c4e60-dns-svc\") pod \"224a0600-f44e-482b-a6f0-c96aeb7c4e60\" (UID: \"224a0600-f44e-482b-a6f0-c96aeb7c4e60\") " Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.638994 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/224a0600-f44e-482b-a6f0-c96aeb7c4e60-ovsdbserver-nb\") pod \"224a0600-f44e-482b-a6f0-c96aeb7c4e60\" (UID: \"224a0600-f44e-482b-a6f0-c96aeb7c4e60\") " Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.639060 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d00b71d9-55f0-4ce4-9c27-1471372ef467-config-data\") pod \"d00b71d9-55f0-4ce4-9c27-1471372ef467\" (UID: \"d00b71d9-55f0-4ce4-9c27-1471372ef467\") " Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.639110 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/224a0600-f44e-482b-a6f0-c96aeb7c4e60-config\") pod \"224a0600-f44e-482b-a6f0-c96aeb7c4e60\" (UID: \"224a0600-f44e-482b-a6f0-c96aeb7c4e60\") " Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.639214 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d00b71d9-55f0-4ce4-9c27-1471372ef467-scripts\") pod \"d00b71d9-55f0-4ce4-9c27-1471372ef467\" (UID: 
\"d00b71d9-55f0-4ce4-9c27-1471372ef467\") " Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.646157 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/224a0600-f44e-482b-a6f0-c96aeb7c4e60-kube-api-access-fwb4g" (OuterVolumeSpecName: "kube-api-access-fwb4g") pod "224a0600-f44e-482b-a6f0-c96aeb7c4e60" (UID: "224a0600-f44e-482b-a6f0-c96aeb7c4e60"). InnerVolumeSpecName "kube-api-access-fwb4g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.646720 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d00b71d9-55f0-4ce4-9c27-1471372ef467-scripts" (OuterVolumeSpecName: "scripts") pod "d00b71d9-55f0-4ce4-9c27-1471372ef467" (UID: "d00b71d9-55f0-4ce4-9c27-1471372ef467"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.647275 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d00b71d9-55f0-4ce4-9c27-1471372ef467-kube-api-access-t92rw" (OuterVolumeSpecName: "kube-api-access-t92rw") pod "d00b71d9-55f0-4ce4-9c27-1471372ef467" (UID: "d00b71d9-55f0-4ce4-9c27-1471372ef467"). InnerVolumeSpecName "kube-api-access-t92rw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.675169 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d00b71d9-55f0-4ce4-9c27-1471372ef467-config-data" (OuterVolumeSpecName: "config-data") pod "d00b71d9-55f0-4ce4-9c27-1471372ef467" (UID: "d00b71d9-55f0-4ce4-9c27-1471372ef467"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.697535 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/224a0600-f44e-482b-a6f0-c96aeb7c4e60-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "224a0600-f44e-482b-a6f0-c96aeb7c4e60" (UID: "224a0600-f44e-482b-a6f0-c96aeb7c4e60"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.698896 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d00b71d9-55f0-4ce4-9c27-1471372ef467-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d00b71d9-55f0-4ce4-9c27-1471372ef467" (UID: "d00b71d9-55f0-4ce4-9c27-1471372ef467"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.699142 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/224a0600-f44e-482b-a6f0-c96aeb7c4e60-config" (OuterVolumeSpecName: "config") pod "224a0600-f44e-482b-a6f0-c96aeb7c4e60" (UID: "224a0600-f44e-482b-a6f0-c96aeb7c4e60"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.702370 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/224a0600-f44e-482b-a6f0-c96aeb7c4e60-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "224a0600-f44e-482b-a6f0-c96aeb7c4e60" (UID: "224a0600-f44e-482b-a6f0-c96aeb7c4e60"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.713163 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/224a0600-f44e-482b-a6f0-c96aeb7c4e60-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "224a0600-f44e-482b-a6f0-c96aeb7c4e60" (UID: "224a0600-f44e-482b-a6f0-c96aeb7c4e60"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.731740 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/224a0600-f44e-482b-a6f0-c96aeb7c4e60-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "224a0600-f44e-482b-a6f0-c96aeb7c4e60" (UID: "224a0600-f44e-482b-a6f0-c96aeb7c4e60"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.741622 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d00b71d9-55f0-4ce4-9c27-1471372ef467-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.741666 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/224a0600-f44e-482b-a6f0-c96aeb7c4e60-config\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.741675 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d00b71d9-55f0-4ce4-9c27-1471372ef467-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.741687 4811 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/224a0600-f44e-482b-a6f0-c96aeb7c4e60-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.741698 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t92rw\" (UniqueName: \"kubernetes.io/projected/d00b71d9-55f0-4ce4-9c27-1471372ef467-kube-api-access-t92rw\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.741706 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/224a0600-f44e-482b-a6f0-c96aeb7c4e60-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.741716 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwb4g\" (UniqueName: \"kubernetes.io/projected/224a0600-f44e-482b-a6f0-c96aeb7c4e60-kube-api-access-fwb4g\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.741725 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00b71d9-55f0-4ce4-9c27-1471372ef467-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.741734 4811 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/224a0600-f44e-482b-a6f0-c96aeb7c4e60-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.741742 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/224a0600-f44e-482b-a6f0-c96aeb7c4e60-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.886317 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-vmzwm" event={"ID":"d00b71d9-55f0-4ce4-9c27-1471372ef467","Type":"ContainerDied","Data":"cbdfd6494b8b3c8e1df95a53c8c809c47d3af989bd91517b26ffa30fc0173a74"} Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.886345 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-vmzwm" Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.886372 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cbdfd6494b8b3c8e1df95a53c8c809c47d3af989bd91517b26ffa30fc0173a74" Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.888819 4811 generic.go:334] "Generic (PLEG): container finished" podID="224a0600-f44e-482b-a6f0-c96aeb7c4e60" containerID="d12160213921a2bdab4e2b7e544e5b930e90d86ffb8a833a1ca72f00afaa8778" exitCode=0 Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.888925 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b4f5fc4f-j57cr" Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.888960 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b4f5fc4f-j57cr" event={"ID":"224a0600-f44e-482b-a6f0-c96aeb7c4e60","Type":"ContainerDied","Data":"d12160213921a2bdab4e2b7e544e5b930e90d86ffb8a833a1ca72f00afaa8778"} Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.889026 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b4f5fc4f-j57cr" event={"ID":"224a0600-f44e-482b-a6f0-c96aeb7c4e60","Type":"ContainerDied","Data":"23adde7554fd1f09c98e0262c9d68cbfc9469928871b309f8186491c0ca024e4"} Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.889050 4811 scope.go:117] "RemoveContainer" containerID="d12160213921a2bdab4e2b7e544e5b930e90d86ffb8a833a1ca72f00afaa8778" Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.918244 4811 scope.go:117] "RemoveContainer" containerID="f93872847accd261ace129ff22ae2a979e2ba6bd139606428029ae2871dc1a16" Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.956872 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b4f5fc4f-j57cr"] Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.967948 4811 scope.go:117] "RemoveContainer" containerID="d12160213921a2bdab4e2b7e544e5b930e90d86ffb8a833a1ca72f00afaa8778" Jan 28 16:09:21 crc kubenswrapper[4811]: E0128 16:09:21.970659 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d12160213921a2bdab4e2b7e544e5b930e90d86ffb8a833a1ca72f00afaa8778\": container with ID starting with d12160213921a2bdab4e2b7e544e5b930e90d86ffb8a833a1ca72f00afaa8778 not found: ID does not exist" containerID="d12160213921a2bdab4e2b7e544e5b930e90d86ffb8a833a1ca72f00afaa8778" Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.970711 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d12160213921a2bdab4e2b7e544e5b930e90d86ffb8a833a1ca72f00afaa8778"} err="failed to get container status \"d12160213921a2bdab4e2b7e544e5b930e90d86ffb8a833a1ca72f00afaa8778\": rpc error: code = NotFound desc = could not find container \"d12160213921a2bdab4e2b7e544e5b930e90d86ffb8a833a1ca72f00afaa8778\": container with ID 
starting with d12160213921a2bdab4e2b7e544e5b930e90d86ffb8a833a1ca72f00afaa8778 not found: ID does not exist"
Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.970743 4811 scope.go:117] "RemoveContainer" containerID="f93872847accd261ace129ff22ae2a979e2ba6bd139606428029ae2871dc1a16"
Jan 28 16:09:21 crc kubenswrapper[4811]: E0128 16:09:21.971134 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f93872847accd261ace129ff22ae2a979e2ba6bd139606428029ae2871dc1a16\": container with ID starting with f93872847accd261ace129ff22ae2a979e2ba6bd139606428029ae2871dc1a16 not found: ID does not exist" containerID="f93872847accd261ace129ff22ae2a979e2ba6bd139606428029ae2871dc1a16"
Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.971165 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f93872847accd261ace129ff22ae2a979e2ba6bd139606428029ae2871dc1a16"} err="failed to get container status \"f93872847accd261ace129ff22ae2a979e2ba6bd139606428029ae2871dc1a16\": rpc error: code = NotFound desc = could not find container \"f93872847accd261ace129ff22ae2a979e2ba6bd139606428029ae2871dc1a16\": container with ID starting with f93872847accd261ace129ff22ae2a979e2ba6bd139606428029ae2871dc1a16 not found: ID does not exist"
Jan 28 16:09:21 crc kubenswrapper[4811]: I0128 16:09:21.976567 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6b4f5fc4f-j57cr"]
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.029659 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.045157 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.045415 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="a3e729db-bb8d-4a95-92f0-0061630e1967" containerName="nova-metadata-log" containerID="cri-o://b0843cebaca0372f82d1a769af9c87a8e04a38ef5a6b76924ef3d3869ca2d1d3" gracePeriod=30
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.045921 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="a3e729db-bb8d-4a95-92f0-0061630e1967" containerName="nova-metadata-metadata" containerID="cri-o://f17382d3afa91970aa3598b87d709c286f305f80ce80d542da7e90a480f491d8" gracePeriod=30
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.054244 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="e62bbb8a-9beb-41da-96c2-c81d6ba94f80" containerName="nova-api-log" containerID="cri-o://924c4f9f7ba58acdd29e7b25cdb7f32265686632fa646637292277c257c24b5f" gracePeriod=30
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.054591 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="e62bbb8a-9beb-41da-96c2-c81d6ba94f80" containerName="nova-api-api" containerID="cri-o://466c51b7bb6fc30544cfa4399863dbce99fa58138fb893df3c8ee97107a5df7a" gracePeriod=30
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.056612 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.256882 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
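The "SyncLoop DELETE" followed by "Killing container with a grace period" entries above are the kubelet honoring pod deletions from the API: each container gets SIGTERM and up to gracePeriod seconds (10s for the dnsmasq pod earlier, 30s for the nova pods here) before a forced kill. The API-side call that starts this looks roughly like the client-go sketch below; in-cluster config is assumed, with the namespace and pod name taken from the log.

    package main

    import (
        "context"

        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/rest"
    )

    func main() {
        cfg, err := rest.InClusterConfig() // assumes running inside the cluster
        if err != nil {
            panic(err)
        }
        cs := kubernetes.NewForConfigOrDie(cfg)

        grace := int64(30) // matches gracePeriod=30 in the log
        if err := cs.CoreV1().Pods("openstack").Delete(
            context.TODO(), "nova-metadata-0",
            metav1.DeleteOptions{GracePeriodSeconds: &grace},
        ); err != nil {
            panic(err)
        }
    }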
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.257010 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.286566 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-7kgb2"
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.353360 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="224a0600-f44e-482b-a6f0-c96aeb7c4e60" path="/var/lib/kubelet/pods/224a0600-f44e-482b-a6f0-c96aeb7c4e60/volumes"
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.459170 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d41fbd0a-fb75-4597-a9ed-0dff56b7b635-config-data\") pod \"d41fbd0a-fb75-4597-a9ed-0dff56b7b635\" (UID: \"d41fbd0a-fb75-4597-a9ed-0dff56b7b635\") "
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.459248 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xnkh7\" (UniqueName: \"kubernetes.io/projected/d41fbd0a-fb75-4597-a9ed-0dff56b7b635-kube-api-access-xnkh7\") pod \"d41fbd0a-fb75-4597-a9ed-0dff56b7b635\" (UID: \"d41fbd0a-fb75-4597-a9ed-0dff56b7b635\") "
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.459326 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d41fbd0a-fb75-4597-a9ed-0dff56b7b635-scripts\") pod \"d41fbd0a-fb75-4597-a9ed-0dff56b7b635\" (UID: \"d41fbd0a-fb75-4597-a9ed-0dff56b7b635\") "
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.459369 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d41fbd0a-fb75-4597-a9ed-0dff56b7b635-combined-ca-bundle\") pod \"d41fbd0a-fb75-4597-a9ed-0dff56b7b635\" (UID: \"d41fbd0a-fb75-4597-a9ed-0dff56b7b635\") "
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.464705 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d41fbd0a-fb75-4597-a9ed-0dff56b7b635-scripts" (OuterVolumeSpecName: "scripts") pod "d41fbd0a-fb75-4597-a9ed-0dff56b7b635" (UID: "d41fbd0a-fb75-4597-a9ed-0dff56b7b635"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.470504 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d41fbd0a-fb75-4597-a9ed-0dff56b7b635-kube-api-access-xnkh7" (OuterVolumeSpecName: "kube-api-access-xnkh7") pod "d41fbd0a-fb75-4597-a9ed-0dff56b7b635" (UID: "d41fbd0a-fb75-4597-a9ed-0dff56b7b635"). InnerVolumeSpecName "kube-api-access-xnkh7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.491539 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d41fbd0a-fb75-4597-a9ed-0dff56b7b635-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d41fbd0a-fb75-4597-a9ed-0dff56b7b635" (UID: "d41fbd0a-fb75-4597-a9ed-0dff56b7b635"). InnerVolumeSpecName "combined-ca-bundle".
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.506580 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d41fbd0a-fb75-4597-a9ed-0dff56b7b635-config-data" (OuterVolumeSpecName: "config-data") pod "d41fbd0a-fb75-4597-a9ed-0dff56b7b635" (UID: "d41fbd0a-fb75-4597-a9ed-0dff56b7b635"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.561458 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d41fbd0a-fb75-4597-a9ed-0dff56b7b635-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.561496 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d41fbd0a-fb75-4597-a9ed-0dff56b7b635-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.561508 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xnkh7\" (UniqueName: \"kubernetes.io/projected/d41fbd0a-fb75-4597-a9ed-0dff56b7b635-kube-api-access-xnkh7\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.561517 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d41fbd0a-fb75-4597-a9ed-0dff56b7b635-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.916816 4811 generic.go:334] "Generic (PLEG): container finished" podID="a3e729db-bb8d-4a95-92f0-0061630e1967" containerID="f17382d3afa91970aa3598b87d709c286f305f80ce80d542da7e90a480f491d8" exitCode=0 Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.917134 4811 generic.go:334] "Generic (PLEG): container finished" podID="a3e729db-bb8d-4a95-92f0-0061630e1967" containerID="b0843cebaca0372f82d1a769af9c87a8e04a38ef5a6b76924ef3d3869ca2d1d3" exitCode=143 Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.917199 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a3e729db-bb8d-4a95-92f0-0061630e1967","Type":"ContainerDied","Data":"f17382d3afa91970aa3598b87d709c286f305f80ce80d542da7e90a480f491d8"} Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.917227 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a3e729db-bb8d-4a95-92f0-0061630e1967","Type":"ContainerDied","Data":"b0843cebaca0372f82d1a769af9c87a8e04a38ef5a6b76924ef3d3869ca2d1d3"} Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.936955 4811 generic.go:334] "Generic (PLEG): container finished" podID="e62bbb8a-9beb-41da-96c2-c81d6ba94f80" containerID="924c4f9f7ba58acdd29e7b25cdb7f32265686632fa646637292277c257c24b5f" exitCode=143 Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.937040 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e62bbb8a-9beb-41da-96c2-c81d6ba94f80","Type":"ContainerDied","Data":"924c4f9f7ba58acdd29e7b25cdb7f32265686632fa646637292277c257c24b5f"} Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.954730 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-7kgb2"
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.954742 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-7kgb2" event={"ID":"d41fbd0a-fb75-4597-a9ed-0dff56b7b635","Type":"ContainerDied","Data":"a3cb3a2f97729347201d3151feb774dd86e493c3ffcc847999e5f7bf1b9a885e"}
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.954804 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a3cb3a2f97729347201d3151feb774dd86e493c3ffcc847999e5f7bf1b9a885e"
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.954796 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="8088f76a-3481-4874-9ec2-b392869e4600" containerName="nova-scheduler-scheduler" containerID="cri-o://e5392e216b02d50f10511c2e690d721240953f7e9305e191bff7e44bbcfb76fc" gracePeriod=30
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.973490 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.996719 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"]
Jan 28 16:09:22 crc kubenswrapper[4811]: E0128 16:09:22.997198 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d41fbd0a-fb75-4597-a9ed-0dff56b7b635" containerName="nova-cell1-conductor-db-sync"
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.997222 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d41fbd0a-fb75-4597-a9ed-0dff56b7b635" containerName="nova-cell1-conductor-db-sync"
Jan 28 16:09:22 crc kubenswrapper[4811]: E0128 16:09:22.997237 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="224a0600-f44e-482b-a6f0-c96aeb7c4e60" containerName="dnsmasq-dns"
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.997257 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="224a0600-f44e-482b-a6f0-c96aeb7c4e60" containerName="dnsmasq-dns"
Jan 28 16:09:22 crc kubenswrapper[4811]: E0128 16:09:22.997273 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="224a0600-f44e-482b-a6f0-c96aeb7c4e60" containerName="init"
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.997281 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="224a0600-f44e-482b-a6f0-c96aeb7c4e60" containerName="init"
Jan 28 16:09:22 crc kubenswrapper[4811]: E0128 16:09:22.997313 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3e729db-bb8d-4a95-92f0-0061630e1967" containerName="nova-metadata-log"
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.997323 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3e729db-bb8d-4a95-92f0-0061630e1967" containerName="nova-metadata-log"
Jan 28 16:09:22 crc kubenswrapper[4811]: E0128 16:09:22.997343 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d00b71d9-55f0-4ce4-9c27-1471372ef467" containerName="nova-manage"
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.997350 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d00b71d9-55f0-4ce4-9c27-1471372ef467" containerName="nova-manage"
Jan 28 16:09:22 crc kubenswrapper[4811]: E0128 16:09:22.997363 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3e729db-bb8d-4a95-92f0-0061630e1967" containerName="nova-metadata-metadata"
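The cpu_manager/memory_manager "RemoveStaleState" burst above fires when a new pod is admitted: resource-manager state belonging to pod UIDs that no longer exist (the old nova-metadata, dnsmasq, cell-mapping, and db-sync pods) is purged, and each E-line is paired with an I-line confirming the deleted assignment. A minimal sketch of that bookkeeping follows, with illustrative types rather than kubelet's.

    package main

    import "fmt"

    // removeStaleState drops per-container assignments whose pod UID is no
    // longer active, echoing cpu_manager.go's RemoveStaleState and
    // state_mem.go's "Deleted CPUSet assignment" lines.
    func removeStaleState(assignments map[string]map[string]string, active map[string]bool) {
        for podUID, containers := range assignments {
            if active[podUID] {
                continue
            }
            for name := range containers {
                fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n",
                    podUID, name)
            }
            delete(assignments, podUID)
        }
    }

    func main() {
        a := map[string]map[string]string{
            "a3e729db-bb8d-4a95-92f0-0061630e1967": {
                "nova-metadata-log":      "0-3", // CPU set values are made up
                "nova-metadata-metadata": "0-3",
            },
        }
        removeStaleState(a, map[string]bool{}) // no active pods keep their state
    }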
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.997370 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3e729db-bb8d-4a95-92f0-0061630e1967" containerName="nova-metadata-metadata"
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.997647 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="224a0600-f44e-482b-a6f0-c96aeb7c4e60" containerName="dnsmasq-dns"
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.997685 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3e729db-bb8d-4a95-92f0-0061630e1967" containerName="nova-metadata-metadata"
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.997695 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3e729db-bb8d-4a95-92f0-0061630e1967" containerName="nova-metadata-log"
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.997713 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d00b71d9-55f0-4ce4-9c27-1471372ef467" containerName="nova-manage"
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.997728 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d41fbd0a-fb75-4597-a9ed-0dff56b7b635" containerName="nova-cell1-conductor-db-sync"
Jan 28 16:09:22 crc kubenswrapper[4811]: I0128 16:09:22.998333 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.002136 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.041095 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.172702 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3e729db-bb8d-4a95-92f0-0061630e1967-nova-metadata-tls-certs\") pod \"a3e729db-bb8d-4a95-92f0-0061630e1967\" (UID: \"a3e729db-bb8d-4a95-92f0-0061630e1967\") "
Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.172776 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3e729db-bb8d-4a95-92f0-0061630e1967-logs\") pod \"a3e729db-bb8d-4a95-92f0-0061630e1967\" (UID: \"a3e729db-bb8d-4a95-92f0-0061630e1967\") "
Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.172803 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wzp46\" (UniqueName: \"kubernetes.io/projected/a3e729db-bb8d-4a95-92f0-0061630e1967-kube-api-access-wzp46\") pod \"a3e729db-bb8d-4a95-92f0-0061630e1967\" (UID: \"a3e729db-bb8d-4a95-92f0-0061630e1967\") "
Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.172831 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3e729db-bb8d-4a95-92f0-0061630e1967-combined-ca-bundle\") pod \"a3e729db-bb8d-4a95-92f0-0061630e1967\" (UID: \"a3e729db-bb8d-4a95-92f0-0061630e1967\") "
Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.172887 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3e729db-bb8d-4a95-92f0-0061630e1967-config-data\") pod \"a3e729db-bb8d-4a95-92f0-0061630e1967\" (UID: \"a3e729db-bb8d-4a95-92f0-0061630e1967\") "
Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.173173 4811
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94afc9c1-3ddc-4ad1-9df2-03c593f1d536-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"94afc9c1-3ddc-4ad1-9df2-03c593f1d536\") " pod="openstack/nova-cell1-conductor-0" Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.173267 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94afc9c1-3ddc-4ad1-9df2-03c593f1d536-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"94afc9c1-3ddc-4ad1-9df2-03c593f1d536\") " pod="openstack/nova-cell1-conductor-0" Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.173384 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzqqd\" (UniqueName: \"kubernetes.io/projected/94afc9c1-3ddc-4ad1-9df2-03c593f1d536-kube-api-access-dzqqd\") pod \"nova-cell1-conductor-0\" (UID: \"94afc9c1-3ddc-4ad1-9df2-03c593f1d536\") " pod="openstack/nova-cell1-conductor-0" Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.173391 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3e729db-bb8d-4a95-92f0-0061630e1967-logs" (OuterVolumeSpecName: "logs") pod "a3e729db-bb8d-4a95-92f0-0061630e1967" (UID: "a3e729db-bb8d-4a95-92f0-0061630e1967"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.173581 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3e729db-bb8d-4a95-92f0-0061630e1967-logs\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.181214 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3e729db-bb8d-4a95-92f0-0061630e1967-kube-api-access-wzp46" (OuterVolumeSpecName: "kube-api-access-wzp46") pod "a3e729db-bb8d-4a95-92f0-0061630e1967" (UID: "a3e729db-bb8d-4a95-92f0-0061630e1967"). InnerVolumeSpecName "kube-api-access-wzp46". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.203586 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3e729db-bb8d-4a95-92f0-0061630e1967-config-data" (OuterVolumeSpecName: "config-data") pod "a3e729db-bb8d-4a95-92f0-0061630e1967" (UID: "a3e729db-bb8d-4a95-92f0-0061630e1967"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.211888 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3e729db-bb8d-4a95-92f0-0061630e1967-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a3e729db-bb8d-4a95-92f0-0061630e1967" (UID: "a3e729db-bb8d-4a95-92f0-0061630e1967"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.235853 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3e729db-bb8d-4a95-92f0-0061630e1967-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "a3e729db-bb8d-4a95-92f0-0061630e1967" (UID: "a3e729db-bb8d-4a95-92f0-0061630e1967"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.274745 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94afc9c1-3ddc-4ad1-9df2-03c593f1d536-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"94afc9c1-3ddc-4ad1-9df2-03c593f1d536\") " pod="openstack/nova-cell1-conductor-0" Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.275131 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzqqd\" (UniqueName: \"kubernetes.io/projected/94afc9c1-3ddc-4ad1-9df2-03c593f1d536-kube-api-access-dzqqd\") pod \"nova-cell1-conductor-0\" (UID: \"94afc9c1-3ddc-4ad1-9df2-03c593f1d536\") " pod="openstack/nova-cell1-conductor-0" Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.275240 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94afc9c1-3ddc-4ad1-9df2-03c593f1d536-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"94afc9c1-3ddc-4ad1-9df2-03c593f1d536\") " pod="openstack/nova-cell1-conductor-0" Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.275390 4811 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3e729db-bb8d-4a95-92f0-0061630e1967-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.275477 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wzp46\" (UniqueName: \"kubernetes.io/projected/a3e729db-bb8d-4a95-92f0-0061630e1967-kube-api-access-wzp46\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.275533 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3e729db-bb8d-4a95-92f0-0061630e1967-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.275607 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3e729db-bb8d-4a95-92f0-0061630e1967-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.279660 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94afc9c1-3ddc-4ad1-9df2-03c593f1d536-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"94afc9c1-3ddc-4ad1-9df2-03c593f1d536\") " pod="openstack/nova-cell1-conductor-0" Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.280317 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94afc9c1-3ddc-4ad1-9df2-03c593f1d536-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"94afc9c1-3ddc-4ad1-9df2-03c593f1d536\") " pod="openstack/nova-cell1-conductor-0" Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.291588 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzqqd\" (UniqueName: \"kubernetes.io/projected/94afc9c1-3ddc-4ad1-9df2-03c593f1d536-kube-api-access-dzqqd\") pod \"nova-cell1-conductor-0\" (UID: \"94afc9c1-3ddc-4ad1-9df2-03c593f1d536\") " pod="openstack/nova-cell1-conductor-0" Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.365581 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.815308 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 28 16:09:23 crc kubenswrapper[4811]: W0128 16:09:23.824044 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod94afc9c1_3ddc_4ad1_9df2_03c593f1d536.slice/crio-7c0b1ce8170f351d8458ca6d6586f555434faafaa0a62af39ce6252f9fd301ea WatchSource:0}: Error finding container 7c0b1ce8170f351d8458ca6d6586f555434faafaa0a62af39ce6252f9fd301ea: Status 404 returned error can't find the container with id 7c0b1ce8170f351d8458ca6d6586f555434faafaa0a62af39ce6252f9fd301ea Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.964939 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a3e729db-bb8d-4a95-92f0-0061630e1967","Type":"ContainerDied","Data":"1fc9a107a46cb976a9e04c18a4b79bb897cae549d0947eee6aca8a7b3da1d498"} Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.965274 4811 scope.go:117] "RemoveContainer" containerID="f17382d3afa91970aa3598b87d709c286f305f80ce80d542da7e90a480f491d8" Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.965395 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.966527 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"94afc9c1-3ddc-4ad1-9df2-03c593f1d536","Type":"ContainerStarted","Data":"7c0b1ce8170f351d8458ca6d6586f555434faafaa0a62af39ce6252f9fd301ea"} Jan 28 16:09:23 crc kubenswrapper[4811]: I0128 16:09:23.994028 4811 scope.go:117] "RemoveContainer" containerID="b0843cebaca0372f82d1a769af9c87a8e04a38ef5a6b76924ef3d3869ca2d1d3" Jan 28 16:09:24 crc kubenswrapper[4811]: I0128 16:09:24.003473 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 28 16:09:24 crc kubenswrapper[4811]: I0128 16:09:24.011082 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 28 16:09:24 crc kubenswrapper[4811]: I0128 16:09:24.019076 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 28 16:09:24 crc kubenswrapper[4811]: I0128 16:09:24.020846 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 28 16:09:24 crc kubenswrapper[4811]: I0128 16:09:24.023046 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Jan 28 16:09:24 crc kubenswrapper[4811]: I0128 16:09:24.023811 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 28 16:09:24 crc kubenswrapper[4811]: I0128 16:09:24.050673 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 28 16:09:24 crc kubenswrapper[4811]: I0128 16:09:24.104546 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d222430c-ef7f-46cd-b963-c500fec7bcc5-config-data\") pod \"nova-metadata-0\" (UID: \"d222430c-ef7f-46cd-b963-c500fec7bcc5\") " pod="openstack/nova-metadata-0" Jan 28 16:09:24 crc kubenswrapper[4811]: I0128 16:09:24.104628 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d222430c-ef7f-46cd-b963-c500fec7bcc5-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d222430c-ef7f-46cd-b963-c500fec7bcc5\") " pod="openstack/nova-metadata-0" Jan 28 16:09:24 crc kubenswrapper[4811]: I0128 16:09:24.104672 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8c4qh\" (UniqueName: \"kubernetes.io/projected/d222430c-ef7f-46cd-b963-c500fec7bcc5-kube-api-access-8c4qh\") pod \"nova-metadata-0\" (UID: \"d222430c-ef7f-46cd-b963-c500fec7bcc5\") " pod="openstack/nova-metadata-0" Jan 28 16:09:24 crc kubenswrapper[4811]: I0128 16:09:24.104740 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d222430c-ef7f-46cd-b963-c500fec7bcc5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d222430c-ef7f-46cd-b963-c500fec7bcc5\") " pod="openstack/nova-metadata-0" Jan 28 16:09:24 crc kubenswrapper[4811]: I0128 16:09:24.104762 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d222430c-ef7f-46cd-b963-c500fec7bcc5-logs\") pod \"nova-metadata-0\" (UID: \"d222430c-ef7f-46cd-b963-c500fec7bcc5\") " pod="openstack/nova-metadata-0" Jan 28 16:09:24 crc kubenswrapper[4811]: I0128 16:09:24.205692 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d222430c-ef7f-46cd-b963-c500fec7bcc5-config-data\") pod \"nova-metadata-0\" (UID: \"d222430c-ef7f-46cd-b963-c500fec7bcc5\") " pod="openstack/nova-metadata-0" Jan 28 16:09:24 crc kubenswrapper[4811]: I0128 16:09:24.205773 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d222430c-ef7f-46cd-b963-c500fec7bcc5-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d222430c-ef7f-46cd-b963-c500fec7bcc5\") " pod="openstack/nova-metadata-0" Jan 28 16:09:24 crc kubenswrapper[4811]: I0128 16:09:24.205797 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8c4qh\" (UniqueName: \"kubernetes.io/projected/d222430c-ef7f-46cd-b963-c500fec7bcc5-kube-api-access-8c4qh\") pod \"nova-metadata-0\" (UID: \"d222430c-ef7f-46cd-b963-c500fec7bcc5\") " 
pod="openstack/nova-metadata-0" Jan 28 16:09:24 crc kubenswrapper[4811]: I0128 16:09:24.205822 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d222430c-ef7f-46cd-b963-c500fec7bcc5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d222430c-ef7f-46cd-b963-c500fec7bcc5\") " pod="openstack/nova-metadata-0" Jan 28 16:09:24 crc kubenswrapper[4811]: I0128 16:09:24.205838 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d222430c-ef7f-46cd-b963-c500fec7bcc5-logs\") pod \"nova-metadata-0\" (UID: \"d222430c-ef7f-46cd-b963-c500fec7bcc5\") " pod="openstack/nova-metadata-0" Jan 28 16:09:24 crc kubenswrapper[4811]: I0128 16:09:24.206341 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d222430c-ef7f-46cd-b963-c500fec7bcc5-logs\") pod \"nova-metadata-0\" (UID: \"d222430c-ef7f-46cd-b963-c500fec7bcc5\") " pod="openstack/nova-metadata-0" Jan 28 16:09:24 crc kubenswrapper[4811]: I0128 16:09:24.211379 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d222430c-ef7f-46cd-b963-c500fec7bcc5-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d222430c-ef7f-46cd-b963-c500fec7bcc5\") " pod="openstack/nova-metadata-0" Jan 28 16:09:24 crc kubenswrapper[4811]: I0128 16:09:24.211775 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d222430c-ef7f-46cd-b963-c500fec7bcc5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d222430c-ef7f-46cd-b963-c500fec7bcc5\") " pod="openstack/nova-metadata-0" Jan 28 16:09:24 crc kubenswrapper[4811]: I0128 16:09:24.214944 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d222430c-ef7f-46cd-b963-c500fec7bcc5-config-data\") pod \"nova-metadata-0\" (UID: \"d222430c-ef7f-46cd-b963-c500fec7bcc5\") " pod="openstack/nova-metadata-0" Jan 28 16:09:24 crc kubenswrapper[4811]: I0128 16:09:24.227892 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8c4qh\" (UniqueName: \"kubernetes.io/projected/d222430c-ef7f-46cd-b963-c500fec7bcc5-kube-api-access-8c4qh\") pod \"nova-metadata-0\" (UID: \"d222430c-ef7f-46cd-b963-c500fec7bcc5\") " pod="openstack/nova-metadata-0" Jan 28 16:09:24 crc kubenswrapper[4811]: I0128 16:09:24.355255 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3e729db-bb8d-4a95-92f0-0061630e1967" path="/var/lib/kubelet/pods/a3e729db-bb8d-4a95-92f0-0061630e1967/volumes" Jan 28 16:09:24 crc kubenswrapper[4811]: I0128 16:09:24.404640 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 28 16:09:24 crc kubenswrapper[4811]: I0128 16:09:24.883222 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 28 16:09:24 crc kubenswrapper[4811]: I0128 16:09:24.976768 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d222430c-ef7f-46cd-b963-c500fec7bcc5","Type":"ContainerStarted","Data":"a837401250304d8fe6b32878e320eeebc261ca5dd338fe43b2776e87eb8ebc32"} Jan 28 16:09:24 crc kubenswrapper[4811]: I0128 16:09:24.982401 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"94afc9c1-3ddc-4ad1-9df2-03c593f1d536","Type":"ContainerStarted","Data":"16ecbb402df17bba24033b41f310dbf0a19457f1f7fd2b1097a18cc03319751f"} Jan 28 16:09:24 crc kubenswrapper[4811]: I0128 16:09:24.983405 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Jan 28 16:09:25 crc kubenswrapper[4811]: E0128 16:09:25.603417 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e5392e216b02d50f10511c2e690d721240953f7e9305e191bff7e44bbcfb76fc" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 28 16:09:25 crc kubenswrapper[4811]: E0128 16:09:25.604873 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e5392e216b02d50f10511c2e690d721240953f7e9305e191bff7e44bbcfb76fc" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 28 16:09:25 crc kubenswrapper[4811]: E0128 16:09:25.605999 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e5392e216b02d50f10511c2e690d721240953f7e9305e191bff7e44bbcfb76fc" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 28 16:09:25 crc kubenswrapper[4811]: E0128 16:09:25.606033 4811 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="8088f76a-3481-4874-9ec2-b392869e4600" containerName="nova-scheduler-scheduler" Jan 28 16:09:25 crc kubenswrapper[4811]: I0128 16:09:25.993197 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d222430c-ef7f-46cd-b963-c500fec7bcc5","Type":"ContainerStarted","Data":"265aa5c9fcb4bc9488c9027fffb3e87b0793f23d0e60be446a90ca677d4a1e3d"} Jan 28 16:09:25 crc kubenswrapper[4811]: I0128 16:09:25.993248 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d222430c-ef7f-46cd-b963-c500fec7bcc5","Type":"ContainerStarted","Data":"df9cffff43f236bf7b6aac1086fe3a61f9363207fc596a077ff0dc9718fdb5c3"} Jan 28 16:09:26 crc kubenswrapper[4811]: I0128 16:09:26.027939 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=4.027923729 podStartE2EDuration="4.027923729s" podCreationTimestamp="2026-01-28 16:09:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-01-28 16:09:25.006309157 +0000 UTC m=+1457.760672740" watchObservedRunningTime="2026-01-28 16:09:26.027923729 +0000 UTC m=+1458.782287302" Jan 28 16:09:26 crc kubenswrapper[4811]: I0128 16:09:26.032020 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.032006649 podStartE2EDuration="3.032006649s" podCreationTimestamp="2026-01-28 16:09:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:09:26.026898711 +0000 UTC m=+1458.781262304" watchObservedRunningTime="2026-01-28 16:09:26.032006649 +0000 UTC m=+1458.786370232" Jan 28 16:09:27 crc kubenswrapper[4811]: I0128 16:09:27.646292 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 28 16:09:27 crc kubenswrapper[4811]: I0128 16:09:27.771851 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8088f76a-3481-4874-9ec2-b392869e4600-combined-ca-bundle\") pod \"8088f76a-3481-4874-9ec2-b392869e4600\" (UID: \"8088f76a-3481-4874-9ec2-b392869e4600\") " Jan 28 16:09:27 crc kubenswrapper[4811]: I0128 16:09:27.772159 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8qqht\" (UniqueName: \"kubernetes.io/projected/8088f76a-3481-4874-9ec2-b392869e4600-kube-api-access-8qqht\") pod \"8088f76a-3481-4874-9ec2-b392869e4600\" (UID: \"8088f76a-3481-4874-9ec2-b392869e4600\") " Jan 28 16:09:27 crc kubenswrapper[4811]: I0128 16:09:27.772391 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8088f76a-3481-4874-9ec2-b392869e4600-config-data\") pod \"8088f76a-3481-4874-9ec2-b392869e4600\" (UID: \"8088f76a-3481-4874-9ec2-b392869e4600\") " Jan 28 16:09:27 crc kubenswrapper[4811]: I0128 16:09:27.778834 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8088f76a-3481-4874-9ec2-b392869e4600-kube-api-access-8qqht" (OuterVolumeSpecName: "kube-api-access-8qqht") pod "8088f76a-3481-4874-9ec2-b392869e4600" (UID: "8088f76a-3481-4874-9ec2-b392869e4600"). InnerVolumeSpecName "kube-api-access-8qqht". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:09:27 crc kubenswrapper[4811]: I0128 16:09:27.798119 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8088f76a-3481-4874-9ec2-b392869e4600-config-data" (OuterVolumeSpecName: "config-data") pod "8088f76a-3481-4874-9ec2-b392869e4600" (UID: "8088f76a-3481-4874-9ec2-b392869e4600"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:09:27 crc kubenswrapper[4811]: I0128 16:09:27.803992 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8088f76a-3481-4874-9ec2-b392869e4600-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8088f76a-3481-4874-9ec2-b392869e4600" (UID: "8088f76a-3481-4874-9ec2-b392869e4600"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:09:27 crc kubenswrapper[4811]: I0128 16:09:27.872839 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 28 16:09:27 crc kubenswrapper[4811]: I0128 16:09:27.878300 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8088f76a-3481-4874-9ec2-b392869e4600-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:27 crc kubenswrapper[4811]: I0128 16:09:27.878359 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8088f76a-3481-4874-9ec2-b392869e4600-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:27 crc kubenswrapper[4811]: I0128 16:09:27.878376 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8qqht\" (UniqueName: \"kubernetes.io/projected/8088f76a-3481-4874-9ec2-b392869e4600-kube-api-access-8qqht\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:27 crc kubenswrapper[4811]: I0128 16:09:27.981722 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e62bbb8a-9beb-41da-96c2-c81d6ba94f80-logs\") pod \"e62bbb8a-9beb-41da-96c2-c81d6ba94f80\" (UID: \"e62bbb8a-9beb-41da-96c2-c81d6ba94f80\") " Jan 28 16:09:27 crc kubenswrapper[4811]: I0128 16:09:27.981779 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e62bbb8a-9beb-41da-96c2-c81d6ba94f80-config-data\") pod \"e62bbb8a-9beb-41da-96c2-c81d6ba94f80\" (UID: \"e62bbb8a-9beb-41da-96c2-c81d6ba94f80\") " Jan 28 16:09:27 crc kubenswrapper[4811]: I0128 16:09:27.981877 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e62bbb8a-9beb-41da-96c2-c81d6ba94f80-combined-ca-bundle\") pod \"e62bbb8a-9beb-41da-96c2-c81d6ba94f80\" (UID: \"e62bbb8a-9beb-41da-96c2-c81d6ba94f80\") " Jan 28 16:09:27 crc kubenswrapper[4811]: I0128 16:09:27.981960 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kmnqb\" (UniqueName: \"kubernetes.io/projected/e62bbb8a-9beb-41da-96c2-c81d6ba94f80-kube-api-access-kmnqb\") pod \"e62bbb8a-9beb-41da-96c2-c81d6ba94f80\" (UID: \"e62bbb8a-9beb-41da-96c2-c81d6ba94f80\") " Jan 28 16:09:27 crc kubenswrapper[4811]: I0128 16:09:27.982557 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e62bbb8a-9beb-41da-96c2-c81d6ba94f80-logs" (OuterVolumeSpecName: "logs") pod "e62bbb8a-9beb-41da-96c2-c81d6ba94f80" (UID: "e62bbb8a-9beb-41da-96c2-c81d6ba94f80"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:09:27 crc kubenswrapper[4811]: I0128 16:09:27.986056 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e62bbb8a-9beb-41da-96c2-c81d6ba94f80-kube-api-access-kmnqb" (OuterVolumeSpecName: "kube-api-access-kmnqb") pod "e62bbb8a-9beb-41da-96c2-c81d6ba94f80" (UID: "e62bbb8a-9beb-41da-96c2-c81d6ba94f80"). InnerVolumeSpecName "kube-api-access-kmnqb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.017291 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e62bbb8a-9beb-41da-96c2-c81d6ba94f80-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e62bbb8a-9beb-41da-96c2-c81d6ba94f80" (UID: "e62bbb8a-9beb-41da-96c2-c81d6ba94f80"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.018529 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.018565 4811 generic.go:334] "Generic (PLEG): container finished" podID="8088f76a-3481-4874-9ec2-b392869e4600" containerID="e5392e216b02d50f10511c2e690d721240953f7e9305e191bff7e44bbcfb76fc" exitCode=0 Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.018524 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8088f76a-3481-4874-9ec2-b392869e4600","Type":"ContainerDied","Data":"e5392e216b02d50f10511c2e690d721240953f7e9305e191bff7e44bbcfb76fc"} Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.018658 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8088f76a-3481-4874-9ec2-b392869e4600","Type":"ContainerDied","Data":"ec5801da1a666e108dbb8d55df3f3691c3ad2811c5246c2acc79978182258498"} Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.018691 4811 scope.go:117] "RemoveContainer" containerID="e5392e216b02d50f10511c2e690d721240953f7e9305e191bff7e44bbcfb76fc" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.020883 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e62bbb8a-9beb-41da-96c2-c81d6ba94f80-config-data" (OuterVolumeSpecName: "config-data") pod "e62bbb8a-9beb-41da-96c2-c81d6ba94f80" (UID: "e62bbb8a-9beb-41da-96c2-c81d6ba94f80"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.022866 4811 generic.go:334] "Generic (PLEG): container finished" podID="e62bbb8a-9beb-41da-96c2-c81d6ba94f80" containerID="466c51b7bb6fc30544cfa4399863dbce99fa58138fb893df3c8ee97107a5df7a" exitCode=0 Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.022905 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e62bbb8a-9beb-41da-96c2-c81d6ba94f80","Type":"ContainerDied","Data":"466c51b7bb6fc30544cfa4399863dbce99fa58138fb893df3c8ee97107a5df7a"} Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.022935 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e62bbb8a-9beb-41da-96c2-c81d6ba94f80","Type":"ContainerDied","Data":"3d3c45c12394211285145829171a6969ac2651ba3a423161684251fce9e61126"} Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.023089 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.052375 4811 scope.go:117] "RemoveContainer" containerID="e5392e216b02d50f10511c2e690d721240953f7e9305e191bff7e44bbcfb76fc" Jan 28 16:09:28 crc kubenswrapper[4811]: E0128 16:09:28.058228 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5392e216b02d50f10511c2e690d721240953f7e9305e191bff7e44bbcfb76fc\": container with ID starting with e5392e216b02d50f10511c2e690d721240953f7e9305e191bff7e44bbcfb76fc not found: ID does not exist" containerID="e5392e216b02d50f10511c2e690d721240953f7e9305e191bff7e44bbcfb76fc" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.058275 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5392e216b02d50f10511c2e690d721240953f7e9305e191bff7e44bbcfb76fc"} err="failed to get container status \"e5392e216b02d50f10511c2e690d721240953f7e9305e191bff7e44bbcfb76fc\": rpc error: code = NotFound desc = could not find container \"e5392e216b02d50f10511c2e690d721240953f7e9305e191bff7e44bbcfb76fc\": container with ID starting with e5392e216b02d50f10511c2e690d721240953f7e9305e191bff7e44bbcfb76fc not found: ID does not exist" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.058307 4811 scope.go:117] "RemoveContainer" containerID="466c51b7bb6fc30544cfa4399863dbce99fa58138fb893df3c8ee97107a5df7a" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.068774 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.084574 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kmnqb\" (UniqueName: \"kubernetes.io/projected/e62bbb8a-9beb-41da-96c2-c81d6ba94f80-kube-api-access-kmnqb\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.084621 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e62bbb8a-9beb-41da-96c2-c81d6ba94f80-logs\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.084634 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e62bbb8a-9beb-41da-96c2-c81d6ba94f80-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.084645 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e62bbb8a-9beb-41da-96c2-c81d6ba94f80-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.099546 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.104319 4811 scope.go:117] "RemoveContainer" containerID="924c4f9f7ba58acdd29e7b25cdb7f32265686632fa646637292277c257c24b5f" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.106348 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.114900 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.122217 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 28 16:09:28 crc kubenswrapper[4811]: E0128 16:09:28.122694 4811 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="e62bbb8a-9beb-41da-96c2-c81d6ba94f80" containerName="nova-api-log" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.122715 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="e62bbb8a-9beb-41da-96c2-c81d6ba94f80" containerName="nova-api-log" Jan 28 16:09:28 crc kubenswrapper[4811]: E0128 16:09:28.122741 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8088f76a-3481-4874-9ec2-b392869e4600" containerName="nova-scheduler-scheduler" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.122749 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="8088f76a-3481-4874-9ec2-b392869e4600" containerName="nova-scheduler-scheduler" Jan 28 16:09:28 crc kubenswrapper[4811]: E0128 16:09:28.122761 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e62bbb8a-9beb-41da-96c2-c81d6ba94f80" containerName="nova-api-api" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.122770 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="e62bbb8a-9beb-41da-96c2-c81d6ba94f80" containerName="nova-api-api" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.122965 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="e62bbb8a-9beb-41da-96c2-c81d6ba94f80" containerName="nova-api-api" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.122986 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="8088f76a-3481-4874-9ec2-b392869e4600" containerName="nova-scheduler-scheduler" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.123003 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="e62bbb8a-9beb-41da-96c2-c81d6ba94f80" containerName="nova-api-log" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.124108 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.129996 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.131237 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.132473 4811 scope.go:117] "RemoveContainer" containerID="466c51b7bb6fc30544cfa4399863dbce99fa58138fb893df3c8ee97107a5df7a" Jan 28 16:09:28 crc kubenswrapper[4811]: E0128 16:09:28.141315 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"466c51b7bb6fc30544cfa4399863dbce99fa58138fb893df3c8ee97107a5df7a\": container with ID starting with 466c51b7bb6fc30544cfa4399863dbce99fa58138fb893df3c8ee97107a5df7a not found: ID does not exist" containerID="466c51b7bb6fc30544cfa4399863dbce99fa58138fb893df3c8ee97107a5df7a" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.141367 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"466c51b7bb6fc30544cfa4399863dbce99fa58138fb893df3c8ee97107a5df7a"} err="failed to get container status \"466c51b7bb6fc30544cfa4399863dbce99fa58138fb893df3c8ee97107a5df7a\": rpc error: code = NotFound desc = could not find container \"466c51b7bb6fc30544cfa4399863dbce99fa58138fb893df3c8ee97107a5df7a\": container with ID starting with 466c51b7bb6fc30544cfa4399863dbce99fa58138fb893df3c8ee97107a5df7a not found: ID does not exist" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.141411 4811 scope.go:117] "RemoveContainer" containerID="924c4f9f7ba58acdd29e7b25cdb7f32265686632fa646637292277c257c24b5f" Jan 28 16:09:28 crc kubenswrapper[4811]: E0128 16:09:28.141919 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"924c4f9f7ba58acdd29e7b25cdb7f32265686632fa646637292277c257c24b5f\": container with ID starting with 924c4f9f7ba58acdd29e7b25cdb7f32265686632fa646637292277c257c24b5f not found: ID does not exist" containerID="924c4f9f7ba58acdd29e7b25cdb7f32265686632fa646637292277c257c24b5f" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.141960 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"924c4f9f7ba58acdd29e7b25cdb7f32265686632fa646637292277c257c24b5f"} err="failed to get container status \"924c4f9f7ba58acdd29e7b25cdb7f32265686632fa646637292277c257c24b5f\": rpc error: code = NotFound desc = could not find container \"924c4f9f7ba58acdd29e7b25cdb7f32265686632fa646637292277c257c24b5f\": container with ID starting with 924c4f9f7ba58acdd29e7b25cdb7f32265686632fa646637292277c257c24b5f not found: ID does not exist" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.154065 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.155355 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.165064 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.166530 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.288397 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31379f69-79a1-4684-aab1-3dc6cde7f876-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"31379f69-79a1-4684-aab1-3dc6cde7f876\") " pod="openstack/nova-scheduler-0" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.288648 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45l7p\" (UniqueName: \"kubernetes.io/projected/31379f69-79a1-4684-aab1-3dc6cde7f876-kube-api-access-45l7p\") pod \"nova-scheduler-0\" (UID: \"31379f69-79a1-4684-aab1-3dc6cde7f876\") " pod="openstack/nova-scheduler-0" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.288719 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/009dc16a-d4f9-44a5-9d97-caaca19d6fe0-config-data\") pod \"nova-api-0\" (UID: \"009dc16a-d4f9-44a5-9d97-caaca19d6fe0\") " pod="openstack/nova-api-0" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.288882 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7lts\" (UniqueName: \"kubernetes.io/projected/009dc16a-d4f9-44a5-9d97-caaca19d6fe0-kube-api-access-w7lts\") pod \"nova-api-0\" (UID: \"009dc16a-d4f9-44a5-9d97-caaca19d6fe0\") " pod="openstack/nova-api-0" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.288988 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31379f69-79a1-4684-aab1-3dc6cde7f876-config-data\") pod \"nova-scheduler-0\" (UID: \"31379f69-79a1-4684-aab1-3dc6cde7f876\") " pod="openstack/nova-scheduler-0" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.289147 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/009dc16a-d4f9-44a5-9d97-caaca19d6fe0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"009dc16a-d4f9-44a5-9d97-caaca19d6fe0\") " pod="openstack/nova-api-0" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.289206 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/009dc16a-d4f9-44a5-9d97-caaca19d6fe0-logs\") pod \"nova-api-0\" (UID: \"009dc16a-d4f9-44a5-9d97-caaca19d6fe0\") " pod="openstack/nova-api-0" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.350198 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8088f76a-3481-4874-9ec2-b392869e4600" path="/var/lib/kubelet/pods/8088f76a-3481-4874-9ec2-b392869e4600/volumes" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.350891 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e62bbb8a-9beb-41da-96c2-c81d6ba94f80" path="/var/lib/kubelet/pods/e62bbb8a-9beb-41da-96c2-c81d6ba94f80/volumes" Jan 28 16:09:28 crc 
kubenswrapper[4811]: I0128 16:09:28.391276 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45l7p\" (UniqueName: \"kubernetes.io/projected/31379f69-79a1-4684-aab1-3dc6cde7f876-kube-api-access-45l7p\") pod \"nova-scheduler-0\" (UID: \"31379f69-79a1-4684-aab1-3dc6cde7f876\") " pod="openstack/nova-scheduler-0" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.391333 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/009dc16a-d4f9-44a5-9d97-caaca19d6fe0-config-data\") pod \"nova-api-0\" (UID: \"009dc16a-d4f9-44a5-9d97-caaca19d6fe0\") " pod="openstack/nova-api-0" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.391426 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7lts\" (UniqueName: \"kubernetes.io/projected/009dc16a-d4f9-44a5-9d97-caaca19d6fe0-kube-api-access-w7lts\") pod \"nova-api-0\" (UID: \"009dc16a-d4f9-44a5-9d97-caaca19d6fe0\") " pod="openstack/nova-api-0" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.392186 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31379f69-79a1-4684-aab1-3dc6cde7f876-config-data\") pod \"nova-scheduler-0\" (UID: \"31379f69-79a1-4684-aab1-3dc6cde7f876\") " pod="openstack/nova-scheduler-0" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.392298 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/009dc16a-d4f9-44a5-9d97-caaca19d6fe0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"009dc16a-d4f9-44a5-9d97-caaca19d6fe0\") " pod="openstack/nova-api-0" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.392371 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/009dc16a-d4f9-44a5-9d97-caaca19d6fe0-logs\") pod \"nova-api-0\" (UID: \"009dc16a-d4f9-44a5-9d97-caaca19d6fe0\") " pod="openstack/nova-api-0" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.392713 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/009dc16a-d4f9-44a5-9d97-caaca19d6fe0-logs\") pod \"nova-api-0\" (UID: \"009dc16a-d4f9-44a5-9d97-caaca19d6fe0\") " pod="openstack/nova-api-0" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.392809 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31379f69-79a1-4684-aab1-3dc6cde7f876-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"31379f69-79a1-4684-aab1-3dc6cde7f876\") " pod="openstack/nova-scheduler-0" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.395566 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.397381 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31379f69-79a1-4684-aab1-3dc6cde7f876-config-data\") pod \"nova-scheduler-0\" (UID: \"31379f69-79a1-4684-aab1-3dc6cde7f876\") " pod="openstack/nova-scheduler-0" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.398149 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/009dc16a-d4f9-44a5-9d97-caaca19d6fe0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"009dc16a-d4f9-44a5-9d97-caaca19d6fe0\") " pod="openstack/nova-api-0" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.398552 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/009dc16a-d4f9-44a5-9d97-caaca19d6fe0-config-data\") pod \"nova-api-0\" (UID: \"009dc16a-d4f9-44a5-9d97-caaca19d6fe0\") " pod="openstack/nova-api-0" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.399259 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31379f69-79a1-4684-aab1-3dc6cde7f876-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"31379f69-79a1-4684-aab1-3dc6cde7f876\") " pod="openstack/nova-scheduler-0" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.410843 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45l7p\" (UniqueName: \"kubernetes.io/projected/31379f69-79a1-4684-aab1-3dc6cde7f876-kube-api-access-45l7p\") pod \"nova-scheduler-0\" (UID: \"31379f69-79a1-4684-aab1-3dc6cde7f876\") " pod="openstack/nova-scheduler-0" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.416960 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7lts\" (UniqueName: \"kubernetes.io/projected/009dc16a-d4f9-44a5-9d97-caaca19d6fe0-kube-api-access-w7lts\") pod \"nova-api-0\" (UID: \"009dc16a-d4f9-44a5-9d97-caaca19d6fe0\") " pod="openstack/nova-api-0" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.450621 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.482883 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 28 16:09:28 crc kubenswrapper[4811]: I0128 16:09:28.963845 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 28 16:09:28 crc kubenswrapper[4811]: W0128 16:09:28.966657 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod009dc16a_d4f9_44a5_9d97_caaca19d6fe0.slice/crio-efe49353f0a898509f27982132edb5700386b97942ba858235aceee554679ff2 WatchSource:0}: Error finding container efe49353f0a898509f27982132edb5700386b97942ba858235aceee554679ff2: Status 404 returned error can't find the container with id efe49353f0a898509f27982132edb5700386b97942ba858235aceee554679ff2 Jan 28 16:09:29 crc kubenswrapper[4811]: I0128 16:09:29.004401 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Jan 28 16:09:29 crc kubenswrapper[4811]: I0128 16:09:29.061795 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 28 16:09:29 crc kubenswrapper[4811]: I0128 16:09:29.091852 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"009dc16a-d4f9-44a5-9d97-caaca19d6fe0","Type":"ContainerStarted","Data":"efe49353f0a898509f27982132edb5700386b97942ba858235aceee554679ff2"} Jan 28 16:09:29 crc kubenswrapper[4811]: W0128 16:09:29.091956 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod31379f69_79a1_4684_aab1_3dc6cde7f876.slice/crio-9db3be0f5d2d26d3d9e8da58fdb7417a1c96579b0697903c035e93e64c047e10 WatchSource:0}: Error finding container 9db3be0f5d2d26d3d9e8da58fdb7417a1c96579b0697903c035e93e64c047e10: Status 404 returned error can't find the container with id 9db3be0f5d2d26d3d9e8da58fdb7417a1c96579b0697903c035e93e64c047e10 Jan 28 16:09:29 crc kubenswrapper[4811]: I0128 16:09:29.405378 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 28 16:09:29 crc kubenswrapper[4811]: I0128 16:09:29.406826 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 28 16:09:30 crc kubenswrapper[4811]: I0128 16:09:30.130588 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"31379f69-79a1-4684-aab1-3dc6cde7f876","Type":"ContainerStarted","Data":"a7c49aa51e51a45b4ee2c642909c851092986608cbe6ccfc70b994dcb0532b7e"} Jan 28 16:09:30 crc kubenswrapper[4811]: I0128 16:09:30.132629 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"31379f69-79a1-4684-aab1-3dc6cde7f876","Type":"ContainerStarted","Data":"9db3be0f5d2d26d3d9e8da58fdb7417a1c96579b0697903c035e93e64c047e10"} Jan 28 16:09:30 crc kubenswrapper[4811]: I0128 16:09:30.140570 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"009dc16a-d4f9-44a5-9d97-caaca19d6fe0","Type":"ContainerStarted","Data":"ab2d650179f0753c6a2187d31f7407a99a03ce6d2e6ca9683099831903045085"} Jan 28 16:09:30 crc kubenswrapper[4811]: I0128 16:09:30.140616 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"009dc16a-d4f9-44a5-9d97-caaca19d6fe0","Type":"ContainerStarted","Data":"8e18d0242b412d0be1d3b056b0585556044366bd6e81723846f9c5e5c26893f3"} Jan 28 16:09:30 crc kubenswrapper[4811]: I0128 16:09:30.184706 4811 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.18468525 podStartE2EDuration="2.18468525s" podCreationTimestamp="2026-01-28 16:09:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:09:30.163956258 +0000 UTC m=+1462.918319841" watchObservedRunningTime="2026-01-28 16:09:30.18468525 +0000 UTC m=+1462.939048833" Jan 28 16:09:30 crc kubenswrapper[4811]: I0128 16:09:30.192382 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.192360098 podStartE2EDuration="2.192360098s" podCreationTimestamp="2026-01-28 16:09:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:09:30.190109128 +0000 UTC m=+1462.944472711" watchObservedRunningTime="2026-01-28 16:09:30.192360098 +0000 UTC m=+1462.946723681" Jan 28 16:09:32 crc kubenswrapper[4811]: I0128 16:09:32.874723 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 28 16:09:32 crc kubenswrapper[4811]: I0128 16:09:32.875304 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="f1fdd831-6083-4581-9377-64690fda7f53" containerName="kube-state-metrics" containerID="cri-o://7c020406f2a1e5d49e8f348d463ba2141c15287a626315f1b18551e0680999d5" gracePeriod=30 Jan 28 16:09:33 crc kubenswrapper[4811]: I0128 16:09:33.086928 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 16:09:33 crc kubenswrapper[4811]: I0128 16:09:33.086982 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 16:09:33 crc kubenswrapper[4811]: I0128 16:09:33.192328 4811 generic.go:334] "Generic (PLEG): container finished" podID="f1fdd831-6083-4581-9377-64690fda7f53" containerID="7c020406f2a1e5d49e8f348d463ba2141c15287a626315f1b18551e0680999d5" exitCode=2 Jan 28 16:09:33 crc kubenswrapper[4811]: I0128 16:09:33.192642 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f1fdd831-6083-4581-9377-64690fda7f53","Type":"ContainerDied","Data":"7c020406f2a1e5d49e8f348d463ba2141c15287a626315f1b18551e0680999d5"} Jan 28 16:09:33 crc kubenswrapper[4811]: I0128 16:09:33.436675 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 28 16:09:33 crc kubenswrapper[4811]: I0128 16:09:33.482952 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Jan 28 16:09:33 crc kubenswrapper[4811]: I0128 16:09:33.601619 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4jjmf\" (UniqueName: \"kubernetes.io/projected/f1fdd831-6083-4581-9377-64690fda7f53-kube-api-access-4jjmf\") pod \"f1fdd831-6083-4581-9377-64690fda7f53\" (UID: \"f1fdd831-6083-4581-9377-64690fda7f53\") " Jan 28 16:09:33 crc kubenswrapper[4811]: I0128 16:09:33.608674 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1fdd831-6083-4581-9377-64690fda7f53-kube-api-access-4jjmf" (OuterVolumeSpecName: "kube-api-access-4jjmf") pod "f1fdd831-6083-4581-9377-64690fda7f53" (UID: "f1fdd831-6083-4581-9377-64690fda7f53"). InnerVolumeSpecName "kube-api-access-4jjmf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:09:33 crc kubenswrapper[4811]: I0128 16:09:33.703948 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4jjmf\" (UniqueName: \"kubernetes.io/projected/f1fdd831-6083-4581-9377-64690fda7f53-kube-api-access-4jjmf\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:34 crc kubenswrapper[4811]: I0128 16:09:34.201495 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f1fdd831-6083-4581-9377-64690fda7f53","Type":"ContainerDied","Data":"21d371442278dc823f8e71e20ece900098a5704be7485feafc3b158ef73002d8"} Jan 28 16:09:34 crc kubenswrapper[4811]: I0128 16:09:34.201555 4811 scope.go:117] "RemoveContainer" containerID="7c020406f2a1e5d49e8f348d463ba2141c15287a626315f1b18551e0680999d5" Jan 28 16:09:34 crc kubenswrapper[4811]: I0128 16:09:34.202716 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 28 16:09:34 crc kubenswrapper[4811]: I0128 16:09:34.243614 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 28 16:09:34 crc kubenswrapper[4811]: I0128 16:09:34.256349 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 28 16:09:34 crc kubenswrapper[4811]: I0128 16:09:34.283838 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Jan 28 16:09:34 crc kubenswrapper[4811]: E0128 16:09:34.284249 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1fdd831-6083-4581-9377-64690fda7f53" containerName="kube-state-metrics" Jan 28 16:09:34 crc kubenswrapper[4811]: I0128 16:09:34.284271 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1fdd831-6083-4581-9377-64690fda7f53" containerName="kube-state-metrics" Jan 28 16:09:34 crc kubenswrapper[4811]: I0128 16:09:34.284523 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1fdd831-6083-4581-9377-64690fda7f53" containerName="kube-state-metrics" Jan 28 16:09:34 crc kubenswrapper[4811]: I0128 16:09:34.285221 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 28 16:09:34 crc kubenswrapper[4811]: I0128 16:09:34.288113 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Jan 28 16:09:34 crc kubenswrapper[4811]: I0128 16:09:34.288542 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Jan 28 16:09:34 crc kubenswrapper[4811]: I0128 16:09:34.314708 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 28 16:09:34 crc kubenswrapper[4811]: I0128 16:09:34.350743 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1fdd831-6083-4581-9377-64690fda7f53" path="/var/lib/kubelet/pods/f1fdd831-6083-4581-9377-64690fda7f53/volumes" Jan 28 16:09:34 crc kubenswrapper[4811]: I0128 16:09:34.404878 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 28 16:09:34 crc kubenswrapper[4811]: I0128 16:09:34.404927 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 28 16:09:34 crc kubenswrapper[4811]: I0128 16:09:34.415399 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f80333f7-9036-49d9-8c68-03e4ef8f9ee8-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"f80333f7-9036-49d9-8c68-03e4ef8f9ee8\") " pod="openstack/kube-state-metrics-0" Jan 28 16:09:34 crc kubenswrapper[4811]: I0128 16:09:34.415639 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f80333f7-9036-49d9-8c68-03e4ef8f9ee8-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"f80333f7-9036-49d9-8c68-03e4ef8f9ee8\") " pod="openstack/kube-state-metrics-0" Jan 28 16:09:34 crc kubenswrapper[4811]: I0128 16:09:34.415687 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x92tf\" (UniqueName: \"kubernetes.io/projected/f80333f7-9036-49d9-8c68-03e4ef8f9ee8-kube-api-access-x92tf\") pod \"kube-state-metrics-0\" (UID: \"f80333f7-9036-49d9-8c68-03e4ef8f9ee8\") " pod="openstack/kube-state-metrics-0" Jan 28 16:09:34 crc kubenswrapper[4811]: I0128 16:09:34.415715 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f80333f7-9036-49d9-8c68-03e4ef8f9ee8-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"f80333f7-9036-49d9-8c68-03e4ef8f9ee8\") " pod="openstack/kube-state-metrics-0" Jan 28 16:09:34 crc kubenswrapper[4811]: I0128 16:09:34.518120 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f80333f7-9036-49d9-8c68-03e4ef8f9ee8-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"f80333f7-9036-49d9-8c68-03e4ef8f9ee8\") " pod="openstack/kube-state-metrics-0" Jan 28 16:09:34 crc kubenswrapper[4811]: I0128 16:09:34.519572 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f80333f7-9036-49d9-8c68-03e4ef8f9ee8-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"f80333f7-9036-49d9-8c68-03e4ef8f9ee8\") " 
pod="openstack/kube-state-metrics-0" Jan 28 16:09:34 crc kubenswrapper[4811]: I0128 16:09:34.520074 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x92tf\" (UniqueName: \"kubernetes.io/projected/f80333f7-9036-49d9-8c68-03e4ef8f9ee8-kube-api-access-x92tf\") pod \"kube-state-metrics-0\" (UID: \"f80333f7-9036-49d9-8c68-03e4ef8f9ee8\") " pod="openstack/kube-state-metrics-0" Jan 28 16:09:34 crc kubenswrapper[4811]: I0128 16:09:34.520953 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f80333f7-9036-49d9-8c68-03e4ef8f9ee8-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"f80333f7-9036-49d9-8c68-03e4ef8f9ee8\") " pod="openstack/kube-state-metrics-0" Jan 28 16:09:34 crc kubenswrapper[4811]: I0128 16:09:34.525406 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f80333f7-9036-49d9-8c68-03e4ef8f9ee8-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"f80333f7-9036-49d9-8c68-03e4ef8f9ee8\") " pod="openstack/kube-state-metrics-0" Jan 28 16:09:34 crc kubenswrapper[4811]: I0128 16:09:34.526150 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f80333f7-9036-49d9-8c68-03e4ef8f9ee8-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"f80333f7-9036-49d9-8c68-03e4ef8f9ee8\") " pod="openstack/kube-state-metrics-0" Jan 28 16:09:34 crc kubenswrapper[4811]: I0128 16:09:34.529168 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f80333f7-9036-49d9-8c68-03e4ef8f9ee8-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"f80333f7-9036-49d9-8c68-03e4ef8f9ee8\") " pod="openstack/kube-state-metrics-0" Jan 28 16:09:34 crc kubenswrapper[4811]: I0128 16:09:34.540072 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x92tf\" (UniqueName: \"kubernetes.io/projected/f80333f7-9036-49d9-8c68-03e4ef8f9ee8-kube-api-access-x92tf\") pod \"kube-state-metrics-0\" (UID: \"f80333f7-9036-49d9-8c68-03e4ef8f9ee8\") " pod="openstack/kube-state-metrics-0" Jan 28 16:09:34 crc kubenswrapper[4811]: I0128 16:09:34.606824 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 28 16:09:35 crc kubenswrapper[4811]: I0128 16:09:35.096942 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:09:35 crc kubenswrapper[4811]: I0128 16:09:35.097630 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="726e91cb-f20b-4176-bd40-0d2253bb1707" containerName="proxy-httpd" containerID="cri-o://db4969e7eae14d649bae46a40da595e17d04d84ff57158f7ede3e074c7f88727" gracePeriod=30 Jan 28 16:09:35 crc kubenswrapper[4811]: I0128 16:09:35.097885 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="726e91cb-f20b-4176-bd40-0d2253bb1707" containerName="sg-core" containerID="cri-o://b54595b460d03992d41e0f94bc574753ac7714f0e15342526fcdf6512a200762" gracePeriod=30 Jan 28 16:09:35 crc kubenswrapper[4811]: I0128 16:09:35.097904 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="726e91cb-f20b-4176-bd40-0d2253bb1707" containerName="ceilometer-notification-agent" containerID="cri-o://61d4fb1b858b16fac9b00cd9b6b475e592a7228779167af789965f5956469f50" gracePeriod=30 Jan 28 16:09:35 crc kubenswrapper[4811]: I0128 16:09:35.098030 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="726e91cb-f20b-4176-bd40-0d2253bb1707" containerName="ceilometer-central-agent" containerID="cri-o://86a9ce1ee6a598f91fa5ddd10125ca218183e8e2571c9c6f19c1ef7c5ca8b01a" gracePeriod=30 Jan 28 16:09:35 crc kubenswrapper[4811]: I0128 16:09:35.146853 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 28 16:09:35 crc kubenswrapper[4811]: W0128 16:09:35.160924 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf80333f7_9036_49d9_8c68_03e4ef8f9ee8.slice/crio-bf3f0f91a9b6080c89dd07d5273046218b41be1d4dc81fb77ede832dc3bc4508 WatchSource:0}: Error finding container bf3f0f91a9b6080c89dd07d5273046218b41be1d4dc81fb77ede832dc3bc4508: Status 404 returned error can't find the container with id bf3f0f91a9b6080c89dd07d5273046218b41be1d4dc81fb77ede832dc3bc4508 Jan 28 16:09:35 crc kubenswrapper[4811]: I0128 16:09:35.166887 4811 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 28 16:09:35 crc kubenswrapper[4811]: I0128 16:09:35.218591 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f80333f7-9036-49d9-8c68-03e4ef8f9ee8","Type":"ContainerStarted","Data":"bf3f0f91a9b6080c89dd07d5273046218b41be1d4dc81fb77ede832dc3bc4508"} Jan 28 16:09:35 crc kubenswrapper[4811]: I0128 16:09:35.421406 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d222430c-ef7f-46cd-b963-c500fec7bcc5" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.196:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 28 16:09:35 crc kubenswrapper[4811]: I0128 16:09:35.421467 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d222430c-ef7f-46cd-b963-c500fec7bcc5" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.196:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 28 16:09:36 crc 
kubenswrapper[4811]: I0128 16:09:36.236159 4811 generic.go:334] "Generic (PLEG): container finished" podID="726e91cb-f20b-4176-bd40-0d2253bb1707" containerID="db4969e7eae14d649bae46a40da595e17d04d84ff57158f7ede3e074c7f88727" exitCode=0 Jan 28 16:09:36 crc kubenswrapper[4811]: I0128 16:09:36.236420 4811 generic.go:334] "Generic (PLEG): container finished" podID="726e91cb-f20b-4176-bd40-0d2253bb1707" containerID="b54595b460d03992d41e0f94bc574753ac7714f0e15342526fcdf6512a200762" exitCode=2 Jan 28 16:09:36 crc kubenswrapper[4811]: I0128 16:09:36.236452 4811 generic.go:334] "Generic (PLEG): container finished" podID="726e91cb-f20b-4176-bd40-0d2253bb1707" containerID="86a9ce1ee6a598f91fa5ddd10125ca218183e8e2571c9c6f19c1ef7c5ca8b01a" exitCode=0 Jan 28 16:09:36 crc kubenswrapper[4811]: I0128 16:09:36.236223 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"726e91cb-f20b-4176-bd40-0d2253bb1707","Type":"ContainerDied","Data":"db4969e7eae14d649bae46a40da595e17d04d84ff57158f7ede3e074c7f88727"} Jan 28 16:09:36 crc kubenswrapper[4811]: I0128 16:09:36.236519 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"726e91cb-f20b-4176-bd40-0d2253bb1707","Type":"ContainerDied","Data":"b54595b460d03992d41e0f94bc574753ac7714f0e15342526fcdf6512a200762"} Jan 28 16:09:36 crc kubenswrapper[4811]: I0128 16:09:36.236535 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"726e91cb-f20b-4176-bd40-0d2253bb1707","Type":"ContainerDied","Data":"86a9ce1ee6a598f91fa5ddd10125ca218183e8e2571c9c6f19c1ef7c5ca8b01a"} Jan 28 16:09:36 crc kubenswrapper[4811]: I0128 16:09:36.238148 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f80333f7-9036-49d9-8c68-03e4ef8f9ee8","Type":"ContainerStarted","Data":"01de404d5b6e89fb138dbe6253054f8965d2469aab77f7679acb0d66fb85419b"} Jan 28 16:09:36 crc kubenswrapper[4811]: I0128 16:09:36.239359 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Jan 28 16:09:36 crc kubenswrapper[4811]: I0128 16:09:36.285226 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.8399239619999999 podStartE2EDuration="2.285203454s" podCreationTimestamp="2026-01-28 16:09:34 +0000 UTC" firstStartedPulling="2026-01-28 16:09:35.166640163 +0000 UTC m=+1467.921003736" lastFinishedPulling="2026-01-28 16:09:35.611919645 +0000 UTC m=+1468.366283228" observedRunningTime="2026-01-28 16:09:36.276238391 +0000 UTC m=+1469.030601974" watchObservedRunningTime="2026-01-28 16:09:36.285203454 +0000 UTC m=+1469.039567037" Jan 28 16:09:37 crc kubenswrapper[4811]: I0128 16:09:37.262201 4811 generic.go:334] "Generic (PLEG): container finished" podID="726e91cb-f20b-4176-bd40-0d2253bb1707" containerID="61d4fb1b858b16fac9b00cd9b6b475e592a7228779167af789965f5956469f50" exitCode=0 Jan 28 16:09:37 crc kubenswrapper[4811]: I0128 16:09:37.263715 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"726e91cb-f20b-4176-bd40-0d2253bb1707","Type":"ContainerDied","Data":"61d4fb1b858b16fac9b00cd9b6b475e592a7228779167af789965f5956469f50"} Jan 28 16:09:37 crc kubenswrapper[4811]: I0128 16:09:37.568840 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:09:37 crc kubenswrapper[4811]: I0128 16:09:37.680207 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/726e91cb-f20b-4176-bd40-0d2253bb1707-combined-ca-bundle\") pod \"726e91cb-f20b-4176-bd40-0d2253bb1707\" (UID: \"726e91cb-f20b-4176-bd40-0d2253bb1707\") " Jan 28 16:09:37 crc kubenswrapper[4811]: I0128 16:09:37.680316 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/726e91cb-f20b-4176-bd40-0d2253bb1707-log-httpd\") pod \"726e91cb-f20b-4176-bd40-0d2253bb1707\" (UID: \"726e91cb-f20b-4176-bd40-0d2253bb1707\") " Jan 28 16:09:37 crc kubenswrapper[4811]: I0128 16:09:37.680394 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/726e91cb-f20b-4176-bd40-0d2253bb1707-config-data\") pod \"726e91cb-f20b-4176-bd40-0d2253bb1707\" (UID: \"726e91cb-f20b-4176-bd40-0d2253bb1707\") " Jan 28 16:09:37 crc kubenswrapper[4811]: I0128 16:09:37.680465 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/726e91cb-f20b-4176-bd40-0d2253bb1707-sg-core-conf-yaml\") pod \"726e91cb-f20b-4176-bd40-0d2253bb1707\" (UID: \"726e91cb-f20b-4176-bd40-0d2253bb1707\") " Jan 28 16:09:37 crc kubenswrapper[4811]: I0128 16:09:37.680638 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/726e91cb-f20b-4176-bd40-0d2253bb1707-run-httpd\") pod \"726e91cb-f20b-4176-bd40-0d2253bb1707\" (UID: \"726e91cb-f20b-4176-bd40-0d2253bb1707\") " Jan 28 16:09:37 crc kubenswrapper[4811]: I0128 16:09:37.681062 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/726e91cb-f20b-4176-bd40-0d2253bb1707-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "726e91cb-f20b-4176-bd40-0d2253bb1707" (UID: "726e91cb-f20b-4176-bd40-0d2253bb1707"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:09:37 crc kubenswrapper[4811]: I0128 16:09:37.681088 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/726e91cb-f20b-4176-bd40-0d2253bb1707-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "726e91cb-f20b-4176-bd40-0d2253bb1707" (UID: "726e91cb-f20b-4176-bd40-0d2253bb1707"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:09:37 crc kubenswrapper[4811]: I0128 16:09:37.681111 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/726e91cb-f20b-4176-bd40-0d2253bb1707-scripts\") pod \"726e91cb-f20b-4176-bd40-0d2253bb1707\" (UID: \"726e91cb-f20b-4176-bd40-0d2253bb1707\") " Jan 28 16:09:37 crc kubenswrapper[4811]: I0128 16:09:37.681143 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7549x\" (UniqueName: \"kubernetes.io/projected/726e91cb-f20b-4176-bd40-0d2253bb1707-kube-api-access-7549x\") pod \"726e91cb-f20b-4176-bd40-0d2253bb1707\" (UID: \"726e91cb-f20b-4176-bd40-0d2253bb1707\") " Jan 28 16:09:37 crc kubenswrapper[4811]: I0128 16:09:37.681997 4811 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/726e91cb-f20b-4176-bd40-0d2253bb1707-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:37 crc kubenswrapper[4811]: I0128 16:09:37.682089 4811 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/726e91cb-f20b-4176-bd40-0d2253bb1707-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:37 crc kubenswrapper[4811]: I0128 16:09:37.685624 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/726e91cb-f20b-4176-bd40-0d2253bb1707-kube-api-access-7549x" (OuterVolumeSpecName: "kube-api-access-7549x") pod "726e91cb-f20b-4176-bd40-0d2253bb1707" (UID: "726e91cb-f20b-4176-bd40-0d2253bb1707"). InnerVolumeSpecName "kube-api-access-7549x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:09:37 crc kubenswrapper[4811]: I0128 16:09:37.685623 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/726e91cb-f20b-4176-bd40-0d2253bb1707-scripts" (OuterVolumeSpecName: "scripts") pod "726e91cb-f20b-4176-bd40-0d2253bb1707" (UID: "726e91cb-f20b-4176-bd40-0d2253bb1707"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:09:37 crc kubenswrapper[4811]: I0128 16:09:37.707149 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/726e91cb-f20b-4176-bd40-0d2253bb1707-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "726e91cb-f20b-4176-bd40-0d2253bb1707" (UID: "726e91cb-f20b-4176-bd40-0d2253bb1707"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:09:37 crc kubenswrapper[4811]: I0128 16:09:37.763627 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/726e91cb-f20b-4176-bd40-0d2253bb1707-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "726e91cb-f20b-4176-bd40-0d2253bb1707" (UID: "726e91cb-f20b-4176-bd40-0d2253bb1707"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:09:37 crc kubenswrapper[4811]: I0128 16:09:37.783100 4811 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/726e91cb-f20b-4176-bd40-0d2253bb1707-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:37 crc kubenswrapper[4811]: I0128 16:09:37.783141 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/726e91cb-f20b-4176-bd40-0d2253bb1707-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:37 crc kubenswrapper[4811]: I0128 16:09:37.783157 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7549x\" (UniqueName: \"kubernetes.io/projected/726e91cb-f20b-4176-bd40-0d2253bb1707-kube-api-access-7549x\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:37 crc kubenswrapper[4811]: I0128 16:09:37.783170 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/726e91cb-f20b-4176-bd40-0d2253bb1707-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:37 crc kubenswrapper[4811]: I0128 16:09:37.793742 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/726e91cb-f20b-4176-bd40-0d2253bb1707-config-data" (OuterVolumeSpecName: "config-data") pod "726e91cb-f20b-4176-bd40-0d2253bb1707" (UID: "726e91cb-f20b-4176-bd40-0d2253bb1707"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:09:37 crc kubenswrapper[4811]: I0128 16:09:37.885913 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/726e91cb-f20b-4176-bd40-0d2253bb1707-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.273960 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.273950 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"726e91cb-f20b-4176-bd40-0d2253bb1707","Type":"ContainerDied","Data":"605ca69fba1dfafb9e19f1ac554d4b94e8804341a000c30a65783368b1edab87"} Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.274024 4811 scope.go:117] "RemoveContainer" containerID="db4969e7eae14d649bae46a40da595e17d04d84ff57158f7ede3e074c7f88727" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.311048 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.319736 4811 scope.go:117] "RemoveContainer" containerID="b54595b460d03992d41e0f94bc574753ac7714f0e15342526fcdf6512a200762" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.321977 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.361841 4811 scope.go:117] "RemoveContainer" containerID="61d4fb1b858b16fac9b00cd9b6b475e592a7228779167af789965f5956469f50" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.363744 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="726e91cb-f20b-4176-bd40-0d2253bb1707" path="/var/lib/kubelet/pods/726e91cb-f20b-4176-bd40-0d2253bb1707/volumes" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.364520 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:09:38 crc kubenswrapper[4811]: E0128 16:09:38.364789 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="726e91cb-f20b-4176-bd40-0d2253bb1707" containerName="sg-core" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.364805 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="726e91cb-f20b-4176-bd40-0d2253bb1707" containerName="sg-core" Jan 28 16:09:38 crc kubenswrapper[4811]: E0128 16:09:38.364823 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="726e91cb-f20b-4176-bd40-0d2253bb1707" containerName="proxy-httpd" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.364831 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="726e91cb-f20b-4176-bd40-0d2253bb1707" containerName="proxy-httpd" Jan 28 16:09:38 crc kubenswrapper[4811]: E0128 16:09:38.364852 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="726e91cb-f20b-4176-bd40-0d2253bb1707" containerName="ceilometer-central-agent" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.364858 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="726e91cb-f20b-4176-bd40-0d2253bb1707" containerName="ceilometer-central-agent" Jan 28 16:09:38 crc kubenswrapper[4811]: E0128 16:09:38.364876 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="726e91cb-f20b-4176-bd40-0d2253bb1707" containerName="ceilometer-notification-agent" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.364882 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="726e91cb-f20b-4176-bd40-0d2253bb1707" containerName="ceilometer-notification-agent" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.365057 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="726e91cb-f20b-4176-bd40-0d2253bb1707" containerName="sg-core" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.365075 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="726e91cb-f20b-4176-bd40-0d2253bb1707" 
containerName="ceilometer-notification-agent" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.365089 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="726e91cb-f20b-4176-bd40-0d2253bb1707" containerName="proxy-httpd" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.365097 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="726e91cb-f20b-4176-bd40-0d2253bb1707" containerName="ceilometer-central-agent" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.367739 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.367823 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.370788 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.371052 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.371305 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.385885 4811 scope.go:117] "RemoveContainer" containerID="86a9ce1ee6a598f91fa5ddd10125ca218183e8e2571c9c6f19c1ef7c5ca8b01a" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.395102 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aa9c631f-b34b-4a59-9f16-9d3d834b6733-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\") " pod="openstack/ceilometer-0" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.395175 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa9c631f-b34b-4a59-9f16-9d3d834b6733-run-httpd\") pod \"ceilometer-0\" (UID: \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\") " pod="openstack/ceilometer-0" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.395206 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa9c631f-b34b-4a59-9f16-9d3d834b6733-log-httpd\") pod \"ceilometer-0\" (UID: \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\") " pod="openstack/ceilometer-0" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.395309 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cn258\" (UniqueName: \"kubernetes.io/projected/aa9c631f-b34b-4a59-9f16-9d3d834b6733-kube-api-access-cn258\") pod \"ceilometer-0\" (UID: \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\") " pod="openstack/ceilometer-0" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.395335 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa9c631f-b34b-4a59-9f16-9d3d834b6733-config-data\") pod \"ceilometer-0\" (UID: \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\") " pod="openstack/ceilometer-0" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.395361 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/aa9c631f-b34b-4a59-9f16-9d3d834b6733-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\") " pod="openstack/ceilometer-0" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.395394 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa9c631f-b34b-4a59-9f16-9d3d834b6733-scripts\") pod \"ceilometer-0\" (UID: \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\") " pod="openstack/ceilometer-0" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.395596 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa9c631f-b34b-4a59-9f16-9d3d834b6733-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\") " pod="openstack/ceilometer-0" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.452583 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.452856 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.483422 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.497155 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa9c631f-b34b-4a59-9f16-9d3d834b6733-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\") " pod="openstack/ceilometer-0" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.497266 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aa9c631f-b34b-4a59-9f16-9d3d834b6733-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\") " pod="openstack/ceilometer-0" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.497320 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa9c631f-b34b-4a59-9f16-9d3d834b6733-run-httpd\") pod \"ceilometer-0\" (UID: \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\") " pod="openstack/ceilometer-0" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.497344 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa9c631f-b34b-4a59-9f16-9d3d834b6733-log-httpd\") pod \"ceilometer-0\" (UID: \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\") " pod="openstack/ceilometer-0" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.497403 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cn258\" (UniqueName: \"kubernetes.io/projected/aa9c631f-b34b-4a59-9f16-9d3d834b6733-kube-api-access-cn258\") pod \"ceilometer-0\" (UID: \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\") " pod="openstack/ceilometer-0" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.497444 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa9c631f-b34b-4a59-9f16-9d3d834b6733-config-data\") pod \"ceilometer-0\" (UID: \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\") " 
pod="openstack/ceilometer-0" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.497471 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa9c631f-b34b-4a59-9f16-9d3d834b6733-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\") " pod="openstack/ceilometer-0" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.497498 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa9c631f-b34b-4a59-9f16-9d3d834b6733-scripts\") pod \"ceilometer-0\" (UID: \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\") " pod="openstack/ceilometer-0" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.500317 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa9c631f-b34b-4a59-9f16-9d3d834b6733-run-httpd\") pod \"ceilometer-0\" (UID: \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\") " pod="openstack/ceilometer-0" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.502644 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa9c631f-b34b-4a59-9f16-9d3d834b6733-log-httpd\") pod \"ceilometer-0\" (UID: \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\") " pod="openstack/ceilometer-0" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.503867 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aa9c631f-b34b-4a59-9f16-9d3d834b6733-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\") " pod="openstack/ceilometer-0" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.504562 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa9c631f-b34b-4a59-9f16-9d3d834b6733-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\") " pod="openstack/ceilometer-0" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.504737 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa9c631f-b34b-4a59-9f16-9d3d834b6733-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\") " pod="openstack/ceilometer-0" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.506481 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa9c631f-b34b-4a59-9f16-9d3d834b6733-scripts\") pod \"ceilometer-0\" (UID: \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\") " pod="openstack/ceilometer-0" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.516522 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa9c631f-b34b-4a59-9f16-9d3d834b6733-config-data\") pod \"ceilometer-0\" (UID: \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\") " pod="openstack/ceilometer-0" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.517077 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cn258\" (UniqueName: \"kubernetes.io/projected/aa9c631f-b34b-4a59-9f16-9d3d834b6733-kube-api-access-cn258\") pod \"ceilometer-0\" (UID: \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\") " pod="openstack/ceilometer-0" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 
16:09:38.528711 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Jan 28 16:09:38 crc kubenswrapper[4811]: I0128 16:09:38.692123 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:09:39 crc kubenswrapper[4811]: I0128 16:09:39.139526 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:09:39 crc kubenswrapper[4811]: W0128 16:09:39.140680 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaa9c631f_b34b_4a59_9f16_9d3d834b6733.slice/crio-2450c217ba98dc151f807730bcd9128e9523966569a3c52adb1af1cc767b533e WatchSource:0}: Error finding container 2450c217ba98dc151f807730bcd9128e9523966569a3c52adb1af1cc767b533e: Status 404 returned error can't find the container with id 2450c217ba98dc151f807730bcd9128e9523966569a3c52adb1af1cc767b533e Jan 28 16:09:39 crc kubenswrapper[4811]: I0128 16:09:39.287684 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa9c631f-b34b-4a59-9f16-9d3d834b6733","Type":"ContainerStarted","Data":"2450c217ba98dc151f807730bcd9128e9523966569a3c52adb1af1cc767b533e"} Jan 28 16:09:39 crc kubenswrapper[4811]: I0128 16:09:39.315422 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jan 28 16:09:39 crc kubenswrapper[4811]: I0128 16:09:39.533617 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="009dc16a-d4f9-44a5-9d97-caaca19d6fe0" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.197:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 28 16:09:39 crc kubenswrapper[4811]: I0128 16:09:39.533860 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="009dc16a-d4f9-44a5-9d97-caaca19d6fe0" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.197:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 28 16:09:40 crc kubenswrapper[4811]: I0128 16:09:40.301187 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa9c631f-b34b-4a59-9f16-9d3d834b6733","Type":"ContainerStarted","Data":"7156168d7ae5ea487f2eb8be1c308e23e0c1ce025dda0ffa237ec352ac6b2554"} Jan 28 16:09:44 crc kubenswrapper[4811]: I0128 16:09:44.349217 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa9c631f-b34b-4a59-9f16-9d3d834b6733","Type":"ContainerStarted","Data":"6292c26f1573b7d96779b02bcf11915a359ed8ad1d7e3a0f7e0cf2873a4be186"} Jan 28 16:09:44 crc kubenswrapper[4811]: I0128 16:09:44.416069 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 28 16:09:44 crc kubenswrapper[4811]: I0128 16:09:44.417670 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 28 16:09:44 crc kubenswrapper[4811]: I0128 16:09:44.456263 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 28 16:09:44 crc kubenswrapper[4811]: I0128 16:09:44.622043 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Jan 28 16:09:45 crc kubenswrapper[4811]: I0128 16:09:45.361122 4811 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/ceilometer-0" event={"ID":"aa9c631f-b34b-4a59-9f16-9d3d834b6733","Type":"ContainerStarted","Data":"7eb7ce2504aac3787810e47efe14313e4edb046e457f30460a7b6420b3909ef0"} Jan 28 16:09:45 crc kubenswrapper[4811]: I0128 16:09:45.367151 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.350268 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.381798 4811 generic.go:334] "Generic (PLEG): container finished" podID="4e0bf01f-9db1-413a-800b-feaf02565788" containerID="6881afdd1d1b6c2c0c7b0e98a13a618820c92b266030f68596d0970ff79a4858" exitCode=137 Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.381841 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.381874 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"4e0bf01f-9db1-413a-800b-feaf02565788","Type":"ContainerDied","Data":"6881afdd1d1b6c2c0c7b0e98a13a618820c92b266030f68596d0970ff79a4858"} Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.381916 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"4e0bf01f-9db1-413a-800b-feaf02565788","Type":"ContainerDied","Data":"88a23539acfe786aa17bcf851ec8b99b374dd171026ecefd4edf52054f2431ff"} Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.381932 4811 scope.go:117] "RemoveContainer" containerID="6881afdd1d1b6c2c0c7b0e98a13a618820c92b266030f68596d0970ff79a4858" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.410088 4811 scope.go:117] "RemoveContainer" containerID="6881afdd1d1b6c2c0c7b0e98a13a618820c92b266030f68596d0970ff79a4858" Jan 28 16:09:46 crc kubenswrapper[4811]: E0128 16:09:46.410530 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6881afdd1d1b6c2c0c7b0e98a13a618820c92b266030f68596d0970ff79a4858\": container with ID starting with 6881afdd1d1b6c2c0c7b0e98a13a618820c92b266030f68596d0970ff79a4858 not found: ID does not exist" containerID="6881afdd1d1b6c2c0c7b0e98a13a618820c92b266030f68596d0970ff79a4858" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.410577 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6881afdd1d1b6c2c0c7b0e98a13a618820c92b266030f68596d0970ff79a4858"} err="failed to get container status \"6881afdd1d1b6c2c0c7b0e98a13a618820c92b266030f68596d0970ff79a4858\": rpc error: code = NotFound desc = could not find container \"6881afdd1d1b6c2c0c7b0e98a13a618820c92b266030f68596d0970ff79a4858\": container with ID starting with 6881afdd1d1b6c2c0c7b0e98a13a618820c92b266030f68596d0970ff79a4858 not found: ID does not exist" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.455229 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lfdtn\" (UniqueName: \"kubernetes.io/projected/4e0bf01f-9db1-413a-800b-feaf02565788-kube-api-access-lfdtn\") pod \"4e0bf01f-9db1-413a-800b-feaf02565788\" (UID: \"4e0bf01f-9db1-413a-800b-feaf02565788\") " Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.455408 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e0bf01f-9db1-413a-800b-feaf02565788-combined-ca-bundle\") pod \"4e0bf01f-9db1-413a-800b-feaf02565788\" (UID: \"4e0bf01f-9db1-413a-800b-feaf02565788\") " Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.455470 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e0bf01f-9db1-413a-800b-feaf02565788-config-data\") pod \"4e0bf01f-9db1-413a-800b-feaf02565788\" (UID: \"4e0bf01f-9db1-413a-800b-feaf02565788\") " Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.459934 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e0bf01f-9db1-413a-800b-feaf02565788-kube-api-access-lfdtn" (OuterVolumeSpecName: "kube-api-access-lfdtn") pod "4e0bf01f-9db1-413a-800b-feaf02565788" (UID: "4e0bf01f-9db1-413a-800b-feaf02565788"). InnerVolumeSpecName "kube-api-access-lfdtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.480701 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e0bf01f-9db1-413a-800b-feaf02565788-config-data" (OuterVolumeSpecName: "config-data") pod "4e0bf01f-9db1-413a-800b-feaf02565788" (UID: "4e0bf01f-9db1-413a-800b-feaf02565788"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.483076 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e0bf01f-9db1-413a-800b-feaf02565788-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4e0bf01f-9db1-413a-800b-feaf02565788" (UID: "4e0bf01f-9db1-413a-800b-feaf02565788"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.559867 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lfdtn\" (UniqueName: \"kubernetes.io/projected/4e0bf01f-9db1-413a-800b-feaf02565788-kube-api-access-lfdtn\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.560313 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e0bf01f-9db1-413a-800b-feaf02565788-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.560338 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e0bf01f-9db1-413a-800b-feaf02565788-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.713355 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.722064 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.733449 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 28 16:09:46 crc kubenswrapper[4811]: E0128 16:09:46.733890 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e0bf01f-9db1-413a-800b-feaf02565788" containerName="nova-cell1-novncproxy-novncproxy" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.733911 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e0bf01f-9db1-413a-800b-feaf02565788" containerName="nova-cell1-novncproxy-novncproxy" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.734156 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e0bf01f-9db1-413a-800b-feaf02565788" containerName="nova-cell1-novncproxy-novncproxy" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.734867 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.739794 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.740003 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.739804 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.746469 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.866354 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/0bb2deb3-c0dd-4102-96dd-b21dd187bf89-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"0bb2deb3-c0dd-4102-96dd-b21dd187bf89\") " pod="openstack/nova-cell1-novncproxy-0" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.866450 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0bb2deb3-c0dd-4102-96dd-b21dd187bf89-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"0bb2deb3-c0dd-4102-96dd-b21dd187bf89\") " pod="openstack/nova-cell1-novncproxy-0" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.866551 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bb2deb3-c0dd-4102-96dd-b21dd187bf89-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"0bb2deb3-c0dd-4102-96dd-b21dd187bf89\") " pod="openstack/nova-cell1-novncproxy-0" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.866598 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bn5z\" (UniqueName: \"kubernetes.io/projected/0bb2deb3-c0dd-4102-96dd-b21dd187bf89-kube-api-access-9bn5z\") pod \"nova-cell1-novncproxy-0\" (UID: \"0bb2deb3-c0dd-4102-96dd-b21dd187bf89\") " pod="openstack/nova-cell1-novncproxy-0" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.866625 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/0bb2deb3-c0dd-4102-96dd-b21dd187bf89-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"0bb2deb3-c0dd-4102-96dd-b21dd187bf89\") " pod="openstack/nova-cell1-novncproxy-0" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.968002 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/0bb2deb3-c0dd-4102-96dd-b21dd187bf89-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"0bb2deb3-c0dd-4102-96dd-b21dd187bf89\") " pod="openstack/nova-cell1-novncproxy-0" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.968051 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0bb2deb3-c0dd-4102-96dd-b21dd187bf89-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"0bb2deb3-c0dd-4102-96dd-b21dd187bf89\") " 
pod="openstack/nova-cell1-novncproxy-0" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.968106 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bb2deb3-c0dd-4102-96dd-b21dd187bf89-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"0bb2deb3-c0dd-4102-96dd-b21dd187bf89\") " pod="openstack/nova-cell1-novncproxy-0" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.968164 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bn5z\" (UniqueName: \"kubernetes.io/projected/0bb2deb3-c0dd-4102-96dd-b21dd187bf89-kube-api-access-9bn5z\") pod \"nova-cell1-novncproxy-0\" (UID: \"0bb2deb3-c0dd-4102-96dd-b21dd187bf89\") " pod="openstack/nova-cell1-novncproxy-0" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.968203 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/0bb2deb3-c0dd-4102-96dd-b21dd187bf89-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"0bb2deb3-c0dd-4102-96dd-b21dd187bf89\") " pod="openstack/nova-cell1-novncproxy-0" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.973598 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bb2deb3-c0dd-4102-96dd-b21dd187bf89-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"0bb2deb3-c0dd-4102-96dd-b21dd187bf89\") " pod="openstack/nova-cell1-novncproxy-0" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.973863 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/0bb2deb3-c0dd-4102-96dd-b21dd187bf89-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"0bb2deb3-c0dd-4102-96dd-b21dd187bf89\") " pod="openstack/nova-cell1-novncproxy-0" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.984118 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/0bb2deb3-c0dd-4102-96dd-b21dd187bf89-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"0bb2deb3-c0dd-4102-96dd-b21dd187bf89\") " pod="openstack/nova-cell1-novncproxy-0" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.987414 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0bb2deb3-c0dd-4102-96dd-b21dd187bf89-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"0bb2deb3-c0dd-4102-96dd-b21dd187bf89\") " pod="openstack/nova-cell1-novncproxy-0" Jan 28 16:09:46 crc kubenswrapper[4811]: I0128 16:09:46.990203 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bn5z\" (UniqueName: \"kubernetes.io/projected/0bb2deb3-c0dd-4102-96dd-b21dd187bf89-kube-api-access-9bn5z\") pod \"nova-cell1-novncproxy-0\" (UID: \"0bb2deb3-c0dd-4102-96dd-b21dd187bf89\") " pod="openstack/nova-cell1-novncproxy-0" Jan 28 16:09:47 crc kubenswrapper[4811]: I0128 16:09:47.051372 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 28 16:09:47 crc kubenswrapper[4811]: I0128 16:09:47.392879 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa9c631f-b34b-4a59-9f16-9d3d834b6733","Type":"ContainerStarted","Data":"893d99491571c4fd2fd83af5f826adf679d49f6644351fd123796d5a01ea5c2e"} Jan 28 16:09:47 crc kubenswrapper[4811]: I0128 16:09:47.393220 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 28 16:09:47 crc kubenswrapper[4811]: I0128 16:09:47.419898 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.382661337 podStartE2EDuration="9.419878397s" podCreationTimestamp="2026-01-28 16:09:38 +0000 UTC" firstStartedPulling="2026-01-28 16:09:39.143100392 +0000 UTC m=+1471.897463985" lastFinishedPulling="2026-01-28 16:09:46.180317442 +0000 UTC m=+1478.934681045" observedRunningTime="2026-01-28 16:09:47.413246828 +0000 UTC m=+1480.167610411" watchObservedRunningTime="2026-01-28 16:09:47.419878397 +0000 UTC m=+1480.174241990" Jan 28 16:09:47 crc kubenswrapper[4811]: I0128 16:09:47.478763 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 28 16:09:48 crc kubenswrapper[4811]: I0128 16:09:48.353497 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e0bf01f-9db1-413a-800b-feaf02565788" path="/var/lib/kubelet/pods/4e0bf01f-9db1-413a-800b-feaf02565788/volumes" Jan 28 16:09:48 crc kubenswrapper[4811]: I0128 16:09:48.407873 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"0bb2deb3-c0dd-4102-96dd-b21dd187bf89","Type":"ContainerStarted","Data":"48932a3a225c09d53791cf3601a19fdee760a4b3988f860040cd3de5ac4f1b6c"} Jan 28 16:09:48 crc kubenswrapper[4811]: I0128 16:09:48.407926 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"0bb2deb3-c0dd-4102-96dd-b21dd187bf89","Type":"ContainerStarted","Data":"9a02085d03a698530a4112ea54ee18620633ea2518ef96a46e19a88eda1d0b9b"} Jan 28 16:09:48 crc kubenswrapper[4811]: I0128 16:09:48.433036 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.433012658 podStartE2EDuration="2.433012658s" podCreationTimestamp="2026-01-28 16:09:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:09:48.42500028 +0000 UTC m=+1481.179363863" watchObservedRunningTime="2026-01-28 16:09:48.433012658 +0000 UTC m=+1481.187376251" Jan 28 16:09:48 crc kubenswrapper[4811]: I0128 16:09:48.454885 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 28 16:09:48 crc kubenswrapper[4811]: I0128 16:09:48.455237 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 28 16:09:48 crc kubenswrapper[4811]: I0128 16:09:48.455675 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 28 16:09:48 crc kubenswrapper[4811]: I0128 16:09:48.480101 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 28 16:09:49 crc kubenswrapper[4811]: I0128 16:09:49.415412 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 28 16:09:49 
crc kubenswrapper[4811]: I0128 16:09:49.419741 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 28 16:09:49 crc kubenswrapper[4811]: I0128 16:09:49.593767 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-867cd545c7-s5tc2"] Jan 28 16:09:49 crc kubenswrapper[4811]: I0128 16:09:49.596028 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-867cd545c7-s5tc2" Jan 28 16:09:49 crc kubenswrapper[4811]: I0128 16:09:49.631054 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-867cd545c7-s5tc2"] Jan 28 16:09:49 crc kubenswrapper[4811]: I0128 16:09:49.721207 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-dns-svc\") pod \"dnsmasq-dns-867cd545c7-s5tc2\" (UID: \"bfd184d9-d9d2-4a4b-a672-bfb76837eaca\") " pod="openstack/dnsmasq-dns-867cd545c7-s5tc2" Jan 28 16:09:49 crc kubenswrapper[4811]: I0128 16:09:49.721256 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-ovsdbserver-sb\") pod \"dnsmasq-dns-867cd545c7-s5tc2\" (UID: \"bfd184d9-d9d2-4a4b-a672-bfb76837eaca\") " pod="openstack/dnsmasq-dns-867cd545c7-s5tc2" Jan 28 16:09:49 crc kubenswrapper[4811]: I0128 16:09:49.721338 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-dns-swift-storage-0\") pod \"dnsmasq-dns-867cd545c7-s5tc2\" (UID: \"bfd184d9-d9d2-4a4b-a672-bfb76837eaca\") " pod="openstack/dnsmasq-dns-867cd545c7-s5tc2" Jan 28 16:09:49 crc kubenswrapper[4811]: I0128 16:09:49.721417 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-config\") pod \"dnsmasq-dns-867cd545c7-s5tc2\" (UID: \"bfd184d9-d9d2-4a4b-a672-bfb76837eaca\") " pod="openstack/dnsmasq-dns-867cd545c7-s5tc2" Jan 28 16:09:49 crc kubenswrapper[4811]: I0128 16:09:49.721507 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hk7k\" (UniqueName: \"kubernetes.io/projected/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-kube-api-access-7hk7k\") pod \"dnsmasq-dns-867cd545c7-s5tc2\" (UID: \"bfd184d9-d9d2-4a4b-a672-bfb76837eaca\") " pod="openstack/dnsmasq-dns-867cd545c7-s5tc2" Jan 28 16:09:49 crc kubenswrapper[4811]: I0128 16:09:49.721545 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-ovsdbserver-nb\") pod \"dnsmasq-dns-867cd545c7-s5tc2\" (UID: \"bfd184d9-d9d2-4a4b-a672-bfb76837eaca\") " pod="openstack/dnsmasq-dns-867cd545c7-s5tc2" Jan 28 16:09:49 crc kubenswrapper[4811]: I0128 16:09:49.822866 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-dns-svc\") pod \"dnsmasq-dns-867cd545c7-s5tc2\" (UID: \"bfd184d9-d9d2-4a4b-a672-bfb76837eaca\") " pod="openstack/dnsmasq-dns-867cd545c7-s5tc2" Jan 28 16:09:49 crc kubenswrapper[4811]: I0128 16:09:49.822917 4811 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-ovsdbserver-sb\") pod \"dnsmasq-dns-867cd545c7-s5tc2\" (UID: \"bfd184d9-d9d2-4a4b-a672-bfb76837eaca\") " pod="openstack/dnsmasq-dns-867cd545c7-s5tc2" Jan 28 16:09:49 crc kubenswrapper[4811]: I0128 16:09:49.822957 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-dns-swift-storage-0\") pod \"dnsmasq-dns-867cd545c7-s5tc2\" (UID: \"bfd184d9-d9d2-4a4b-a672-bfb76837eaca\") " pod="openstack/dnsmasq-dns-867cd545c7-s5tc2" Jan 28 16:09:49 crc kubenswrapper[4811]: I0128 16:09:49.823037 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-config\") pod \"dnsmasq-dns-867cd545c7-s5tc2\" (UID: \"bfd184d9-d9d2-4a4b-a672-bfb76837eaca\") " pod="openstack/dnsmasq-dns-867cd545c7-s5tc2" Jan 28 16:09:49 crc kubenswrapper[4811]: I0128 16:09:49.823107 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hk7k\" (UniqueName: \"kubernetes.io/projected/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-kube-api-access-7hk7k\") pod \"dnsmasq-dns-867cd545c7-s5tc2\" (UID: \"bfd184d9-d9d2-4a4b-a672-bfb76837eaca\") " pod="openstack/dnsmasq-dns-867cd545c7-s5tc2" Jan 28 16:09:49 crc kubenswrapper[4811]: I0128 16:09:49.823138 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-ovsdbserver-nb\") pod \"dnsmasq-dns-867cd545c7-s5tc2\" (UID: \"bfd184d9-d9d2-4a4b-a672-bfb76837eaca\") " pod="openstack/dnsmasq-dns-867cd545c7-s5tc2" Jan 28 16:09:49 crc kubenswrapper[4811]: I0128 16:09:49.824267 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-dns-svc\") pod \"dnsmasq-dns-867cd545c7-s5tc2\" (UID: \"bfd184d9-d9d2-4a4b-a672-bfb76837eaca\") " pod="openstack/dnsmasq-dns-867cd545c7-s5tc2" Jan 28 16:09:49 crc kubenswrapper[4811]: I0128 16:09:49.824289 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-ovsdbserver-sb\") pod \"dnsmasq-dns-867cd545c7-s5tc2\" (UID: \"bfd184d9-d9d2-4a4b-a672-bfb76837eaca\") " pod="openstack/dnsmasq-dns-867cd545c7-s5tc2" Jan 28 16:09:49 crc kubenswrapper[4811]: I0128 16:09:49.824364 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-ovsdbserver-nb\") pod \"dnsmasq-dns-867cd545c7-s5tc2\" (UID: \"bfd184d9-d9d2-4a4b-a672-bfb76837eaca\") " pod="openstack/dnsmasq-dns-867cd545c7-s5tc2" Jan 28 16:09:49 crc kubenswrapper[4811]: I0128 16:09:49.824408 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-config\") pod \"dnsmasq-dns-867cd545c7-s5tc2\" (UID: \"bfd184d9-d9d2-4a4b-a672-bfb76837eaca\") " pod="openstack/dnsmasq-dns-867cd545c7-s5tc2" Jan 28 16:09:49 crc kubenswrapper[4811]: I0128 16:09:49.824582 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-dns-swift-storage-0\") pod \"dnsmasq-dns-867cd545c7-s5tc2\" (UID: \"bfd184d9-d9d2-4a4b-a672-bfb76837eaca\") " pod="openstack/dnsmasq-dns-867cd545c7-s5tc2" Jan 28 16:09:49 crc kubenswrapper[4811]: I0128 16:09:49.849209 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7hk7k\" (UniqueName: \"kubernetes.io/projected/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-kube-api-access-7hk7k\") pod \"dnsmasq-dns-867cd545c7-s5tc2\" (UID: \"bfd184d9-d9d2-4a4b-a672-bfb76837eaca\") " pod="openstack/dnsmasq-dns-867cd545c7-s5tc2" Jan 28 16:09:49 crc kubenswrapper[4811]: I0128 16:09:49.919470 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-867cd545c7-s5tc2" Jan 28 16:09:50 crc kubenswrapper[4811]: I0128 16:09:50.434462 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-867cd545c7-s5tc2"] Jan 28 16:09:51 crc kubenswrapper[4811]: I0128 16:09:51.440372 4811 generic.go:334] "Generic (PLEG): container finished" podID="bfd184d9-d9d2-4a4b-a672-bfb76837eaca" containerID="c407971373e14f6665ab21c12a8b6508395da0a3c8d48f45186867275344d4b2" exitCode=0 Jan 28 16:09:51 crc kubenswrapper[4811]: I0128 16:09:51.440478 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-867cd545c7-s5tc2" event={"ID":"bfd184d9-d9d2-4a4b-a672-bfb76837eaca","Type":"ContainerDied","Data":"c407971373e14f6665ab21c12a8b6508395da0a3c8d48f45186867275344d4b2"} Jan 28 16:09:51 crc kubenswrapper[4811]: I0128 16:09:51.440953 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-867cd545c7-s5tc2" event={"ID":"bfd184d9-d9d2-4a4b-a672-bfb76837eaca","Type":"ContainerStarted","Data":"6eb827e232c933ee3a3117528575820dd17b0085464efb3cbedaee14dc25f970"} Jan 28 16:09:51 crc kubenswrapper[4811]: I0128 16:09:51.902800 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:09:51 crc kubenswrapper[4811]: I0128 16:09:51.903505 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="aa9c631f-b34b-4a59-9f16-9d3d834b6733" containerName="ceilometer-central-agent" containerID="cri-o://7156168d7ae5ea487f2eb8be1c308e23e0c1ce025dda0ffa237ec352ac6b2554" gracePeriod=30 Jan 28 16:09:51 crc kubenswrapper[4811]: I0128 16:09:51.903539 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="aa9c631f-b34b-4a59-9f16-9d3d834b6733" containerName="proxy-httpd" containerID="cri-o://893d99491571c4fd2fd83af5f826adf679d49f6644351fd123796d5a01ea5c2e" gracePeriod=30 Jan 28 16:09:51 crc kubenswrapper[4811]: I0128 16:09:51.903621 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="aa9c631f-b34b-4a59-9f16-9d3d834b6733" containerName="sg-core" containerID="cri-o://7eb7ce2504aac3787810e47efe14313e4edb046e457f30460a7b6420b3909ef0" gracePeriod=30 Jan 28 16:09:51 crc kubenswrapper[4811]: I0128 16:09:51.903657 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="aa9c631f-b34b-4a59-9f16-9d3d834b6733" containerName="ceilometer-notification-agent" containerID="cri-o://6292c26f1573b7d96779b02bcf11915a359ed8ad1d7e3a0f7e0cf2873a4be186" gracePeriod=30 Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.053645 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/nova-cell1-novncproxy-0" Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.179095 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.450797 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-867cd545c7-s5tc2" event={"ID":"bfd184d9-d9d2-4a4b-a672-bfb76837eaca","Type":"ContainerStarted","Data":"2737a1ce65612a85e61445090a2668841ba19af66f524535b6accf0e61ca2d75"} Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.451536 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-867cd545c7-s5tc2" Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.455328 4811 generic.go:334] "Generic (PLEG): container finished" podID="aa9c631f-b34b-4a59-9f16-9d3d834b6733" containerID="893d99491571c4fd2fd83af5f826adf679d49f6644351fd123796d5a01ea5c2e" exitCode=0 Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.455363 4811 generic.go:334] "Generic (PLEG): container finished" podID="aa9c631f-b34b-4a59-9f16-9d3d834b6733" containerID="7eb7ce2504aac3787810e47efe14313e4edb046e457f30460a7b6420b3909ef0" exitCode=2 Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.455374 4811 generic.go:334] "Generic (PLEG): container finished" podID="aa9c631f-b34b-4a59-9f16-9d3d834b6733" containerID="6292c26f1573b7d96779b02bcf11915a359ed8ad1d7e3a0f7e0cf2873a4be186" exitCode=0 Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.455382 4811 generic.go:334] "Generic (PLEG): container finished" podID="aa9c631f-b34b-4a59-9f16-9d3d834b6733" containerID="7156168d7ae5ea487f2eb8be1c308e23e0c1ce025dda0ffa237ec352ac6b2554" exitCode=0 Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.455509 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa9c631f-b34b-4a59-9f16-9d3d834b6733","Type":"ContainerDied","Data":"893d99491571c4fd2fd83af5f826adf679d49f6644351fd123796d5a01ea5c2e"} Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.455545 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa9c631f-b34b-4a59-9f16-9d3d834b6733","Type":"ContainerDied","Data":"7eb7ce2504aac3787810e47efe14313e4edb046e457f30460a7b6420b3909ef0"} Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.455561 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa9c631f-b34b-4a59-9f16-9d3d834b6733","Type":"ContainerDied","Data":"6292c26f1573b7d96779b02bcf11915a359ed8ad1d7e3a0f7e0cf2873a4be186"} Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.455572 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa9c631f-b34b-4a59-9f16-9d3d834b6733","Type":"ContainerDied","Data":"7156168d7ae5ea487f2eb8be1c308e23e0c1ce025dda0ffa237ec352ac6b2554"} Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.455591 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="009dc16a-d4f9-44a5-9d97-caaca19d6fe0" containerName="nova-api-log" containerID="cri-o://8e18d0242b412d0be1d3b056b0585556044366bd6e81723846f9c5e5c26893f3" gracePeriod=30 Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.455660 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="009dc16a-d4f9-44a5-9d97-caaca19d6fe0" containerName="nova-api-api" containerID="cri-o://ab2d650179f0753c6a2187d31f7407a99a03ce6d2e6ca9683099831903045085" 
gracePeriod=30 Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.473520 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-867cd545c7-s5tc2" podStartSLOduration=3.4735006950000002 podStartE2EDuration="3.473500695s" podCreationTimestamp="2026-01-28 16:09:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:09:52.471275184 +0000 UTC m=+1485.225638777" watchObservedRunningTime="2026-01-28 16:09:52.473500695 +0000 UTC m=+1485.227864298" Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.744801 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.890890 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cn258\" (UniqueName: \"kubernetes.io/projected/aa9c631f-b34b-4a59-9f16-9d3d834b6733-kube-api-access-cn258\") pod \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\" (UID: \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\") " Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.891067 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa9c631f-b34b-4a59-9f16-9d3d834b6733-config-data\") pod \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\" (UID: \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\") " Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.891730 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa9c631f-b34b-4a59-9f16-9d3d834b6733-run-httpd\") pod \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\" (UID: \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\") " Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.891775 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa9c631f-b34b-4a59-9f16-9d3d834b6733-scripts\") pod \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\" (UID: \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\") " Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.891795 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa9c631f-b34b-4a59-9f16-9d3d834b6733-ceilometer-tls-certs\") pod \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\" (UID: \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\") " Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.891830 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa9c631f-b34b-4a59-9f16-9d3d834b6733-log-httpd\") pod \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\" (UID: \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\") " Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.891854 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aa9c631f-b34b-4a59-9f16-9d3d834b6733-sg-core-conf-yaml\") pod \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\" (UID: \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\") " Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.891873 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa9c631f-b34b-4a59-9f16-9d3d834b6733-combined-ca-bundle\") pod \"aa9c631f-b34b-4a59-9f16-9d3d834b6733\" (UID: 
\"aa9c631f-b34b-4a59-9f16-9d3d834b6733\") " Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.892024 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa9c631f-b34b-4a59-9f16-9d3d834b6733-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "aa9c631f-b34b-4a59-9f16-9d3d834b6733" (UID: "aa9c631f-b34b-4a59-9f16-9d3d834b6733"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.892157 4811 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa9c631f-b34b-4a59-9f16-9d3d834b6733-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.892715 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa9c631f-b34b-4a59-9f16-9d3d834b6733-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "aa9c631f-b34b-4a59-9f16-9d3d834b6733" (UID: "aa9c631f-b34b-4a59-9f16-9d3d834b6733"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.896765 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa9c631f-b34b-4a59-9f16-9d3d834b6733-scripts" (OuterVolumeSpecName: "scripts") pod "aa9c631f-b34b-4a59-9f16-9d3d834b6733" (UID: "aa9c631f-b34b-4a59-9f16-9d3d834b6733"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.897158 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa9c631f-b34b-4a59-9f16-9d3d834b6733-kube-api-access-cn258" (OuterVolumeSpecName: "kube-api-access-cn258") pod "aa9c631f-b34b-4a59-9f16-9d3d834b6733" (UID: "aa9c631f-b34b-4a59-9f16-9d3d834b6733"). InnerVolumeSpecName "kube-api-access-cn258". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.925916 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa9c631f-b34b-4a59-9f16-9d3d834b6733-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "aa9c631f-b34b-4a59-9f16-9d3d834b6733" (UID: "aa9c631f-b34b-4a59-9f16-9d3d834b6733"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.956550 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa9c631f-b34b-4a59-9f16-9d3d834b6733-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "aa9c631f-b34b-4a59-9f16-9d3d834b6733" (UID: "aa9c631f-b34b-4a59-9f16-9d3d834b6733"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.975715 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa9c631f-b34b-4a59-9f16-9d3d834b6733-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aa9c631f-b34b-4a59-9f16-9d3d834b6733" (UID: "aa9c631f-b34b-4a59-9f16-9d3d834b6733"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.994673 4811 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aa9c631f-b34b-4a59-9f16-9d3d834b6733-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.994712 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa9c631f-b34b-4a59-9f16-9d3d834b6733-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.994727 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cn258\" (UniqueName: \"kubernetes.io/projected/aa9c631f-b34b-4a59-9f16-9d3d834b6733-kube-api-access-cn258\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.994740 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa9c631f-b34b-4a59-9f16-9d3d834b6733-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.994753 4811 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa9c631f-b34b-4a59-9f16-9d3d834b6733-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.994763 4811 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa9c631f-b34b-4a59-9f16-9d3d834b6733-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:52 crc kubenswrapper[4811]: I0128 16:09:52.999623 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa9c631f-b34b-4a59-9f16-9d3d834b6733-config-data" (OuterVolumeSpecName: "config-data") pod "aa9c631f-b34b-4a59-9f16-9d3d834b6733" (UID: "aa9c631f-b34b-4a59-9f16-9d3d834b6733"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.096715 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa9c631f-b34b-4a59-9f16-9d3d834b6733-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.467086 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa9c631f-b34b-4a59-9f16-9d3d834b6733","Type":"ContainerDied","Data":"2450c217ba98dc151f807730bcd9128e9523966569a3c52adb1af1cc767b533e"} Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.467121 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.467139 4811 scope.go:117] "RemoveContainer" containerID="893d99491571c4fd2fd83af5f826adf679d49f6644351fd123796d5a01ea5c2e" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.521313 4811 generic.go:334] "Generic (PLEG): container finished" podID="009dc16a-d4f9-44a5-9d97-caaca19d6fe0" containerID="8e18d0242b412d0be1d3b056b0585556044366bd6e81723846f9c5e5c26893f3" exitCode=143 Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.521376 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"009dc16a-d4f9-44a5-9d97-caaca19d6fe0","Type":"ContainerDied","Data":"8e18d0242b412d0be1d3b056b0585556044366bd6e81723846f9c5e5c26893f3"} Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.551357 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.560868 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.588358 4811 scope.go:117] "RemoveContainer" containerID="7eb7ce2504aac3787810e47efe14313e4edb046e457f30460a7b6420b3909ef0" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.603663 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:09:53 crc kubenswrapper[4811]: E0128 16:09:53.605142 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa9c631f-b34b-4a59-9f16-9d3d834b6733" containerName="proxy-httpd" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.605171 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa9c631f-b34b-4a59-9f16-9d3d834b6733" containerName="proxy-httpd" Jan 28 16:09:53 crc kubenswrapper[4811]: E0128 16:09:53.607408 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa9c631f-b34b-4a59-9f16-9d3d834b6733" containerName="ceilometer-central-agent" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.607439 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa9c631f-b34b-4a59-9f16-9d3d834b6733" containerName="ceilometer-central-agent" Jan 28 16:09:53 crc kubenswrapper[4811]: E0128 16:09:53.607474 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa9c631f-b34b-4a59-9f16-9d3d834b6733" containerName="ceilometer-notification-agent" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.607482 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa9c631f-b34b-4a59-9f16-9d3d834b6733" containerName="ceilometer-notification-agent" Jan 28 16:09:53 crc kubenswrapper[4811]: E0128 16:09:53.607505 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa9c631f-b34b-4a59-9f16-9d3d834b6733" containerName="sg-core" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.607511 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa9c631f-b34b-4a59-9f16-9d3d834b6733" containerName="sg-core" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.607906 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa9c631f-b34b-4a59-9f16-9d3d834b6733" containerName="proxy-httpd" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.607928 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa9c631f-b34b-4a59-9f16-9d3d834b6733" containerName="ceilometer-notification-agent" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.607961 4811 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="aa9c631f-b34b-4a59-9f16-9d3d834b6733" containerName="ceilometer-central-agent" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.607978 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa9c631f-b34b-4a59-9f16-9d3d834b6733" containerName="sg-core" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.614629 4811 scope.go:117] "RemoveContainer" containerID="6292c26f1573b7d96779b02bcf11915a359ed8ad1d7e3a0f7e0cf2873a4be186" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.619149 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.623279 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.623539 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.623912 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.626744 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.650564 4811 scope.go:117] "RemoveContainer" containerID="7156168d7ae5ea487f2eb8be1c308e23e0c1ce025dda0ffa237ec352ac6b2554" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.722423 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\") " pod="openstack/ceilometer-0" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.722498 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-config-data\") pod \"ceilometer-0\" (UID: \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\") " pod="openstack/ceilometer-0" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.722533 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\") " pod="openstack/ceilometer-0" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.722802 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-run-httpd\") pod \"ceilometer-0\" (UID: \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\") " pod="openstack/ceilometer-0" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.722878 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7rgnh\" (UniqueName: \"kubernetes.io/projected/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-kube-api-access-7rgnh\") pod \"ceilometer-0\" (UID: \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\") " pod="openstack/ceilometer-0" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.722926 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-scripts\") pod \"ceilometer-0\" (UID: \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\") " pod="openstack/ceilometer-0" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.723228 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\") " pod="openstack/ceilometer-0" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.723418 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-log-httpd\") pod \"ceilometer-0\" (UID: \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\") " pod="openstack/ceilometer-0" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.824860 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-run-httpd\") pod \"ceilometer-0\" (UID: \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\") " pod="openstack/ceilometer-0" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.824924 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7rgnh\" (UniqueName: \"kubernetes.io/projected/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-kube-api-access-7rgnh\") pod \"ceilometer-0\" (UID: \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\") " pod="openstack/ceilometer-0" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.824957 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-scripts\") pod \"ceilometer-0\" (UID: \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\") " pod="openstack/ceilometer-0" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.825019 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\") " pod="openstack/ceilometer-0" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.825083 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-log-httpd\") pod \"ceilometer-0\" (UID: \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\") " pod="openstack/ceilometer-0" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.825163 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\") " pod="openstack/ceilometer-0" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.825190 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-config-data\") pod \"ceilometer-0\" (UID: \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\") " pod="openstack/ceilometer-0" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.825222 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\") " pod="openstack/ceilometer-0" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.825967 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-log-httpd\") pod \"ceilometer-0\" (UID: \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\") " pod="openstack/ceilometer-0" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.826508 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-run-httpd\") pod \"ceilometer-0\" (UID: \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\") " pod="openstack/ceilometer-0" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.838766 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\") " pod="openstack/ceilometer-0" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.839058 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\") " pod="openstack/ceilometer-0" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.839346 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-config-data\") pod \"ceilometer-0\" (UID: \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\") " pod="openstack/ceilometer-0" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.840659 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\") " pod="openstack/ceilometer-0" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.840790 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-scripts\") pod \"ceilometer-0\" (UID: \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\") " pod="openstack/ceilometer-0" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.843645 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7rgnh\" (UniqueName: \"kubernetes.io/projected/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-kube-api-access-7rgnh\") pod \"ceilometer-0\" (UID: \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\") " pod="openstack/ceilometer-0" Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.844966 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.845682 4811 util.go:30] "No sandbox for pod can be found. 
Jan 28 16:09:53 crc kubenswrapper[4811]: I0128 16:09:53.845682 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 28 16:09:54 crc kubenswrapper[4811]: I0128 16:09:54.299276 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Jan 28 16:09:54 crc kubenswrapper[4811]: I0128 16:09:54.348525 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa9c631f-b34b-4a59-9f16-9d3d834b6733" path="/var/lib/kubelet/pods/aa9c631f-b34b-4a59-9f16-9d3d834b6733/volumes"
Jan 28 16:09:54 crc kubenswrapper[4811]: I0128 16:09:54.531410 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad1819b6-f8f6-488e-b0a5-2c032a50bb57","Type":"ContainerStarted","Data":"f0e9a45f4dc51fcf44e525e51ccfc3b9da9f5135b0d1de15668ae2ed776dff3c"}
Jan 28 16:09:55 crc kubenswrapper[4811]: I0128 16:09:55.542839 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad1819b6-f8f6-488e-b0a5-2c032a50bb57","Type":"ContainerStarted","Data":"4118d781d07ee77f52069b5e89d67a8d192eb68dd733377011ee2da5f304380b"}
Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.056634 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.168658 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/009dc16a-d4f9-44a5-9d97-caaca19d6fe0-config-data\") pod \"009dc16a-d4f9-44a5-9d97-caaca19d6fe0\" (UID: \"009dc16a-d4f9-44a5-9d97-caaca19d6fe0\") "
Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.168725 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7lts\" (UniqueName: \"kubernetes.io/projected/009dc16a-d4f9-44a5-9d97-caaca19d6fe0-kube-api-access-w7lts\") pod \"009dc16a-d4f9-44a5-9d97-caaca19d6fe0\" (UID: \"009dc16a-d4f9-44a5-9d97-caaca19d6fe0\") "
Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.168880 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/009dc16a-d4f9-44a5-9d97-caaca19d6fe0-logs\") pod \"009dc16a-d4f9-44a5-9d97-caaca19d6fe0\" (UID: \"009dc16a-d4f9-44a5-9d97-caaca19d6fe0\") "
Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.168925 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/009dc16a-d4f9-44a5-9d97-caaca19d6fe0-combined-ca-bundle\") pod \"009dc16a-d4f9-44a5-9d97-caaca19d6fe0\" (UID: \"009dc16a-d4f9-44a5-9d97-caaca19d6fe0\") "
Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.169867 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/009dc16a-d4f9-44a5-9d97-caaca19d6fe0-logs" (OuterVolumeSpecName: "logs") pod "009dc16a-d4f9-44a5-9d97-caaca19d6fe0" (UID: "009dc16a-d4f9-44a5-9d97-caaca19d6fe0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.170017 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/009dc16a-d4f9-44a5-9d97-caaca19d6fe0-logs\") on node \"crc\" DevicePath \"\""
Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.187307 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/009dc16a-d4f9-44a5-9d97-caaca19d6fe0-kube-api-access-w7lts" (OuterVolumeSpecName: "kube-api-access-w7lts") pod "009dc16a-d4f9-44a5-9d97-caaca19d6fe0" (UID: "009dc16a-d4f9-44a5-9d97-caaca19d6fe0"). InnerVolumeSpecName "kube-api-access-w7lts". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.199560 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/009dc16a-d4f9-44a5-9d97-caaca19d6fe0-config-data" (OuterVolumeSpecName: "config-data") pod "009dc16a-d4f9-44a5-9d97-caaca19d6fe0" (UID: "009dc16a-d4f9-44a5-9d97-caaca19d6fe0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.203356 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/009dc16a-d4f9-44a5-9d97-caaca19d6fe0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "009dc16a-d4f9-44a5-9d97-caaca19d6fe0" (UID: "009dc16a-d4f9-44a5-9d97-caaca19d6fe0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.271666 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/009dc16a-d4f9-44a5-9d97-caaca19d6fe0-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.271694 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/009dc16a-d4f9-44a5-9d97-caaca19d6fe0-config-data\") on node \"crc\" DevicePath \"\""
Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.271703 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7lts\" (UniqueName: \"kubernetes.io/projected/009dc16a-d4f9-44a5-9d97-caaca19d6fe0-kube-api-access-w7lts\") on node \"crc\" DevicePath \"\""
Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.552680 4811 generic.go:334] "Generic (PLEG): container finished" podID="009dc16a-d4f9-44a5-9d97-caaca19d6fe0" containerID="ab2d650179f0753c6a2187d31f7407a99a03ce6d2e6ca9683099831903045085" exitCode=0
Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.552970 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"009dc16a-d4f9-44a5-9d97-caaca19d6fe0","Type":"ContainerDied","Data":"ab2d650179f0753c6a2187d31f7407a99a03ce6d2e6ca9683099831903045085"}
Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.552996 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"009dc16a-d4f9-44a5-9d97-caaca19d6fe0","Type":"ContainerDied","Data":"efe49353f0a898509f27982132edb5700386b97942ba858235aceee554679ff2"}
Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.553012 4811 scope.go:117] "RemoveContainer" containerID="ab2d650179f0753c6a2187d31f7407a99a03ce6d2e6ca9683099831903045085"
Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.553100 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.572510 4811 scope.go:117] "RemoveContainer" containerID="8e18d0242b412d0be1d3b056b0585556044366bd6e81723846f9c5e5c26893f3"
Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.583868 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.595462 4811 scope.go:117] "RemoveContainer" containerID="ab2d650179f0753c6a2187d31f7407a99a03ce6d2e6ca9683099831903045085"
Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.603033 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Jan 28 16:09:56 crc kubenswrapper[4811]: E0128 16:09:56.603305 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab2d650179f0753c6a2187d31f7407a99a03ce6d2e6ca9683099831903045085\": container with ID starting with ab2d650179f0753c6a2187d31f7407a99a03ce6d2e6ca9683099831903045085 not found: ID does not exist" containerID="ab2d650179f0753c6a2187d31f7407a99a03ce6d2e6ca9683099831903045085"
Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.603366 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab2d650179f0753c6a2187d31f7407a99a03ce6d2e6ca9683099831903045085"} err="failed to get container status \"ab2d650179f0753c6a2187d31f7407a99a03ce6d2e6ca9683099831903045085\": rpc error: code = NotFound desc = could not find container \"ab2d650179f0753c6a2187d31f7407a99a03ce6d2e6ca9683099831903045085\": container with ID starting with ab2d650179f0753c6a2187d31f7407a99a03ce6d2e6ca9683099831903045085 not found: ID does not exist"
Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.603402 4811 scope.go:117] "RemoveContainer" containerID="8e18d0242b412d0be1d3b056b0585556044366bd6e81723846f9c5e5c26893f3"
Jan 28 16:09:56 crc kubenswrapper[4811]: E0128 16:09:56.604019 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e18d0242b412d0be1d3b056b0585556044366bd6e81723846f9c5e5c26893f3\": container with ID starting with 8e18d0242b412d0be1d3b056b0585556044366bd6e81723846f9c5e5c26893f3 not found: ID does not exist" containerID="8e18d0242b412d0be1d3b056b0585556044366bd6e81723846f9c5e5c26893f3"
Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.604073 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e18d0242b412d0be1d3b056b0585556044366bd6e81723846f9c5e5c26893f3"} err="failed to get container status \"8e18d0242b412d0be1d3b056b0585556044366bd6e81723846f9c5e5c26893f3\": rpc error: code = NotFound desc = could not find container \"8e18d0242b412d0be1d3b056b0585556044366bd6e81723846f9c5e5c26893f3\": container with ID starting with 8e18d0242b412d0be1d3b056b0585556044366bd6e81723846f9c5e5c26893f3 not found: ID does not exist"
containerName="nova-api-log" Jan 28 16:09:56 crc kubenswrapper[4811]: E0128 16:09:56.611199 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="009dc16a-d4f9-44a5-9d97-caaca19d6fe0" containerName="nova-api-api" Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.611207 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="009dc16a-d4f9-44a5-9d97-caaca19d6fe0" containerName="nova-api-api" Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.611456 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="009dc16a-d4f9-44a5-9d97-caaca19d6fe0" containerName="nova-api-api" Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.611500 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="009dc16a-d4f9-44a5-9d97-caaca19d6fe0" containerName="nova-api-log" Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.612789 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.618869 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.619789 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.622644 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.622977 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.782788 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f59a9a13-6ed9-4964-9e5a-75a433f2af91-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f59a9a13-6ed9-4964-9e5a-75a433f2af91\") " pod="openstack/nova-api-0" Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.782868 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f59a9a13-6ed9-4964-9e5a-75a433f2af91-public-tls-certs\") pod \"nova-api-0\" (UID: \"f59a9a13-6ed9-4964-9e5a-75a433f2af91\") " pod="openstack/nova-api-0" Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.782898 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f59a9a13-6ed9-4964-9e5a-75a433f2af91-config-data\") pod \"nova-api-0\" (UID: \"f59a9a13-6ed9-4964-9e5a-75a433f2af91\") " pod="openstack/nova-api-0" Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.783008 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f59a9a13-6ed9-4964-9e5a-75a433f2af91-logs\") pod \"nova-api-0\" (UID: \"f59a9a13-6ed9-4964-9e5a-75a433f2af91\") " pod="openstack/nova-api-0" Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.783108 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2scp\" (UniqueName: \"kubernetes.io/projected/f59a9a13-6ed9-4964-9e5a-75a433f2af91-kube-api-access-n2scp\") pod \"nova-api-0\" (UID: \"f59a9a13-6ed9-4964-9e5a-75a433f2af91\") " pod="openstack/nova-api-0" Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.783146 4811 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f59a9a13-6ed9-4964-9e5a-75a433f2af91-internal-tls-certs\") pod \"nova-api-0\" (UID: \"f59a9a13-6ed9-4964-9e5a-75a433f2af91\") " pod="openstack/nova-api-0" Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.884597 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f59a9a13-6ed9-4964-9e5a-75a433f2af91-logs\") pod \"nova-api-0\" (UID: \"f59a9a13-6ed9-4964-9e5a-75a433f2af91\") " pod="openstack/nova-api-0" Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.884702 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2scp\" (UniqueName: \"kubernetes.io/projected/f59a9a13-6ed9-4964-9e5a-75a433f2af91-kube-api-access-n2scp\") pod \"nova-api-0\" (UID: \"f59a9a13-6ed9-4964-9e5a-75a433f2af91\") " pod="openstack/nova-api-0" Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.884736 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f59a9a13-6ed9-4964-9e5a-75a433f2af91-internal-tls-certs\") pod \"nova-api-0\" (UID: \"f59a9a13-6ed9-4964-9e5a-75a433f2af91\") " pod="openstack/nova-api-0" Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.884781 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f59a9a13-6ed9-4964-9e5a-75a433f2af91-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f59a9a13-6ed9-4964-9e5a-75a433f2af91\") " pod="openstack/nova-api-0" Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.884816 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f59a9a13-6ed9-4964-9e5a-75a433f2af91-public-tls-certs\") pod \"nova-api-0\" (UID: \"f59a9a13-6ed9-4964-9e5a-75a433f2af91\") " pod="openstack/nova-api-0" Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.884843 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f59a9a13-6ed9-4964-9e5a-75a433f2af91-config-data\") pod \"nova-api-0\" (UID: \"f59a9a13-6ed9-4964-9e5a-75a433f2af91\") " pod="openstack/nova-api-0" Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.885384 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f59a9a13-6ed9-4964-9e5a-75a433f2af91-logs\") pod \"nova-api-0\" (UID: \"f59a9a13-6ed9-4964-9e5a-75a433f2af91\") " pod="openstack/nova-api-0" Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.889299 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f59a9a13-6ed9-4964-9e5a-75a433f2af91-public-tls-certs\") pod \"nova-api-0\" (UID: \"f59a9a13-6ed9-4964-9e5a-75a433f2af91\") " pod="openstack/nova-api-0" Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.889380 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f59a9a13-6ed9-4964-9e5a-75a433f2af91-internal-tls-certs\") pod \"nova-api-0\" (UID: \"f59a9a13-6ed9-4964-9e5a-75a433f2af91\") " pod="openstack/nova-api-0" Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.893472 4811 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f59a9a13-6ed9-4964-9e5a-75a433f2af91-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f59a9a13-6ed9-4964-9e5a-75a433f2af91\") " pod="openstack/nova-api-0" Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.893484 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f59a9a13-6ed9-4964-9e5a-75a433f2af91-config-data\") pod \"nova-api-0\" (UID: \"f59a9a13-6ed9-4964-9e5a-75a433f2af91\") " pod="openstack/nova-api-0" Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.919792 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2scp\" (UniqueName: \"kubernetes.io/projected/f59a9a13-6ed9-4964-9e5a-75a433f2af91-kube-api-access-n2scp\") pod \"nova-api-0\" (UID: \"f59a9a13-6ed9-4964-9e5a-75a433f2af91\") " pod="openstack/nova-api-0" Jan 28 16:09:56 crc kubenswrapper[4811]: I0128 16:09:56.938455 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 28 16:09:57 crc kubenswrapper[4811]: I0128 16:09:57.055121 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Jan 28 16:09:57 crc kubenswrapper[4811]: I0128 16:09:57.088338 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Jan 28 16:09:57 crc kubenswrapper[4811]: W0128 16:09:57.469577 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf59a9a13_6ed9_4964_9e5a_75a433f2af91.slice/crio-c426ec794e100248b537703655dfd35385f36812034b911f419e4e0a98f1a4f7 WatchSource:0}: Error finding container c426ec794e100248b537703655dfd35385f36812034b911f419e4e0a98f1a4f7: Status 404 returned error can't find the container with id c426ec794e100248b537703655dfd35385f36812034b911f419e4e0a98f1a4f7 Jan 28 16:09:57 crc kubenswrapper[4811]: I0128 16:09:57.473261 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 28 16:09:57 crc kubenswrapper[4811]: I0128 16:09:57.565544 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f59a9a13-6ed9-4964-9e5a-75a433f2af91","Type":"ContainerStarted","Data":"c426ec794e100248b537703655dfd35385f36812034b911f419e4e0a98f1a4f7"} Jan 28 16:09:57 crc kubenswrapper[4811]: I0128 16:09:57.570861 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad1819b6-f8f6-488e-b0a5-2c032a50bb57","Type":"ContainerStarted","Data":"d9f9a0f8d8ef4d173f67b5dae4bd737f60adc547940317d4473c9e7ac4c9d5d2"} Jan 28 16:09:57 crc kubenswrapper[4811]: I0128 16:09:57.570909 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad1819b6-f8f6-488e-b0a5-2c032a50bb57","Type":"ContainerStarted","Data":"58eb72bdbc49c1db4bd6a8f3ff0f677fe0081e578427777c49080084da3fac18"} Jan 28 16:09:57 crc kubenswrapper[4811]: I0128 16:09:57.588226 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Jan 28 16:09:57 crc kubenswrapper[4811]: I0128 16:09:57.762124 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-62xln"] Jan 28 16:09:57 crc kubenswrapper[4811]: I0128 16:09:57.763562 4811 util.go:30] "No sandbox for pod can be found. 
Jan 28 16:09:57 crc kubenswrapper[4811]: I0128 16:09:57.762124 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-62xln"]
Jan 28 16:09:57 crc kubenswrapper[4811]: I0128 16:09:57.763562 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-62xln"
Jan 28 16:09:57 crc kubenswrapper[4811]: I0128 16:09:57.766481 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts"
Jan 28 16:09:57 crc kubenswrapper[4811]: I0128 16:09:57.766647 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data"
Jan 28 16:09:57 crc kubenswrapper[4811]: I0128 16:09:57.772464 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-62xln"]
Jan 28 16:09:57 crc kubenswrapper[4811]: I0128 16:09:57.912886 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rv7d9\" (UniqueName: \"kubernetes.io/projected/4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464-kube-api-access-rv7d9\") pod \"nova-cell1-cell-mapping-62xln\" (UID: \"4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464\") " pod="openstack/nova-cell1-cell-mapping-62xln"
Jan 28 16:09:57 crc kubenswrapper[4811]: I0128 16:09:57.912988 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464-config-data\") pod \"nova-cell1-cell-mapping-62xln\" (UID: \"4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464\") " pod="openstack/nova-cell1-cell-mapping-62xln"
Jan 28 16:09:57 crc kubenswrapper[4811]: I0128 16:09:57.913073 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-62xln\" (UID: \"4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464\") " pod="openstack/nova-cell1-cell-mapping-62xln"
Jan 28 16:09:57 crc kubenswrapper[4811]: I0128 16:09:57.913242 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464-scripts\") pod \"nova-cell1-cell-mapping-62xln\" (UID: \"4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464\") " pod="openstack/nova-cell1-cell-mapping-62xln"
Jan 28 16:09:58 crc kubenswrapper[4811]: I0128 16:09:58.015288 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464-scripts\") pod \"nova-cell1-cell-mapping-62xln\" (UID: \"4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464\") " pod="openstack/nova-cell1-cell-mapping-62xln"
Jan 28 16:09:58 crc kubenswrapper[4811]: I0128 16:09:58.015353 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rv7d9\" (UniqueName: \"kubernetes.io/projected/4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464-kube-api-access-rv7d9\") pod \"nova-cell1-cell-mapping-62xln\" (UID: \"4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464\") " pod="openstack/nova-cell1-cell-mapping-62xln"
Jan 28 16:09:58 crc kubenswrapper[4811]: I0128 16:09:58.015444 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464-config-data\") pod \"nova-cell1-cell-mapping-62xln\" (UID: \"4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464\") " pod="openstack/nova-cell1-cell-mapping-62xln"
Jan 28 16:09:58 crc kubenswrapper[4811]: I0128 16:09:58.015513 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-62xln\" (UID: \"4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464\") " pod="openstack/nova-cell1-cell-mapping-62xln"
Jan 28 16:09:58 crc kubenswrapper[4811]: I0128 16:09:58.022190 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464-config-data\") pod \"nova-cell1-cell-mapping-62xln\" (UID: \"4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464\") " pod="openstack/nova-cell1-cell-mapping-62xln"
Jan 28 16:09:58 crc kubenswrapper[4811]: I0128 16:09:58.023723 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464-scripts\") pod \"nova-cell1-cell-mapping-62xln\" (UID: \"4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464\") " pod="openstack/nova-cell1-cell-mapping-62xln"
Jan 28 16:09:58 crc kubenswrapper[4811]: I0128 16:09:58.031027 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-62xln\" (UID: \"4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464\") " pod="openstack/nova-cell1-cell-mapping-62xln"
Jan 28 16:09:58 crc kubenswrapper[4811]: I0128 16:09:58.039022 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rv7d9\" (UniqueName: \"kubernetes.io/projected/4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464-kube-api-access-rv7d9\") pod \"nova-cell1-cell-mapping-62xln\" (UID: \"4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464\") " pod="openstack/nova-cell1-cell-mapping-62xln"
Jan 28 16:09:58 crc kubenswrapper[4811]: I0128 16:09:58.141837 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-62xln"
Jan 28 16:09:58 crc kubenswrapper[4811]: I0128 16:09:58.356958 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="009dc16a-d4f9-44a5-9d97-caaca19d6fe0" path="/var/lib/kubelet/pods/009dc16a-d4f9-44a5-9d97-caaca19d6fe0/volumes"
Jan 28 16:09:58 crc kubenswrapper[4811]: I0128 16:09:58.595348 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f59a9a13-6ed9-4964-9e5a-75a433f2af91","Type":"ContainerStarted","Data":"f1dc2c0cf9c366683765c1d61c4aa18586a492be7ea204c0c696e477a57f76da"}
Jan 28 16:09:58 crc kubenswrapper[4811]: I0128 16:09:58.595402 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f59a9a13-6ed9-4964-9e5a-75a433f2af91","Type":"ContainerStarted","Data":"42166adad71b121cacc3b9a332614612b07f6a216ed277a4e35096db55ccc529"}
Jan 28 16:09:58 crc kubenswrapper[4811]: I0128 16:09:58.615216 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.615194586 podStartE2EDuration="2.615194586s" podCreationTimestamp="2026-01-28 16:09:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:09:58.611532307 +0000 UTC m=+1491.365895900" watchObservedRunningTime="2026-01-28 16:09:58.615194586 +0000 UTC m=+1491.369558169"
Jan 28 16:09:58 crc kubenswrapper[4811]: I0128 16:09:58.653142 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-62xln"]
Jan 28 16:09:59 crc kubenswrapper[4811]: I0128 16:09:59.607266 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad1819b6-f8f6-488e-b0a5-2c032a50bb57","Type":"ContainerStarted","Data":"ea9b41d1b39833e282998b715a5c1ac858988843c729530075218e5953aca943"}
Jan 28 16:09:59 crc kubenswrapper[4811]: I0128 16:09:59.607546 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Jan 28 16:09:59 crc kubenswrapper[4811]: I0128 16:09:59.608859 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ad1819b6-f8f6-488e-b0a5-2c032a50bb57" containerName="proxy-httpd" containerID="cri-o://ea9b41d1b39833e282998b715a5c1ac858988843c729530075218e5953aca943" gracePeriod=30
Jan 28 16:09:59 crc kubenswrapper[4811]: I0128 16:09:59.608915 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ad1819b6-f8f6-488e-b0a5-2c032a50bb57" containerName="ceilometer-notification-agent" containerID="cri-o://58eb72bdbc49c1db4bd6a8f3ff0f677fe0081e578427777c49080084da3fac18" gracePeriod=30
Jan 28 16:09:59 crc kubenswrapper[4811]: I0128 16:09:59.608864 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ad1819b6-f8f6-488e-b0a5-2c032a50bb57" containerName="sg-core" containerID="cri-o://d9f9a0f8d8ef4d173f67b5dae4bd737f60adc547940317d4473c9e7ac4c9d5d2" gracePeriod=30
Jan 28 16:09:59 crc kubenswrapper[4811]: I0128 16:09:59.609888 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ad1819b6-f8f6-488e-b0a5-2c032a50bb57" containerName="ceilometer-central-agent" containerID="cri-o://4118d781d07ee77f52069b5e89d67a8d192eb68dd733377011ee2da5f304380b" gracePeriod=30
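
Each "Killing container with a grace period" entry above carries gracePeriod=30: the runtime delivers SIGTERM and the container is given that many seconds before escalation to SIGKILL. A toy Go sketch of the deadline logic; the exited channel simulates waiting on the process, and none of this is the kubelet's actual kuberuntime code.

package main

import (
	"fmt"
	"time"
)

// stopContainer simulates the grace-period kill logged above: SIGTERM first,
// SIGKILL only if the container outlives the deadline. "exited" is a channel
// standing in for the real process-wait.
func stopContainer(name string, gracePeriod time.Duration, exited <-chan struct{}) {
	fmt.Printf("Killing container %q with a grace period gracePeriod=%v\n", name, gracePeriod)
	select {
	case <-exited:
		fmt.Printf("%s exited within the grace period\n", name)
	case <-time.After(gracePeriod):
		fmt.Printf("%s still running after %v; escalating to SIGKILL\n", name, gracePeriod)
	}
}

func main() {
	exited := make(chan struct{})
	close(exited) // pretend the container handled SIGTERM immediately
	stopContainer("ceilometer-central-agent", 30*time.Second, exited)
}
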
4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-62xln" event={"ID":"4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464","Type":"ContainerStarted","Data":"edeadfe37f452b4de66d49e0830821b1e8e7fe1d122ac5020fcef25c60b8e824"} Jan 28 16:09:59 crc kubenswrapper[4811]: I0128 16:09:59.610115 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-62xln" event={"ID":"4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464","Type":"ContainerStarted","Data":"6436ff7c6686dc9be86faacaa2000aeb7e399aa9c9349cbbe23e4bb1a462b560"} Jan 28 16:09:59 crc kubenswrapper[4811]: I0128 16:09:59.635639 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.065425695 podStartE2EDuration="6.635620595s" podCreationTimestamp="2026-01-28 16:09:53 +0000 UTC" firstStartedPulling="2026-01-28 16:09:54.303814729 +0000 UTC m=+1487.058178312" lastFinishedPulling="2026-01-28 16:09:58.874009629 +0000 UTC m=+1491.628373212" observedRunningTime="2026-01-28 16:09:59.631969125 +0000 UTC m=+1492.386332708" watchObservedRunningTime="2026-01-28 16:09:59.635620595 +0000 UTC m=+1492.389984178" Jan 28 16:09:59 crc kubenswrapper[4811]: I0128 16:09:59.659569 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-62xln" podStartSLOduration=2.659552514 podStartE2EDuration="2.659552514s" podCreationTimestamp="2026-01-28 16:09:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:09:59.655928576 +0000 UTC m=+1492.410292179" watchObservedRunningTime="2026-01-28 16:09:59.659552514 +0000 UTC m=+1492.413916097" Jan 28 16:09:59 crc kubenswrapper[4811]: I0128 16:09:59.920613 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-867cd545c7-s5tc2" Jan 28 16:10:00 crc kubenswrapper[4811]: I0128 16:10:00.026778 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bfb54f9b5-4f9jb"] Jan 28 16:10:00 crc kubenswrapper[4811]: I0128 16:10:00.027064 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5bfb54f9b5-4f9jb" podUID="c4a4b149-cbd4-4b98-8b5d-9585a386f681" containerName="dnsmasq-dns" containerID="cri-o://666b3a6eca458a563f70b41aeac57d36deb8775e0080957e620a6808c3007d9e" gracePeriod=10 Jan 28 16:10:00 crc kubenswrapper[4811]: I0128 16:10:00.624377 4811 generic.go:334] "Generic (PLEG): container finished" podID="ad1819b6-f8f6-488e-b0a5-2c032a50bb57" containerID="ea9b41d1b39833e282998b715a5c1ac858988843c729530075218e5953aca943" exitCode=0 Jan 28 16:10:00 crc kubenswrapper[4811]: I0128 16:10:00.624670 4811 generic.go:334] "Generic (PLEG): container finished" podID="ad1819b6-f8f6-488e-b0a5-2c032a50bb57" containerID="d9f9a0f8d8ef4d173f67b5dae4bd737f60adc547940317d4473c9e7ac4c9d5d2" exitCode=2 Jan 28 16:10:00 crc kubenswrapper[4811]: I0128 16:10:00.624684 4811 generic.go:334] "Generic (PLEG): container finished" podID="ad1819b6-f8f6-488e-b0a5-2c032a50bb57" containerID="58eb72bdbc49c1db4bd6a8f3ff0f677fe0081e578427777c49080084da3fac18" exitCode=0 Jan 28 16:10:00 crc kubenswrapper[4811]: I0128 16:10:00.624561 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad1819b6-f8f6-488e-b0a5-2c032a50bb57","Type":"ContainerDied","Data":"ea9b41d1b39833e282998b715a5c1ac858988843c729530075218e5953aca943"} Jan 28 16:10:00 crc 
kubenswrapper[4811]: I0128 16:10:00.624745 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad1819b6-f8f6-488e-b0a5-2c032a50bb57","Type":"ContainerDied","Data":"d9f9a0f8d8ef4d173f67b5dae4bd737f60adc547940317d4473c9e7ac4c9d5d2"} Jan 28 16:10:00 crc kubenswrapper[4811]: I0128 16:10:00.624759 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad1819b6-f8f6-488e-b0a5-2c032a50bb57","Type":"ContainerDied","Data":"58eb72bdbc49c1db4bd6a8f3ff0f677fe0081e578427777c49080084da3fac18"} Jan 28 16:10:00 crc kubenswrapper[4811]: I0128 16:10:00.626271 4811 generic.go:334] "Generic (PLEG): container finished" podID="c4a4b149-cbd4-4b98-8b5d-9585a386f681" containerID="666b3a6eca458a563f70b41aeac57d36deb8775e0080957e620a6808c3007d9e" exitCode=0 Jan 28 16:10:00 crc kubenswrapper[4811]: I0128 16:10:00.627133 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bfb54f9b5-4f9jb" event={"ID":"c4a4b149-cbd4-4b98-8b5d-9585a386f681","Type":"ContainerDied","Data":"666b3a6eca458a563f70b41aeac57d36deb8775e0080957e620a6808c3007d9e"} Jan 28 16:10:00 crc kubenswrapper[4811]: I0128 16:10:00.770416 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bfb54f9b5-4f9jb" Jan 28 16:10:00 crc kubenswrapper[4811]: I0128 16:10:00.884160 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6x9ss\" (UniqueName: \"kubernetes.io/projected/c4a4b149-cbd4-4b98-8b5d-9585a386f681-kube-api-access-6x9ss\") pod \"c4a4b149-cbd4-4b98-8b5d-9585a386f681\" (UID: \"c4a4b149-cbd4-4b98-8b5d-9585a386f681\") " Jan 28 16:10:00 crc kubenswrapper[4811]: I0128 16:10:00.884482 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c4a4b149-cbd4-4b98-8b5d-9585a386f681-dns-swift-storage-0\") pod \"c4a4b149-cbd4-4b98-8b5d-9585a386f681\" (UID: \"c4a4b149-cbd4-4b98-8b5d-9585a386f681\") " Jan 28 16:10:00 crc kubenswrapper[4811]: I0128 16:10:00.885416 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4a4b149-cbd4-4b98-8b5d-9585a386f681-dns-svc\") pod \"c4a4b149-cbd4-4b98-8b5d-9585a386f681\" (UID: \"c4a4b149-cbd4-4b98-8b5d-9585a386f681\") " Jan 28 16:10:00 crc kubenswrapper[4811]: I0128 16:10:00.885551 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4a4b149-cbd4-4b98-8b5d-9585a386f681-config\") pod \"c4a4b149-cbd4-4b98-8b5d-9585a386f681\" (UID: \"c4a4b149-cbd4-4b98-8b5d-9585a386f681\") " Jan 28 16:10:00 crc kubenswrapper[4811]: I0128 16:10:00.885577 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c4a4b149-cbd4-4b98-8b5d-9585a386f681-ovsdbserver-sb\") pod \"c4a4b149-cbd4-4b98-8b5d-9585a386f681\" (UID: \"c4a4b149-cbd4-4b98-8b5d-9585a386f681\") " Jan 28 16:10:00 crc kubenswrapper[4811]: I0128 16:10:00.885687 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c4a4b149-cbd4-4b98-8b5d-9585a386f681-ovsdbserver-nb\") pod \"c4a4b149-cbd4-4b98-8b5d-9585a386f681\" (UID: \"c4a4b149-cbd4-4b98-8b5d-9585a386f681\") " Jan 28 16:10:00 crc kubenswrapper[4811]: I0128 16:10:00.897515 4811 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4a4b149-cbd4-4b98-8b5d-9585a386f681-kube-api-access-6x9ss" (OuterVolumeSpecName: "kube-api-access-6x9ss") pod "c4a4b149-cbd4-4b98-8b5d-9585a386f681" (UID: "c4a4b149-cbd4-4b98-8b5d-9585a386f681"). InnerVolumeSpecName "kube-api-access-6x9ss". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:10:00 crc kubenswrapper[4811]: I0128 16:10:00.934754 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4a4b149-cbd4-4b98-8b5d-9585a386f681-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c4a4b149-cbd4-4b98-8b5d-9585a386f681" (UID: "c4a4b149-cbd4-4b98-8b5d-9585a386f681"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:10:00 crc kubenswrapper[4811]: I0128 16:10:00.945058 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4a4b149-cbd4-4b98-8b5d-9585a386f681-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c4a4b149-cbd4-4b98-8b5d-9585a386f681" (UID: "c4a4b149-cbd4-4b98-8b5d-9585a386f681"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:10:00 crc kubenswrapper[4811]: I0128 16:10:00.949061 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4a4b149-cbd4-4b98-8b5d-9585a386f681-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c4a4b149-cbd4-4b98-8b5d-9585a386f681" (UID: "c4a4b149-cbd4-4b98-8b5d-9585a386f681"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:10:00 crc kubenswrapper[4811]: I0128 16:10:00.950622 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4a4b149-cbd4-4b98-8b5d-9585a386f681-config" (OuterVolumeSpecName: "config") pod "c4a4b149-cbd4-4b98-8b5d-9585a386f681" (UID: "c4a4b149-cbd4-4b98-8b5d-9585a386f681"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:10:00 crc kubenswrapper[4811]: I0128 16:10:00.967897 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4a4b149-cbd4-4b98-8b5d-9585a386f681-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c4a4b149-cbd4-4b98-8b5d-9585a386f681" (UID: "c4a4b149-cbd4-4b98-8b5d-9585a386f681"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:10:00 crc kubenswrapper[4811]: I0128 16:10:00.989989 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c4a4b149-cbd4-4b98-8b5d-9585a386f681-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:00 crc kubenswrapper[4811]: I0128 16:10:00.990034 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6x9ss\" (UniqueName: \"kubernetes.io/projected/c4a4b149-cbd4-4b98-8b5d-9585a386f681-kube-api-access-6x9ss\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:00 crc kubenswrapper[4811]: I0128 16:10:00.990048 4811 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c4a4b149-cbd4-4b98-8b5d-9585a386f681-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:00 crc kubenswrapper[4811]: I0128 16:10:00.990059 4811 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4a4b149-cbd4-4b98-8b5d-9585a386f681-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:00 crc kubenswrapper[4811]: I0128 16:10:00.990072 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4a4b149-cbd4-4b98-8b5d-9585a386f681-config\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:00 crc kubenswrapper[4811]: I0128 16:10:00.990085 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c4a4b149-cbd4-4b98-8b5d-9585a386f681-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:01 crc kubenswrapper[4811]: I0128 16:10:01.636466 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bfb54f9b5-4f9jb" event={"ID":"c4a4b149-cbd4-4b98-8b5d-9585a386f681","Type":"ContainerDied","Data":"4f13d314dbb6a1201dc356662b9e6ce4d36f72cfa561dec434ce84ae9a7e91d9"} Jan 28 16:10:01 crc kubenswrapper[4811]: I0128 16:10:01.636518 4811 scope.go:117] "RemoveContainer" containerID="666b3a6eca458a563f70b41aeac57d36deb8775e0080957e620a6808c3007d9e" Jan 28 16:10:01 crc kubenswrapper[4811]: I0128 16:10:01.636566 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bfb54f9b5-4f9jb" Jan 28 16:10:01 crc kubenswrapper[4811]: I0128 16:10:01.668028 4811 scope.go:117] "RemoveContainer" containerID="60fc5e97ee0333cff729231505c0e54f88165f1cba7b16d4d350d4b4f2cd7f7c" Jan 28 16:10:01 crc kubenswrapper[4811]: I0128 16:10:01.682121 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bfb54f9b5-4f9jb"] Jan 28 16:10:01 crc kubenswrapper[4811]: I0128 16:10:01.690423 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5bfb54f9b5-4f9jb"] Jan 28 16:10:02 crc kubenswrapper[4811]: I0128 16:10:02.349239 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4a4b149-cbd4-4b98-8b5d-9585a386f681" path="/var/lib/kubelet/pods/c4a4b149-cbd4-4b98-8b5d-9585a386f681/volumes" Jan 28 16:10:02 crc kubenswrapper[4811]: I0128 16:10:02.662923 4811 generic.go:334] "Generic (PLEG): container finished" podID="ad1819b6-f8f6-488e-b0a5-2c032a50bb57" containerID="4118d781d07ee77f52069b5e89d67a8d192eb68dd733377011ee2da5f304380b" exitCode=0 Jan 28 16:10:02 crc kubenswrapper[4811]: I0128 16:10:02.662963 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad1819b6-f8f6-488e-b0a5-2c032a50bb57","Type":"ContainerDied","Data":"4118d781d07ee77f52069b5e89d67a8d192eb68dd733377011ee2da5f304380b"} Jan 28 16:10:02 crc kubenswrapper[4811]: I0128 16:10:02.662991 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad1819b6-f8f6-488e-b0a5-2c032a50bb57","Type":"ContainerDied","Data":"f0e9a45f4dc51fcf44e525e51ccfc3b9da9f5135b0d1de15668ae2ed776dff3c"} Jan 28 16:10:02 crc kubenswrapper[4811]: I0128 16:10:02.663001 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f0e9a45f4dc51fcf44e525e51ccfc3b9da9f5135b0d1de15668ae2ed776dff3c" Jan 28 16:10:02 crc kubenswrapper[4811]: I0128 16:10:02.709313 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:10:02 crc kubenswrapper[4811]: I0128 16:10:02.758663 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-ceilometer-tls-certs\") pod \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\" (UID: \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\") " Jan 28 16:10:02 crc kubenswrapper[4811]: I0128 16:10:02.758762 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-config-data\") pod \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\" (UID: \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\") " Jan 28 16:10:02 crc kubenswrapper[4811]: I0128 16:10:02.758827 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-run-httpd\") pod \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\" (UID: \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\") " Jan 28 16:10:02 crc kubenswrapper[4811]: I0128 16:10:02.758866 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7rgnh\" (UniqueName: \"kubernetes.io/projected/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-kube-api-access-7rgnh\") pod \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\" (UID: \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\") " Jan 28 16:10:02 crc kubenswrapper[4811]: I0128 16:10:02.758906 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-log-httpd\") pod \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\" (UID: \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\") " Jan 28 16:10:02 crc kubenswrapper[4811]: I0128 16:10:02.758941 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-combined-ca-bundle\") pod \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\" (UID: \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\") " Jan 28 16:10:02 crc kubenswrapper[4811]: I0128 16:10:02.758978 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-sg-core-conf-yaml\") pod \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\" (UID: \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\") " Jan 28 16:10:02 crc kubenswrapper[4811]: I0128 16:10:02.759006 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-scripts\") pod \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\" (UID: \"ad1819b6-f8f6-488e-b0a5-2c032a50bb57\") " Jan 28 16:10:02 crc kubenswrapper[4811]: I0128 16:10:02.760561 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ad1819b6-f8f6-488e-b0a5-2c032a50bb57" (UID: "ad1819b6-f8f6-488e-b0a5-2c032a50bb57"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:10:02 crc kubenswrapper[4811]: I0128 16:10:02.760955 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ad1819b6-f8f6-488e-b0a5-2c032a50bb57" (UID: "ad1819b6-f8f6-488e-b0a5-2c032a50bb57"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:10:02 crc kubenswrapper[4811]: I0128 16:10:02.763974 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-scripts" (OuterVolumeSpecName: "scripts") pod "ad1819b6-f8f6-488e-b0a5-2c032a50bb57" (UID: "ad1819b6-f8f6-488e-b0a5-2c032a50bb57"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:10:02 crc kubenswrapper[4811]: I0128 16:10:02.775853 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-kube-api-access-7rgnh" (OuterVolumeSpecName: "kube-api-access-7rgnh") pod "ad1819b6-f8f6-488e-b0a5-2c032a50bb57" (UID: "ad1819b6-f8f6-488e-b0a5-2c032a50bb57"). InnerVolumeSpecName "kube-api-access-7rgnh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:10:02 crc kubenswrapper[4811]: I0128 16:10:02.787751 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ad1819b6-f8f6-488e-b0a5-2c032a50bb57" (UID: "ad1819b6-f8f6-488e-b0a5-2c032a50bb57"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:10:02 crc kubenswrapper[4811]: I0128 16:10:02.814335 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "ad1819b6-f8f6-488e-b0a5-2c032a50bb57" (UID: "ad1819b6-f8f6-488e-b0a5-2c032a50bb57"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:10:02 crc kubenswrapper[4811]: I0128 16:10:02.835084 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ad1819b6-f8f6-488e-b0a5-2c032a50bb57" (UID: "ad1819b6-f8f6-488e-b0a5-2c032a50bb57"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:10:02 crc kubenswrapper[4811]: I0128 16:10:02.860642 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7rgnh\" (UniqueName: \"kubernetes.io/projected/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-kube-api-access-7rgnh\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:02 crc kubenswrapper[4811]: I0128 16:10:02.860677 4811 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:02 crc kubenswrapper[4811]: I0128 16:10:02.860686 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:02 crc kubenswrapper[4811]: I0128 16:10:02.860697 4811 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:02 crc kubenswrapper[4811]: I0128 16:10:02.860707 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:02 crc kubenswrapper[4811]: I0128 16:10:02.860716 4811 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:02 crc kubenswrapper[4811]: I0128 16:10:02.860725 4811 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:02 crc kubenswrapper[4811]: I0128 16:10:02.874550 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-config-data" (OuterVolumeSpecName: "config-data") pod "ad1819b6-f8f6-488e-b0a5-2c032a50bb57" (UID: "ad1819b6-f8f6-488e-b0a5-2c032a50bb57"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:10:02 crc kubenswrapper[4811]: I0128 16:10:02.963071 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad1819b6-f8f6-488e-b0a5-2c032a50bb57-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.087502 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.087583 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.087644 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.088791 4811 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8a7a828294dde0bd8dbd211cdc8b81d51144a40905535ed75ebac45ca7ba49c2"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.089006 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://8a7a828294dde0bd8dbd211cdc8b81d51144a40905535ed75ebac45ca7ba49c2" gracePeriod=600 Jan 28 16:10:03 crc kubenswrapper[4811]: E0128 16:10:03.208510 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.672932 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="8a7a828294dde0bd8dbd211cdc8b81d51144a40905535ed75ebac45ca7ba49c2" exitCode=0 Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.673015 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"8a7a828294dde0bd8dbd211cdc8b81d51144a40905535ed75ebac45ca7ba49c2"} Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.673068 4811 scope.go:117] "RemoveContainer" containerID="b949c0e59097384d76683740d569c1b6a4440ebd12914648594ff426daaad130" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.673069 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.673584 4811 scope.go:117] "RemoveContainer" containerID="8a7a828294dde0bd8dbd211cdc8b81d51144a40905535ed75ebac45ca7ba49c2" Jan 28 16:10:03 crc kubenswrapper[4811]: E0128 16:10:03.673822 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.728149 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.744554 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.761373 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:10:03 crc kubenswrapper[4811]: E0128 16:10:03.761904 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad1819b6-f8f6-488e-b0a5-2c032a50bb57" containerName="sg-core" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.761932 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad1819b6-f8f6-488e-b0a5-2c032a50bb57" containerName="sg-core" Jan 28 16:10:03 crc kubenswrapper[4811]: E0128 16:10:03.761956 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4a4b149-cbd4-4b98-8b5d-9585a386f681" containerName="init" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.761966 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4a4b149-cbd4-4b98-8b5d-9585a386f681" containerName="init" Jan 28 16:10:03 crc kubenswrapper[4811]: E0128 16:10:03.761983 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad1819b6-f8f6-488e-b0a5-2c032a50bb57" containerName="proxy-httpd" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.761991 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad1819b6-f8f6-488e-b0a5-2c032a50bb57" containerName="proxy-httpd" Jan 28 16:10:03 crc kubenswrapper[4811]: E0128 16:10:03.762016 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad1819b6-f8f6-488e-b0a5-2c032a50bb57" containerName="ceilometer-notification-agent" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.762024 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad1819b6-f8f6-488e-b0a5-2c032a50bb57" containerName="ceilometer-notification-agent" Jan 28 16:10:03 crc kubenswrapper[4811]: E0128 16:10:03.762050 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad1819b6-f8f6-488e-b0a5-2c032a50bb57" containerName="ceilometer-central-agent" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.762057 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad1819b6-f8f6-488e-b0a5-2c032a50bb57" containerName="ceilometer-central-agent" Jan 28 16:10:03 crc kubenswrapper[4811]: E0128 16:10:03.762072 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4a4b149-cbd4-4b98-8b5d-9585a386f681" containerName="dnsmasq-dns" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.762080 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4a4b149-cbd4-4b98-8b5d-9585a386f681" containerName="dnsmasq-dns" Jan 28 
16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.762284 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad1819b6-f8f6-488e-b0a5-2c032a50bb57" containerName="ceilometer-notification-agent" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.762301 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4a4b149-cbd4-4b98-8b5d-9585a386f681" containerName="dnsmasq-dns" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.762318 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad1819b6-f8f6-488e-b0a5-2c032a50bb57" containerName="ceilometer-central-agent" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.762329 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad1819b6-f8f6-488e-b0a5-2c032a50bb57" containerName="sg-core" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.762341 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad1819b6-f8f6-488e-b0a5-2c032a50bb57" containerName="proxy-httpd" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.765993 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.769246 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.769548 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.769753 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.772228 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.776629 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6sj4\" (UniqueName: \"kubernetes.io/projected/4fca8964-49f3-477e-8b8c-b64a5200e386-kube-api-access-j6sj4\") pod \"ceilometer-0\" (UID: \"4fca8964-49f3-477e-8b8c-b64a5200e386\") " pod="openstack/ceilometer-0" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.776685 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fca8964-49f3-477e-8b8c-b64a5200e386-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"4fca8964-49f3-477e-8b8c-b64a5200e386\") " pod="openstack/ceilometer-0" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.776717 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fca8964-49f3-477e-8b8c-b64a5200e386-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4fca8964-49f3-477e-8b8c-b64a5200e386\") " pod="openstack/ceilometer-0" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.776759 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4fca8964-49f3-477e-8b8c-b64a5200e386-log-httpd\") pod \"ceilometer-0\" (UID: \"4fca8964-49f3-477e-8b8c-b64a5200e386\") " pod="openstack/ceilometer-0" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.776800 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/4fca8964-49f3-477e-8b8c-b64a5200e386-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4fca8964-49f3-477e-8b8c-b64a5200e386\") " pod="openstack/ceilometer-0" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.777039 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4fca8964-49f3-477e-8b8c-b64a5200e386-run-httpd\") pod \"ceilometer-0\" (UID: \"4fca8964-49f3-477e-8b8c-b64a5200e386\") " pod="openstack/ceilometer-0" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.777086 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4fca8964-49f3-477e-8b8c-b64a5200e386-scripts\") pod \"ceilometer-0\" (UID: \"4fca8964-49f3-477e-8b8c-b64a5200e386\") " pod="openstack/ceilometer-0" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.777193 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fca8964-49f3-477e-8b8c-b64a5200e386-config-data\") pod \"ceilometer-0\" (UID: \"4fca8964-49f3-477e-8b8c-b64a5200e386\") " pod="openstack/ceilometer-0" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.880245 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6sj4\" (UniqueName: \"kubernetes.io/projected/4fca8964-49f3-477e-8b8c-b64a5200e386-kube-api-access-j6sj4\") pod \"ceilometer-0\" (UID: \"4fca8964-49f3-477e-8b8c-b64a5200e386\") " pod="openstack/ceilometer-0" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.880284 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fca8964-49f3-477e-8b8c-b64a5200e386-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"4fca8964-49f3-477e-8b8c-b64a5200e386\") " pod="openstack/ceilometer-0" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.880301 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fca8964-49f3-477e-8b8c-b64a5200e386-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4fca8964-49f3-477e-8b8c-b64a5200e386\") " pod="openstack/ceilometer-0" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.880329 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4fca8964-49f3-477e-8b8c-b64a5200e386-log-httpd\") pod \"ceilometer-0\" (UID: \"4fca8964-49f3-477e-8b8c-b64a5200e386\") " pod="openstack/ceilometer-0" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.880359 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4fca8964-49f3-477e-8b8c-b64a5200e386-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4fca8964-49f3-477e-8b8c-b64a5200e386\") " pod="openstack/ceilometer-0" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.880385 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4fca8964-49f3-477e-8b8c-b64a5200e386-run-httpd\") pod \"ceilometer-0\" (UID: \"4fca8964-49f3-477e-8b8c-b64a5200e386\") " pod="openstack/ceilometer-0" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.880407 4811 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4fca8964-49f3-477e-8b8c-b64a5200e386-scripts\") pod \"ceilometer-0\" (UID: \"4fca8964-49f3-477e-8b8c-b64a5200e386\") " pod="openstack/ceilometer-0" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.880454 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fca8964-49f3-477e-8b8c-b64a5200e386-config-data\") pod \"ceilometer-0\" (UID: \"4fca8964-49f3-477e-8b8c-b64a5200e386\") " pod="openstack/ceilometer-0" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.880987 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4fca8964-49f3-477e-8b8c-b64a5200e386-log-httpd\") pod \"ceilometer-0\" (UID: \"4fca8964-49f3-477e-8b8c-b64a5200e386\") " pod="openstack/ceilometer-0" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.881402 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4fca8964-49f3-477e-8b8c-b64a5200e386-run-httpd\") pod \"ceilometer-0\" (UID: \"4fca8964-49f3-477e-8b8c-b64a5200e386\") " pod="openstack/ceilometer-0" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.885645 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4fca8964-49f3-477e-8b8c-b64a5200e386-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4fca8964-49f3-477e-8b8c-b64a5200e386\") " pod="openstack/ceilometer-0" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.885804 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4fca8964-49f3-477e-8b8c-b64a5200e386-scripts\") pod \"ceilometer-0\" (UID: \"4fca8964-49f3-477e-8b8c-b64a5200e386\") " pod="openstack/ceilometer-0" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.886947 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fca8964-49f3-477e-8b8c-b64a5200e386-config-data\") pod \"ceilometer-0\" (UID: \"4fca8964-49f3-477e-8b8c-b64a5200e386\") " pod="openstack/ceilometer-0" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.897306 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fca8964-49f3-477e-8b8c-b64a5200e386-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"4fca8964-49f3-477e-8b8c-b64a5200e386\") " pod="openstack/ceilometer-0" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.897534 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fca8964-49f3-477e-8b8c-b64a5200e386-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4fca8964-49f3-477e-8b8c-b64a5200e386\") " pod="openstack/ceilometer-0" Jan 28 16:10:03 crc kubenswrapper[4811]: I0128 16:10:03.907211 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6sj4\" (UniqueName: \"kubernetes.io/projected/4fca8964-49f3-477e-8b8c-b64a5200e386-kube-api-access-j6sj4\") pod \"ceilometer-0\" (UID: \"4fca8964-49f3-477e-8b8c-b64a5200e386\") " pod="openstack/ceilometer-0" Jan 28 16:10:04 crc kubenswrapper[4811]: I0128 16:10:04.090383 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:10:04 crc kubenswrapper[4811]: I0128 16:10:04.350292 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad1819b6-f8f6-488e-b0a5-2c032a50bb57" path="/var/lib/kubelet/pods/ad1819b6-f8f6-488e-b0a5-2c032a50bb57/volumes" Jan 28 16:10:04 crc kubenswrapper[4811]: I0128 16:10:04.532611 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:10:04 crc kubenswrapper[4811]: W0128 16:10:04.533667 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4fca8964_49f3_477e_8b8c_b64a5200e386.slice/crio-90d8526cdbd4e8a0599f410846a1cd7d6f98d1195bbe610cbe6f3713a0ba9b79 WatchSource:0}: Error finding container 90d8526cdbd4e8a0599f410846a1cd7d6f98d1195bbe610cbe6f3713a0ba9b79: Status 404 returned error can't find the container with id 90d8526cdbd4e8a0599f410846a1cd7d6f98d1195bbe610cbe6f3713a0ba9b79 Jan 28 16:10:04 crc kubenswrapper[4811]: I0128 16:10:04.684637 4811 generic.go:334] "Generic (PLEG): container finished" podID="4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464" containerID="edeadfe37f452b4de66d49e0830821b1e8e7fe1d122ac5020fcef25c60b8e824" exitCode=0 Jan 28 16:10:04 crc kubenswrapper[4811]: I0128 16:10:04.684683 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-62xln" event={"ID":"4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464","Type":"ContainerDied","Data":"edeadfe37f452b4de66d49e0830821b1e8e7fe1d122ac5020fcef25c60b8e824"} Jan 28 16:10:04 crc kubenswrapper[4811]: I0128 16:10:04.685805 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4fca8964-49f3-477e-8b8c-b64a5200e386","Type":"ContainerStarted","Data":"90d8526cdbd4e8a0599f410846a1cd7d6f98d1195bbe610cbe6f3713a0ba9b79"} Jan 28 16:10:05 crc kubenswrapper[4811]: I0128 16:10:05.702110 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4fca8964-49f3-477e-8b8c-b64a5200e386","Type":"ContainerStarted","Data":"42f811bb5217eb77366d4a1fe6568a51fd0e36cc0d12bb37a95c3d8b5f28e287"} Jan 28 16:10:06 crc kubenswrapper[4811]: I0128 16:10:06.052518 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-62xln" Jan 28 16:10:06 crc kubenswrapper[4811]: I0128 16:10:06.219241 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rv7d9\" (UniqueName: \"kubernetes.io/projected/4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464-kube-api-access-rv7d9\") pod \"4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464\" (UID: \"4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464\") " Jan 28 16:10:06 crc kubenswrapper[4811]: I0128 16:10:06.219412 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464-config-data\") pod \"4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464\" (UID: \"4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464\") " Jan 28 16:10:06 crc kubenswrapper[4811]: I0128 16:10:06.219580 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464-scripts\") pod \"4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464\" (UID: \"4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464\") " Jan 28 16:10:06 crc kubenswrapper[4811]: I0128 16:10:06.219665 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464-combined-ca-bundle\") pod \"4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464\" (UID: \"4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464\") " Jan 28 16:10:06 crc kubenswrapper[4811]: I0128 16:10:06.227603 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464-scripts" (OuterVolumeSpecName: "scripts") pod "4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464" (UID: "4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:10:06 crc kubenswrapper[4811]: I0128 16:10:06.227626 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464-kube-api-access-rv7d9" (OuterVolumeSpecName: "kube-api-access-rv7d9") pod "4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464" (UID: "4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464"). InnerVolumeSpecName "kube-api-access-rv7d9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:10:06 crc kubenswrapper[4811]: I0128 16:10:06.245383 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464-config-data" (OuterVolumeSpecName: "config-data") pod "4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464" (UID: "4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:10:06 crc kubenswrapper[4811]: I0128 16:10:06.246952 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464" (UID: "4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:10:06 crc kubenswrapper[4811]: I0128 16:10:06.322633 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rv7d9\" (UniqueName: \"kubernetes.io/projected/4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464-kube-api-access-rv7d9\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:06 crc kubenswrapper[4811]: I0128 16:10:06.322673 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:06 crc kubenswrapper[4811]: I0128 16:10:06.322686 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:06 crc kubenswrapper[4811]: I0128 16:10:06.322700 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:06 crc kubenswrapper[4811]: I0128 16:10:06.715443 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-62xln" event={"ID":"4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464","Type":"ContainerDied","Data":"6436ff7c6686dc9be86faacaa2000aeb7e399aa9c9349cbbe23e4bb1a462b560"} Jan 28 16:10:06 crc kubenswrapper[4811]: I0128 16:10:06.715711 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6436ff7c6686dc9be86faacaa2000aeb7e399aa9c9349cbbe23e4bb1a462b560" Jan 28 16:10:06 crc kubenswrapper[4811]: I0128 16:10:06.715688 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-62xln" Jan 28 16:10:06 crc kubenswrapper[4811]: I0128 16:10:06.718071 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4fca8964-49f3-477e-8b8c-b64a5200e386","Type":"ContainerStarted","Data":"bf5ccb17a1cc4802cfc552544575c5ab0edf7a21d95651488e4c67c1e8d0e963"} Jan 28 16:10:06 crc kubenswrapper[4811]: I0128 16:10:06.718125 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4fca8964-49f3-477e-8b8c-b64a5200e386","Type":"ContainerStarted","Data":"261bf598712ddaaf57a16c8cec80e31e139366aba7c731d282911fc0bdab9088"} Jan 28 16:10:06 crc kubenswrapper[4811]: I0128 16:10:06.897616 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 28 16:10:06 crc kubenswrapper[4811]: I0128 16:10:06.897876 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="f59a9a13-6ed9-4964-9e5a-75a433f2af91" containerName="nova-api-log" containerID="cri-o://42166adad71b121cacc3b9a332614612b07f6a216ed277a4e35096db55ccc529" gracePeriod=30 Jan 28 16:10:06 crc kubenswrapper[4811]: I0128 16:10:06.898351 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="f59a9a13-6ed9-4964-9e5a-75a433f2af91" containerName="nova-api-api" containerID="cri-o://f1dc2c0cf9c366683765c1d61c4aa18586a492be7ea204c0c696e477a57f76da" gracePeriod=30 Jan 28 16:10:06 crc kubenswrapper[4811]: I0128 16:10:06.909526 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 28 16:10:06 crc kubenswrapper[4811]: I0128 16:10:06.909759 4811 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openstack/nova-scheduler-0" podUID="31379f69-79a1-4684-aab1-3dc6cde7f876" containerName="nova-scheduler-scheduler" containerID="cri-o://a7c49aa51e51a45b4ee2c642909c851092986608cbe6ccfc70b994dcb0532b7e" gracePeriod=30 Jan 28 16:10:06 crc kubenswrapper[4811]: I0128 16:10:06.954579 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 28 16:10:06 crc kubenswrapper[4811]: I0128 16:10:06.954800 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d222430c-ef7f-46cd-b963-c500fec7bcc5" containerName="nova-metadata-log" containerID="cri-o://df9cffff43f236bf7b6aac1086fe3a61f9363207fc596a077ff0dc9718fdb5c3" gracePeriod=30 Jan 28 16:10:06 crc kubenswrapper[4811]: I0128 16:10:06.954921 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d222430c-ef7f-46cd-b963-c500fec7bcc5" containerName="nova-metadata-metadata" containerID="cri-o://265aa5c9fcb4bc9488c9027fffb3e87b0793f23d0e60be446a90ca677d4a1e3d" gracePeriod=30 Jan 28 16:10:07 crc kubenswrapper[4811]: I0128 16:10:07.781785 4811 generic.go:334] "Generic (PLEG): container finished" podID="f59a9a13-6ed9-4964-9e5a-75a433f2af91" containerID="f1dc2c0cf9c366683765c1d61c4aa18586a492be7ea204c0c696e477a57f76da" exitCode=0 Jan 28 16:10:07 crc kubenswrapper[4811]: I0128 16:10:07.782181 4811 generic.go:334] "Generic (PLEG): container finished" podID="f59a9a13-6ed9-4964-9e5a-75a433f2af91" containerID="42166adad71b121cacc3b9a332614612b07f6a216ed277a4e35096db55ccc529" exitCode=143 Jan 28 16:10:07 crc kubenswrapper[4811]: I0128 16:10:07.782217 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f59a9a13-6ed9-4964-9e5a-75a433f2af91","Type":"ContainerDied","Data":"f1dc2c0cf9c366683765c1d61c4aa18586a492be7ea204c0c696e477a57f76da"} Jan 28 16:10:07 crc kubenswrapper[4811]: I0128 16:10:07.782241 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f59a9a13-6ed9-4964-9e5a-75a433f2af91","Type":"ContainerDied","Data":"42166adad71b121cacc3b9a332614612b07f6a216ed277a4e35096db55ccc529"} Jan 28 16:10:07 crc kubenswrapper[4811]: I0128 16:10:07.789619 4811 generic.go:334] "Generic (PLEG): container finished" podID="d222430c-ef7f-46cd-b963-c500fec7bcc5" containerID="df9cffff43f236bf7b6aac1086fe3a61f9363207fc596a077ff0dc9718fdb5c3" exitCode=143 Jan 28 16:10:07 crc kubenswrapper[4811]: I0128 16:10:07.789648 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d222430c-ef7f-46cd-b963-c500fec7bcc5","Type":"ContainerDied","Data":"df9cffff43f236bf7b6aac1086fe3a61f9363207fc596a077ff0dc9718fdb5c3"} Jan 28 16:10:07 crc kubenswrapper[4811]: I0128 16:10:07.868271 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 28 16:10:07 crc kubenswrapper[4811]: I0128 16:10:07.965881 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f59a9a13-6ed9-4964-9e5a-75a433f2af91-internal-tls-certs\") pod \"f59a9a13-6ed9-4964-9e5a-75a433f2af91\" (UID: \"f59a9a13-6ed9-4964-9e5a-75a433f2af91\") " Jan 28 16:10:07 crc kubenswrapper[4811]: I0128 16:10:07.966015 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n2scp\" (UniqueName: \"kubernetes.io/projected/f59a9a13-6ed9-4964-9e5a-75a433f2af91-kube-api-access-n2scp\") pod \"f59a9a13-6ed9-4964-9e5a-75a433f2af91\" (UID: \"f59a9a13-6ed9-4964-9e5a-75a433f2af91\") " Jan 28 16:10:07 crc kubenswrapper[4811]: I0128 16:10:07.966045 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f59a9a13-6ed9-4964-9e5a-75a433f2af91-logs\") pod \"f59a9a13-6ed9-4964-9e5a-75a433f2af91\" (UID: \"f59a9a13-6ed9-4964-9e5a-75a433f2af91\") " Jan 28 16:10:07 crc kubenswrapper[4811]: I0128 16:10:07.966122 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f59a9a13-6ed9-4964-9e5a-75a433f2af91-config-data\") pod \"f59a9a13-6ed9-4964-9e5a-75a433f2af91\" (UID: \"f59a9a13-6ed9-4964-9e5a-75a433f2af91\") " Jan 28 16:10:07 crc kubenswrapper[4811]: I0128 16:10:07.967295 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f59a9a13-6ed9-4964-9e5a-75a433f2af91-logs" (OuterVolumeSpecName: "logs") pod "f59a9a13-6ed9-4964-9e5a-75a433f2af91" (UID: "f59a9a13-6ed9-4964-9e5a-75a433f2af91"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:10:07 crc kubenswrapper[4811]: I0128 16:10:07.974247 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f59a9a13-6ed9-4964-9e5a-75a433f2af91-kube-api-access-n2scp" (OuterVolumeSpecName: "kube-api-access-n2scp") pod "f59a9a13-6ed9-4964-9e5a-75a433f2af91" (UID: "f59a9a13-6ed9-4964-9e5a-75a433f2af91"). InnerVolumeSpecName "kube-api-access-n2scp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:10:07 crc kubenswrapper[4811]: I0128 16:10:07.992057 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f59a9a13-6ed9-4964-9e5a-75a433f2af91-config-data" (OuterVolumeSpecName: "config-data") pod "f59a9a13-6ed9-4964-9e5a-75a433f2af91" (UID: "f59a9a13-6ed9-4964-9e5a-75a433f2af91"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.017994 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f59a9a13-6ed9-4964-9e5a-75a433f2af91-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "f59a9a13-6ed9-4964-9e5a-75a433f2af91" (UID: "f59a9a13-6ed9-4964-9e5a-75a433f2af91"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.067331 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f59a9a13-6ed9-4964-9e5a-75a433f2af91-public-tls-certs\") pod \"f59a9a13-6ed9-4964-9e5a-75a433f2af91\" (UID: \"f59a9a13-6ed9-4964-9e5a-75a433f2af91\") " Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.067419 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f59a9a13-6ed9-4964-9e5a-75a433f2af91-combined-ca-bundle\") pod \"f59a9a13-6ed9-4964-9e5a-75a433f2af91\" (UID: \"f59a9a13-6ed9-4964-9e5a-75a433f2af91\") " Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.067756 4811 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f59a9a13-6ed9-4964-9e5a-75a433f2af91-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.067775 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n2scp\" (UniqueName: \"kubernetes.io/projected/f59a9a13-6ed9-4964-9e5a-75a433f2af91-kube-api-access-n2scp\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.067785 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f59a9a13-6ed9-4964-9e5a-75a433f2af91-logs\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.067796 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f59a9a13-6ed9-4964-9e5a-75a433f2af91-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.097290 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f59a9a13-6ed9-4964-9e5a-75a433f2af91-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f59a9a13-6ed9-4964-9e5a-75a433f2af91" (UID: "f59a9a13-6ed9-4964-9e5a-75a433f2af91"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.129425 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f59a9a13-6ed9-4964-9e5a-75a433f2af91-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "f59a9a13-6ed9-4964-9e5a-75a433f2af91" (UID: "f59a9a13-6ed9-4964-9e5a-75a433f2af91"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.168971 4811 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f59a9a13-6ed9-4964-9e5a-75a433f2af91-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.169003 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f59a9a13-6ed9-4964-9e5a-75a433f2af91-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.180171 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.373480 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31379f69-79a1-4684-aab1-3dc6cde7f876-combined-ca-bundle\") pod \"31379f69-79a1-4684-aab1-3dc6cde7f876\" (UID: \"31379f69-79a1-4684-aab1-3dc6cde7f876\") " Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.373826 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-45l7p\" (UniqueName: \"kubernetes.io/projected/31379f69-79a1-4684-aab1-3dc6cde7f876-kube-api-access-45l7p\") pod \"31379f69-79a1-4684-aab1-3dc6cde7f876\" (UID: \"31379f69-79a1-4684-aab1-3dc6cde7f876\") " Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.374534 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31379f69-79a1-4684-aab1-3dc6cde7f876-config-data\") pod \"31379f69-79a1-4684-aab1-3dc6cde7f876\" (UID: \"31379f69-79a1-4684-aab1-3dc6cde7f876\") " Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.377237 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31379f69-79a1-4684-aab1-3dc6cde7f876-kube-api-access-45l7p" (OuterVolumeSpecName: "kube-api-access-45l7p") pod "31379f69-79a1-4684-aab1-3dc6cde7f876" (UID: "31379f69-79a1-4684-aab1-3dc6cde7f876"). InnerVolumeSpecName "kube-api-access-45l7p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.417296 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31379f69-79a1-4684-aab1-3dc6cde7f876-config-data" (OuterVolumeSpecName: "config-data") pod "31379f69-79a1-4684-aab1-3dc6cde7f876" (UID: "31379f69-79a1-4684-aab1-3dc6cde7f876"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.427614 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31379f69-79a1-4684-aab1-3dc6cde7f876-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "31379f69-79a1-4684-aab1-3dc6cde7f876" (UID: "31379f69-79a1-4684-aab1-3dc6cde7f876"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.476400 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31379f69-79a1-4684-aab1-3dc6cde7f876-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.476465 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-45l7p\" (UniqueName: \"kubernetes.io/projected/31379f69-79a1-4684-aab1-3dc6cde7f876-kube-api-access-45l7p\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.476480 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31379f69-79a1-4684-aab1-3dc6cde7f876-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.802826 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4fca8964-49f3-477e-8b8c-b64a5200e386","Type":"ContainerStarted","Data":"2d9c65b2bee43397b8955d9320943cb62ff0d6aec47af0978b1868f4d792a3d1"} Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.802948 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.805731 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f59a9a13-6ed9-4964-9e5a-75a433f2af91","Type":"ContainerDied","Data":"c426ec794e100248b537703655dfd35385f36812034b911f419e4e0a98f1a4f7"} Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.805781 4811 scope.go:117] "RemoveContainer" containerID="f1dc2c0cf9c366683765c1d61c4aa18586a492be7ea204c0c696e477a57f76da" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.805748 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.811076 4811 generic.go:334] "Generic (PLEG): container finished" podID="31379f69-79a1-4684-aab1-3dc6cde7f876" containerID="a7c49aa51e51a45b4ee2c642909c851092986608cbe6ccfc70b994dcb0532b7e" exitCode=0 Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.811121 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"31379f69-79a1-4684-aab1-3dc6cde7f876","Type":"ContainerDied","Data":"a7c49aa51e51a45b4ee2c642909c851092986608cbe6ccfc70b994dcb0532b7e"} Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.811148 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"31379f69-79a1-4684-aab1-3dc6cde7f876","Type":"ContainerDied","Data":"9db3be0f5d2d26d3d9e8da58fdb7417a1c96579b0697903c035e93e64c047e10"} Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.811201 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.828828 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.875462086 podStartE2EDuration="5.828809917s" podCreationTimestamp="2026-01-28 16:10:03 +0000 UTC" firstStartedPulling="2026-01-28 16:10:04.535919212 +0000 UTC m=+1497.290282795" lastFinishedPulling="2026-01-28 16:10:08.489267033 +0000 UTC m=+1501.243630626" observedRunningTime="2026-01-28 16:10:08.826987488 +0000 UTC m=+1501.581351091" watchObservedRunningTime="2026-01-28 16:10:08.828809917 +0000 UTC m=+1501.583173500" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.833346 4811 scope.go:117] "RemoveContainer" containerID="42166adad71b121cacc3b9a332614612b07f6a216ed277a4e35096db55ccc529" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.858015 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.880823 4811 scope.go:117] "RemoveContainer" containerID="a7c49aa51e51a45b4ee2c642909c851092986608cbe6ccfc70b994dcb0532b7e" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.890078 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.925685 4811 scope.go:117] "RemoveContainer" containerID="a7c49aa51e51a45b4ee2c642909c851092986608cbe6ccfc70b994dcb0532b7e" Jan 28 16:10:08 crc kubenswrapper[4811]: E0128 16:10:08.927157 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7c49aa51e51a45b4ee2c642909c851092986608cbe6ccfc70b994dcb0532b7e\": container with ID starting with a7c49aa51e51a45b4ee2c642909c851092986608cbe6ccfc70b994dcb0532b7e not found: ID does not exist" containerID="a7c49aa51e51a45b4ee2c642909c851092986608cbe6ccfc70b994dcb0532b7e" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.927196 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7c49aa51e51a45b4ee2c642909c851092986608cbe6ccfc70b994dcb0532b7e"} err="failed to get container status \"a7c49aa51e51a45b4ee2c642909c851092986608cbe6ccfc70b994dcb0532b7e\": rpc error: code = NotFound desc = could not find container \"a7c49aa51e51a45b4ee2c642909c851092986608cbe6ccfc70b994dcb0532b7e\": container with ID starting with a7c49aa51e51a45b4ee2c642909c851092986608cbe6ccfc70b994dcb0532b7e not found: ID does not exist" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.939611 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 28 16:10:08 crc kubenswrapper[4811]: E0128 16:10:08.940041 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31379f69-79a1-4684-aab1-3dc6cde7f876" containerName="nova-scheduler-scheduler" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.940066 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="31379f69-79a1-4684-aab1-3dc6cde7f876" containerName="nova-scheduler-scheduler" Jan 28 16:10:08 crc kubenswrapper[4811]: E0128 16:10:08.940082 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f59a9a13-6ed9-4964-9e5a-75a433f2af91" containerName="nova-api-api" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.940089 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f59a9a13-6ed9-4964-9e5a-75a433f2af91" containerName="nova-api-api" Jan 28 16:10:08 crc kubenswrapper[4811]: E0128 
16:10:08.940099 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464" containerName="nova-manage" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.940106 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464" containerName="nova-manage" Jan 28 16:10:08 crc kubenswrapper[4811]: E0128 16:10:08.940126 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f59a9a13-6ed9-4964-9e5a-75a433f2af91" containerName="nova-api-log" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.940131 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f59a9a13-6ed9-4964-9e5a-75a433f2af91" containerName="nova-api-log" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.940679 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="f59a9a13-6ed9-4964-9e5a-75a433f2af91" containerName="nova-api-api" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.940709 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="31379f69-79a1-4684-aab1-3dc6cde7f876" containerName="nova-scheduler-scheduler" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.940724 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464" containerName="nova-manage" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.940735 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="f59a9a13-6ed9-4964-9e5a-75a433f2af91" containerName="nova-api-log" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.941729 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.949051 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.949189 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.949243 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.955672 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.967970 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.977239 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.985468 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.986613 4811 util.go:30] "No sandbox for pod can be found. 
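
The cpu_manager and memory_manager lines above run when nova-api-0 is re-admitted: both managers drop per-container assignments keyed by (podUID, containerName) for pods that are no longer active. A simplified sketch of that cleanup; the real state types in pkg/kubelet/cm are richer, and the UIDs below are abbreviated stand-ins:

    package main

    import "fmt"

    // Illustrative stand-in for the per-(podUID, containerName) state the
    // CPU and memory managers keep.
    type key struct{ podUID, containerName string }

    // removeStaleState drops assignments whose pod is no longer active,
    // mirroring the "RemoveStaleState: removing container" lines above.
    func removeStaleState(assignments map[key]string, active map[string]bool) {
        for k := range assignments {
            if !active[k.podUID] {
                fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n",
                    k.podUID, k.containerName)
                delete(assignments, k)
            }
        }
    }

    func main() {
        assignments := map[key]string{
            {"31379f69-...", "nova-scheduler-scheduler"}: "cpuset 0-1",
            {"f59a9a13-...", "nova-api-api"}:             "cpuset 2-3",
        }
        removeStaleState(assignments, map[string]bool{}) // neither pod is active any more
        fmt.Println(len(assignments), "assignments left")
    }
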
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.988543 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Jan 28 16:10:08 crc kubenswrapper[4811]: I0128 16:10:08.996003 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 28 16:10:09 crc kubenswrapper[4811]: I0128 16:10:09.086804 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5158a65-ce89-4a2b-9a19-0e4a6070562a-config-data\") pod \"nova-scheduler-0\" (UID: \"f5158a65-ce89-4a2b-9a19-0e4a6070562a\") " pod="openstack/nova-scheduler-0" Jan 28 16:10:09 crc kubenswrapper[4811]: I0128 16:10:09.086884 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d75f7765-1a6b-4bb5-819a-6891694a29c8-public-tls-certs\") pod \"nova-api-0\" (UID: \"d75f7765-1a6b-4bb5-819a-6891694a29c8\") " pod="openstack/nova-api-0" Jan 28 16:10:09 crc kubenswrapper[4811]: I0128 16:10:09.086915 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d75f7765-1a6b-4bb5-819a-6891694a29c8-internal-tls-certs\") pod \"nova-api-0\" (UID: \"d75f7765-1a6b-4bb5-819a-6891694a29c8\") " pod="openstack/nova-api-0" Jan 28 16:10:09 crc kubenswrapper[4811]: I0128 16:10:09.086952 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgdmz\" (UniqueName: \"kubernetes.io/projected/f5158a65-ce89-4a2b-9a19-0e4a6070562a-kube-api-access-fgdmz\") pod \"nova-scheduler-0\" (UID: \"f5158a65-ce89-4a2b-9a19-0e4a6070562a\") " pod="openstack/nova-scheduler-0" Jan 28 16:10:09 crc kubenswrapper[4811]: I0128 16:10:09.086980 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d75f7765-1a6b-4bb5-819a-6891694a29c8-logs\") pod \"nova-api-0\" (UID: \"d75f7765-1a6b-4bb5-819a-6891694a29c8\") " pod="openstack/nova-api-0" Jan 28 16:10:09 crc kubenswrapper[4811]: I0128 16:10:09.087000 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d75f7765-1a6b-4bb5-819a-6891694a29c8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d75f7765-1a6b-4bb5-819a-6891694a29c8\") " pod="openstack/nova-api-0" Jan 28 16:10:09 crc kubenswrapper[4811]: I0128 16:10:09.087026 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vgnl\" (UniqueName: \"kubernetes.io/projected/d75f7765-1a6b-4bb5-819a-6891694a29c8-kube-api-access-9vgnl\") pod \"nova-api-0\" (UID: \"d75f7765-1a6b-4bb5-819a-6891694a29c8\") " pod="openstack/nova-api-0" Jan 28 16:10:09 crc kubenswrapper[4811]: I0128 16:10:09.087044 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5158a65-ce89-4a2b-9a19-0e4a6070562a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f5158a65-ce89-4a2b-9a19-0e4a6070562a\") " pod="openstack/nova-scheduler-0" Jan 28 16:10:09 crc kubenswrapper[4811]: I0128 16:10:09.087069 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d75f7765-1a6b-4bb5-819a-6891694a29c8-config-data\") pod \"nova-api-0\" (UID: \"d75f7765-1a6b-4bb5-819a-6891694a29c8\") " pod="openstack/nova-api-0" Jan 28 16:10:09 crc kubenswrapper[4811]: I0128 16:10:09.188399 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d75f7765-1a6b-4bb5-819a-6891694a29c8-internal-tls-certs\") pod \"nova-api-0\" (UID: \"d75f7765-1a6b-4bb5-819a-6891694a29c8\") " pod="openstack/nova-api-0" Jan 28 16:10:09 crc kubenswrapper[4811]: I0128 16:10:09.188485 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgdmz\" (UniqueName: \"kubernetes.io/projected/f5158a65-ce89-4a2b-9a19-0e4a6070562a-kube-api-access-fgdmz\") pod \"nova-scheduler-0\" (UID: \"f5158a65-ce89-4a2b-9a19-0e4a6070562a\") " pod="openstack/nova-scheduler-0" Jan 28 16:10:09 crc kubenswrapper[4811]: I0128 16:10:09.188517 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d75f7765-1a6b-4bb5-819a-6891694a29c8-logs\") pod \"nova-api-0\" (UID: \"d75f7765-1a6b-4bb5-819a-6891694a29c8\") " pod="openstack/nova-api-0" Jan 28 16:10:09 crc kubenswrapper[4811]: I0128 16:10:09.188538 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d75f7765-1a6b-4bb5-819a-6891694a29c8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d75f7765-1a6b-4bb5-819a-6891694a29c8\") " pod="openstack/nova-api-0" Jan 28 16:10:09 crc kubenswrapper[4811]: I0128 16:10:09.188562 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vgnl\" (UniqueName: \"kubernetes.io/projected/d75f7765-1a6b-4bb5-819a-6891694a29c8-kube-api-access-9vgnl\") pod \"nova-api-0\" (UID: \"d75f7765-1a6b-4bb5-819a-6891694a29c8\") " pod="openstack/nova-api-0" Jan 28 16:10:09 crc kubenswrapper[4811]: I0128 16:10:09.188583 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5158a65-ce89-4a2b-9a19-0e4a6070562a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f5158a65-ce89-4a2b-9a19-0e4a6070562a\") " pod="openstack/nova-scheduler-0" Jan 28 16:10:09 crc kubenswrapper[4811]: I0128 16:10:09.188606 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d75f7765-1a6b-4bb5-819a-6891694a29c8-config-data\") pod \"nova-api-0\" (UID: \"d75f7765-1a6b-4bb5-819a-6891694a29c8\") " pod="openstack/nova-api-0" Jan 28 16:10:09 crc kubenswrapper[4811]: I0128 16:10:09.188650 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5158a65-ce89-4a2b-9a19-0e4a6070562a-config-data\") pod \"nova-scheduler-0\" (UID: \"f5158a65-ce89-4a2b-9a19-0e4a6070562a\") " pod="openstack/nova-scheduler-0" Jan 28 16:10:09 crc kubenswrapper[4811]: I0128 16:10:09.188691 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d75f7765-1a6b-4bb5-819a-6891694a29c8-public-tls-certs\") pod \"nova-api-0\" (UID: \"d75f7765-1a6b-4bb5-819a-6891694a29c8\") " pod="openstack/nova-api-0" Jan 28 16:10:09 crc kubenswrapper[4811]: I0128 16:10:09.189767 4811 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d75f7765-1a6b-4bb5-819a-6891694a29c8-logs\") pod \"nova-api-0\" (UID: \"d75f7765-1a6b-4bb5-819a-6891694a29c8\") " pod="openstack/nova-api-0" Jan 28 16:10:09 crc kubenswrapper[4811]: I0128 16:10:09.194206 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5158a65-ce89-4a2b-9a19-0e4a6070562a-config-data\") pod \"nova-scheduler-0\" (UID: \"f5158a65-ce89-4a2b-9a19-0e4a6070562a\") " pod="openstack/nova-scheduler-0" Jan 28 16:10:09 crc kubenswrapper[4811]: I0128 16:10:09.194226 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d75f7765-1a6b-4bb5-819a-6891694a29c8-config-data\") pod \"nova-api-0\" (UID: \"d75f7765-1a6b-4bb5-819a-6891694a29c8\") " pod="openstack/nova-api-0" Jan 28 16:10:09 crc kubenswrapper[4811]: I0128 16:10:09.194660 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d75f7765-1a6b-4bb5-819a-6891694a29c8-internal-tls-certs\") pod \"nova-api-0\" (UID: \"d75f7765-1a6b-4bb5-819a-6891694a29c8\") " pod="openstack/nova-api-0" Jan 28 16:10:09 crc kubenswrapper[4811]: I0128 16:10:09.195930 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d75f7765-1a6b-4bb5-819a-6891694a29c8-public-tls-certs\") pod \"nova-api-0\" (UID: \"d75f7765-1a6b-4bb5-819a-6891694a29c8\") " pod="openstack/nova-api-0" Jan 28 16:10:09 crc kubenswrapper[4811]: I0128 16:10:09.196234 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d75f7765-1a6b-4bb5-819a-6891694a29c8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d75f7765-1a6b-4bb5-819a-6891694a29c8\") " pod="openstack/nova-api-0" Jan 28 16:10:09 crc kubenswrapper[4811]: I0128 16:10:09.202079 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5158a65-ce89-4a2b-9a19-0e4a6070562a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f5158a65-ce89-4a2b-9a19-0e4a6070562a\") " pod="openstack/nova-scheduler-0" Jan 28 16:10:09 crc kubenswrapper[4811]: I0128 16:10:09.207297 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vgnl\" (UniqueName: \"kubernetes.io/projected/d75f7765-1a6b-4bb5-819a-6891694a29c8-kube-api-access-9vgnl\") pod \"nova-api-0\" (UID: \"d75f7765-1a6b-4bb5-819a-6891694a29c8\") " pod="openstack/nova-api-0" Jan 28 16:10:09 crc kubenswrapper[4811]: I0128 16:10:09.210417 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgdmz\" (UniqueName: \"kubernetes.io/projected/f5158a65-ce89-4a2b-9a19-0e4a6070562a-kube-api-access-fgdmz\") pod \"nova-scheduler-0\" (UID: \"f5158a65-ce89-4a2b-9a19-0e4a6070562a\") " pod="openstack/nova-scheduler-0" Jan 28 16:10:09 crc kubenswrapper[4811]: I0128 16:10:09.263029 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 28 16:10:09 crc kubenswrapper[4811]: I0128 16:10:09.325061 4811 util.go:30] "No sandbox for pod can be found. 
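
Each VerifyControllerAttachedVolume / MountVolume started / MountVolume.SetUp succeeded triple above is one pass of the volume manager's reconciler: it walks the desired world, mounts whatever is missing from the actual world, and leaves failures for the next pass. A toy version of that loop, with illustrative names rather than kubelet's:

    package main

    import "fmt"

    type volume struct{ name, pod string }

    // reconcile performs a single pass: desired-world volumes not yet in the
    // actual world get set up; failures are simply left for the next pass.
    func reconcile(desired []volume, actual map[volume]bool, setUp func(volume) error) {
        for _, v := range desired {
            if actual[v] {
                continue // already mounted, nothing to do
            }
            fmt.Printf("MountVolume started for volume %q pod %q\n", v.name, v.pod)
            if err := setUp(v); err != nil {
                fmt.Printf("MountVolume.SetUp failed for %q: %v (will retry)\n", v.name, err)
                continue
            }
            actual[v] = true
            fmt.Printf("MountVolume.SetUp succeeded for volume %q pod %q\n", v.name, v.pod)
        }
    }

    func main() {
        actual := map[volume]bool{}
        reconcile([]volume{
            {"config-data", "nova-api-0"},
            {"logs", "nova-api-0"},
        }, actual, func(volume) error { return nil })
    }
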
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 28 16:10:09 crc kubenswrapper[4811]: W0128 16:10:09.729110 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd75f7765_1a6b_4bb5_819a_6891694a29c8.slice/crio-824fcca7539f34f8ff4e74fcad43febe26cfb9a9224814e46dbcc0d1481c8897 WatchSource:0}: Error finding container 824fcca7539f34f8ff4e74fcad43febe26cfb9a9224814e46dbcc0d1481c8897: Status 404 returned error can't find the container with id 824fcca7539f34f8ff4e74fcad43febe26cfb9a9224814e46dbcc0d1481c8897 Jan 28 16:10:09 crc kubenswrapper[4811]: I0128 16:10:09.743295 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 28 16:10:09 crc kubenswrapper[4811]: I0128 16:10:09.824946 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d75f7765-1a6b-4bb5-819a-6891694a29c8","Type":"ContainerStarted","Data":"824fcca7539f34f8ff4e74fcad43febe26cfb9a9224814e46dbcc0d1481c8897"} Jan 28 16:10:09 crc kubenswrapper[4811]: I0128 16:10:09.843026 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 28 16:10:09 crc kubenswrapper[4811]: W0128 16:10:09.848878 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf5158a65_ce89_4a2b_9a19_0e4a6070562a.slice/crio-742362a3f5bc1738537bdc9aded1dc0550718c399c5f461f7a2ff428fe2ff892 WatchSource:0}: Error finding container 742362a3f5bc1738537bdc9aded1dc0550718c399c5f461f7a2ff428fe2ff892: Status 404 returned error can't find the container with id 742362a3f5bc1738537bdc9aded1dc0550718c399c5f461f7a2ff428fe2ff892 Jan 28 16:10:10 crc kubenswrapper[4811]: I0128 16:10:10.350391 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31379f69-79a1-4684-aab1-3dc6cde7f876" path="/var/lib/kubelet/pods/31379f69-79a1-4684-aab1-3dc6cde7f876/volumes" Jan 28 16:10:10 crc kubenswrapper[4811]: I0128 16:10:10.351528 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f59a9a13-6ed9-4964-9e5a-75a433f2af91" path="/var/lib/kubelet/pods/f59a9a13-6ed9-4964-9e5a-75a433f2af91/volumes" Jan 28 16:10:10 crc kubenswrapper[4811]: I0128 16:10:10.544031 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="d222430c-ef7f-46cd-b963-c500fec7bcc5" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.196:8775/\": read tcp 10.217.0.2:58156->10.217.0.196:8775: read: connection reset by peer" Jan 28 16:10:10 crc kubenswrapper[4811]: I0128 16:10:10.544034 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="d222430c-ef7f-46cd-b963-c500fec7bcc5" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.196:8775/\": read tcp 10.217.0.2:58172->10.217.0.196:8775: read: connection reset by peer" Jan 28 16:10:10 crc kubenswrapper[4811]: I0128 16:10:10.837900 4811 generic.go:334] "Generic (PLEG): container finished" podID="d222430c-ef7f-46cd-b963-c500fec7bcc5" containerID="265aa5c9fcb4bc9488c9027fffb3e87b0793f23d0e60be446a90ca677d4a1e3d" exitCode=0 Jan 28 16:10:10 crc kubenswrapper[4811]: I0128 16:10:10.838204 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d222430c-ef7f-46cd-b963-c500fec7bcc5","Type":"ContainerDied","Data":"265aa5c9fcb4bc9488c9027fffb3e87b0793f23d0e60be446a90ca677d4a1e3d"} Jan 28 
16:10:10 crc kubenswrapper[4811]: I0128 16:10:10.839692 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f5158a65-ce89-4a2b-9a19-0e4a6070562a","Type":"ContainerStarted","Data":"d9d98a9ef617e0914e95c49e3f4209449a918bf828d1812a874f55e9669c0fbe"} Jan 28 16:10:10 crc kubenswrapper[4811]: I0128 16:10:10.839728 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f5158a65-ce89-4a2b-9a19-0e4a6070562a","Type":"ContainerStarted","Data":"742362a3f5bc1738537bdc9aded1dc0550718c399c5f461f7a2ff428fe2ff892"} Jan 28 16:10:10 crc kubenswrapper[4811]: I0128 16:10:10.841515 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d75f7765-1a6b-4bb5-819a-6891694a29c8","Type":"ContainerStarted","Data":"7f7face6d95fe804267fa4f30c1e01cfdaa41613853274a35697b679666f13c6"} Jan 28 16:10:10 crc kubenswrapper[4811]: I0128 16:10:10.841563 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d75f7765-1a6b-4bb5-819a-6891694a29c8","Type":"ContainerStarted","Data":"7bb42b27e1f14ad69f131fe0e78ac3ea542276272ff3e8a4de4b7fc31e901928"} Jan 28 16:10:10 crc kubenswrapper[4811]: I0128 16:10:10.878723 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.87870511 podStartE2EDuration="2.87870511s" podCreationTimestamp="2026-01-28 16:10:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:10:10.86211306 +0000 UTC m=+1503.616476643" watchObservedRunningTime="2026-01-28 16:10:10.87870511 +0000 UTC m=+1503.633068693" Jan 28 16:10:10 crc kubenswrapper[4811]: I0128 16:10:10.888357 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.888333981 podStartE2EDuration="2.888333981s" podCreationTimestamp="2026-01-28 16:10:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:10:10.883407857 +0000 UTC m=+1503.637771440" watchObservedRunningTime="2026-01-28 16:10:10.888333981 +0000 UTC m=+1503.642697554" Jan 28 16:10:10 crc kubenswrapper[4811]: I0128 16:10:10.949899 4811 util.go:48] "No ready sandbox for pod can be found. 
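
The pod_startup_latency_tracker lines make the tracker's arithmetic visible: podStartE2EDuration is observed-running minus pod creation, and podStartSLOduration subtracts the image-pull window (for nova-scheduler-0 and nova-api-0 no pull happened, the pull timestamps are the zero time, and the two durations coincide). The reconstruction below uses the ceilometer-0 numbers logged earlier and matches them to within the second-precision creation timestamp:

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
        parse := func(s string) time.Time {
            t, err := time.Parse(layout, s)
            if err != nil {
                panic(err)
            }
            return t
        }
        // Timestamps copied from the ceilometer-0 startup entry above.
        created := parse("2026-01-28 16:10:03 +0000 UTC")
        firstPull := parse("2026-01-28 16:10:04.535919212 +0000 UTC")
        lastPull := parse("2026-01-28 16:10:08.489267033 +0000 UTC")
        running := parse("2026-01-28 16:10:08.828809917 +0000 UTC")

        e2e := running.Sub(created)
        slo := e2e - lastPull.Sub(firstPull)
        fmt.Println("podStartE2EDuration:", e2e) // 5.828809917s
        fmt.Println("podStartSLOduration:", slo) // ~1.8754621s (log: 1.875462086s; creation is logged at whole-second precision)
    }
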
Need to start a new one" pod="openstack/nova-metadata-0" Jan 28 16:10:11 crc kubenswrapper[4811]: I0128 16:10:11.044491 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d222430c-ef7f-46cd-b963-c500fec7bcc5-combined-ca-bundle\") pod \"d222430c-ef7f-46cd-b963-c500fec7bcc5\" (UID: \"d222430c-ef7f-46cd-b963-c500fec7bcc5\") " Jan 28 16:10:11 crc kubenswrapper[4811]: I0128 16:10:11.044573 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d222430c-ef7f-46cd-b963-c500fec7bcc5-logs\") pod \"d222430c-ef7f-46cd-b963-c500fec7bcc5\" (UID: \"d222430c-ef7f-46cd-b963-c500fec7bcc5\") " Jan 28 16:10:11 crc kubenswrapper[4811]: I0128 16:10:11.044604 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d222430c-ef7f-46cd-b963-c500fec7bcc5-nova-metadata-tls-certs\") pod \"d222430c-ef7f-46cd-b963-c500fec7bcc5\" (UID: \"d222430c-ef7f-46cd-b963-c500fec7bcc5\") " Jan 28 16:10:11 crc kubenswrapper[4811]: I0128 16:10:11.044682 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d222430c-ef7f-46cd-b963-c500fec7bcc5-config-data\") pod \"d222430c-ef7f-46cd-b963-c500fec7bcc5\" (UID: \"d222430c-ef7f-46cd-b963-c500fec7bcc5\") " Jan 28 16:10:11 crc kubenswrapper[4811]: I0128 16:10:11.044743 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8c4qh\" (UniqueName: \"kubernetes.io/projected/d222430c-ef7f-46cd-b963-c500fec7bcc5-kube-api-access-8c4qh\") pod \"d222430c-ef7f-46cd-b963-c500fec7bcc5\" (UID: \"d222430c-ef7f-46cd-b963-c500fec7bcc5\") " Jan 28 16:10:11 crc kubenswrapper[4811]: I0128 16:10:11.045072 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d222430c-ef7f-46cd-b963-c500fec7bcc5-logs" (OuterVolumeSpecName: "logs") pod "d222430c-ef7f-46cd-b963-c500fec7bcc5" (UID: "d222430c-ef7f-46cd-b963-c500fec7bcc5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:10:11 crc kubenswrapper[4811]: I0128 16:10:11.045167 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d222430c-ef7f-46cd-b963-c500fec7bcc5-logs\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:11 crc kubenswrapper[4811]: I0128 16:10:11.051771 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d222430c-ef7f-46cd-b963-c500fec7bcc5-kube-api-access-8c4qh" (OuterVolumeSpecName: "kube-api-access-8c4qh") pod "d222430c-ef7f-46cd-b963-c500fec7bcc5" (UID: "d222430c-ef7f-46cd-b963-c500fec7bcc5"). InnerVolumeSpecName "kube-api-access-8c4qh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:10:11 crc kubenswrapper[4811]: I0128 16:10:11.085263 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d222430c-ef7f-46cd-b963-c500fec7bcc5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d222430c-ef7f-46cd-b963-c500fec7bcc5" (UID: "d222430c-ef7f-46cd-b963-c500fec7bcc5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:10:11 crc kubenswrapper[4811]: I0128 16:10:11.089956 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d222430c-ef7f-46cd-b963-c500fec7bcc5-config-data" (OuterVolumeSpecName: "config-data") pod "d222430c-ef7f-46cd-b963-c500fec7bcc5" (UID: "d222430c-ef7f-46cd-b963-c500fec7bcc5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:10:11 crc kubenswrapper[4811]: I0128 16:10:11.146856 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8c4qh\" (UniqueName: \"kubernetes.io/projected/d222430c-ef7f-46cd-b963-c500fec7bcc5-kube-api-access-8c4qh\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:11 crc kubenswrapper[4811]: I0128 16:10:11.146897 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d222430c-ef7f-46cd-b963-c500fec7bcc5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:11 crc kubenswrapper[4811]: I0128 16:10:11.146910 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d222430c-ef7f-46cd-b963-c500fec7bcc5-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:11 crc kubenswrapper[4811]: I0128 16:10:11.174595 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d222430c-ef7f-46cd-b963-c500fec7bcc5-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "d222430c-ef7f-46cd-b963-c500fec7bcc5" (UID: "d222430c-ef7f-46cd-b963-c500fec7bcc5"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:10:11 crc kubenswrapper[4811]: I0128 16:10:11.249017 4811 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d222430c-ef7f-46cd-b963-c500fec7bcc5-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:11 crc kubenswrapper[4811]: I0128 16:10:11.855628 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 28 16:10:11 crc kubenswrapper[4811]: I0128 16:10:11.861490 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d222430c-ef7f-46cd-b963-c500fec7bcc5","Type":"ContainerDied","Data":"a837401250304d8fe6b32878e320eeebc261ca5dd338fe43b2776e87eb8ebc32"} Jan 28 16:10:11 crc kubenswrapper[4811]: I0128 16:10:11.861550 4811 scope.go:117] "RemoveContainer" containerID="265aa5c9fcb4bc9488c9027fffb3e87b0793f23d0e60be446a90ca677d4a1e3d" Jan 28 16:10:11 crc kubenswrapper[4811]: I0128 16:10:11.902933 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 28 16:10:11 crc kubenswrapper[4811]: I0128 16:10:11.904978 4811 scope.go:117] "RemoveContainer" containerID="df9cffff43f236bf7b6aac1086fe3a61f9363207fc596a077ff0dc9718fdb5c3" Jan 28 16:10:11 crc kubenswrapper[4811]: I0128 16:10:11.935038 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 28 16:10:11 crc kubenswrapper[4811]: I0128 16:10:11.953963 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 28 16:10:11 crc kubenswrapper[4811]: E0128 16:10:11.954685 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d222430c-ef7f-46cd-b963-c500fec7bcc5" containerName="nova-metadata-log" Jan 28 16:10:11 crc kubenswrapper[4811]: I0128 16:10:11.954705 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d222430c-ef7f-46cd-b963-c500fec7bcc5" containerName="nova-metadata-log" Jan 28 16:10:11 crc kubenswrapper[4811]: E0128 16:10:11.954739 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d222430c-ef7f-46cd-b963-c500fec7bcc5" containerName="nova-metadata-metadata" Jan 28 16:10:11 crc kubenswrapper[4811]: I0128 16:10:11.954746 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d222430c-ef7f-46cd-b963-c500fec7bcc5" containerName="nova-metadata-metadata" Jan 28 16:10:11 crc kubenswrapper[4811]: I0128 16:10:11.954928 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d222430c-ef7f-46cd-b963-c500fec7bcc5" containerName="nova-metadata-metadata" Jan 28 16:10:11 crc kubenswrapper[4811]: I0128 16:10:11.954942 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d222430c-ef7f-46cd-b963-c500fec7bcc5" containerName="nova-metadata-log" Jan 28 16:10:11 crc kubenswrapper[4811]: I0128 16:10:11.956022 4811 util.go:30] "No sandbox for pod can be found. 
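
The nova-metadata-0 block above is a pod replacement as the sync loop sees it: DELETE and REMOVE for the old pod, then ADD and UPDATE for its successor, with stale CPU and memory state dropped in between. A small, hypothetical filter that recovers this verb sequence for one pod from a log on stdin:

    package main

    import (
        "bufio"
        "fmt"
        "os"
        "regexp"
    )

    // Matches entries like:
    //   kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
    var syncRe = regexp.MustCompile(`"SyncLoop (ADD|UPDATE|DELETE|REMOVE)" source="api" pods=\[([^\]]+)\]`)

    func main() {
        target := `"openstack/nova-metadata-0"`
        sc := bufio.NewScanner(os.Stdin)
        sc.Buffer(make([]byte, 1<<20), 1<<20)
        for sc.Scan() {
            if m := syncRe.FindStringSubmatch(sc.Text()); m != nil && m[2] == target {
                fmt.Println(m[1]) // prints DELETE, REMOVE, ADD, UPDATE in log order
            }
        }
    }
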
Need to start a new one" pod="openstack/nova-metadata-0" Jan 28 16:10:11 crc kubenswrapper[4811]: I0128 16:10:11.957824 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Jan 28 16:10:11 crc kubenswrapper[4811]: I0128 16:10:11.958356 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 28 16:10:11 crc kubenswrapper[4811]: I0128 16:10:11.967863 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 28 16:10:12 crc kubenswrapper[4811]: I0128 16:10:12.065645 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwqkn\" (UniqueName: \"kubernetes.io/projected/15d6fdf2-a4f1-4f30-b621-31877f96868f-kube-api-access-wwqkn\") pod \"nova-metadata-0\" (UID: \"15d6fdf2-a4f1-4f30-b621-31877f96868f\") " pod="openstack/nova-metadata-0" Jan 28 16:10:12 crc kubenswrapper[4811]: I0128 16:10:12.065738 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15d6fdf2-a4f1-4f30-b621-31877f96868f-logs\") pod \"nova-metadata-0\" (UID: \"15d6fdf2-a4f1-4f30-b621-31877f96868f\") " pod="openstack/nova-metadata-0" Jan 28 16:10:12 crc kubenswrapper[4811]: I0128 16:10:12.065802 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15d6fdf2-a4f1-4f30-b621-31877f96868f-config-data\") pod \"nova-metadata-0\" (UID: \"15d6fdf2-a4f1-4f30-b621-31877f96868f\") " pod="openstack/nova-metadata-0" Jan 28 16:10:12 crc kubenswrapper[4811]: I0128 16:10:12.065847 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/15d6fdf2-a4f1-4f30-b621-31877f96868f-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"15d6fdf2-a4f1-4f30-b621-31877f96868f\") " pod="openstack/nova-metadata-0" Jan 28 16:10:12 crc kubenswrapper[4811]: I0128 16:10:12.065889 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15d6fdf2-a4f1-4f30-b621-31877f96868f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"15d6fdf2-a4f1-4f30-b621-31877f96868f\") " pod="openstack/nova-metadata-0" Jan 28 16:10:12 crc kubenswrapper[4811]: I0128 16:10:12.167540 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15d6fdf2-a4f1-4f30-b621-31877f96868f-config-data\") pod \"nova-metadata-0\" (UID: \"15d6fdf2-a4f1-4f30-b621-31877f96868f\") " pod="openstack/nova-metadata-0" Jan 28 16:10:12 crc kubenswrapper[4811]: I0128 16:10:12.168193 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/15d6fdf2-a4f1-4f30-b621-31877f96868f-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"15d6fdf2-a4f1-4f30-b621-31877f96868f\") " pod="openstack/nova-metadata-0" Jan 28 16:10:12 crc kubenswrapper[4811]: I0128 16:10:12.168521 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15d6fdf2-a4f1-4f30-b621-31877f96868f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"15d6fdf2-a4f1-4f30-b621-31877f96868f\") " 
pod="openstack/nova-metadata-0" Jan 28 16:10:12 crc kubenswrapper[4811]: I0128 16:10:12.168557 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwqkn\" (UniqueName: \"kubernetes.io/projected/15d6fdf2-a4f1-4f30-b621-31877f96868f-kube-api-access-wwqkn\") pod \"nova-metadata-0\" (UID: \"15d6fdf2-a4f1-4f30-b621-31877f96868f\") " pod="openstack/nova-metadata-0" Jan 28 16:10:12 crc kubenswrapper[4811]: I0128 16:10:12.168628 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15d6fdf2-a4f1-4f30-b621-31877f96868f-logs\") pod \"nova-metadata-0\" (UID: \"15d6fdf2-a4f1-4f30-b621-31877f96868f\") " pod="openstack/nova-metadata-0" Jan 28 16:10:12 crc kubenswrapper[4811]: I0128 16:10:12.169051 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15d6fdf2-a4f1-4f30-b621-31877f96868f-logs\") pod \"nova-metadata-0\" (UID: \"15d6fdf2-a4f1-4f30-b621-31877f96868f\") " pod="openstack/nova-metadata-0" Jan 28 16:10:12 crc kubenswrapper[4811]: I0128 16:10:12.171160 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/15d6fdf2-a4f1-4f30-b621-31877f96868f-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"15d6fdf2-a4f1-4f30-b621-31877f96868f\") " pod="openstack/nova-metadata-0" Jan 28 16:10:12 crc kubenswrapper[4811]: I0128 16:10:12.173064 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15d6fdf2-a4f1-4f30-b621-31877f96868f-config-data\") pod \"nova-metadata-0\" (UID: \"15d6fdf2-a4f1-4f30-b621-31877f96868f\") " pod="openstack/nova-metadata-0" Jan 28 16:10:12 crc kubenswrapper[4811]: I0128 16:10:12.185508 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15d6fdf2-a4f1-4f30-b621-31877f96868f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"15d6fdf2-a4f1-4f30-b621-31877f96868f\") " pod="openstack/nova-metadata-0" Jan 28 16:10:12 crc kubenswrapper[4811]: I0128 16:10:12.189542 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwqkn\" (UniqueName: \"kubernetes.io/projected/15d6fdf2-a4f1-4f30-b621-31877f96868f-kube-api-access-wwqkn\") pod \"nova-metadata-0\" (UID: \"15d6fdf2-a4f1-4f30-b621-31877f96868f\") " pod="openstack/nova-metadata-0" Jan 28 16:10:12 crc kubenswrapper[4811]: I0128 16:10:12.275390 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 28 16:10:12 crc kubenswrapper[4811]: I0128 16:10:12.349122 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d222430c-ef7f-46cd-b963-c500fec7bcc5" path="/var/lib/kubelet/pods/d222430c-ef7f-46cd-b963-c500fec7bcc5/volumes" Jan 28 16:10:12 crc kubenswrapper[4811]: I0128 16:10:12.720637 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 28 16:10:12 crc kubenswrapper[4811]: I0128 16:10:12.864245 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"15d6fdf2-a4f1-4f30-b621-31877f96868f","Type":"ContainerStarted","Data":"daa161bbe0a16e8c10dd09e6edf58ce3eca14f136962f92a4a1ed26d11d6b208"} Jan 28 16:10:13 crc kubenswrapper[4811]: I0128 16:10:13.875480 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"15d6fdf2-a4f1-4f30-b621-31877f96868f","Type":"ContainerStarted","Data":"751f6099de1911b3d0549631c9b2b444b7a26a8a63ae6b18819217bbb07da0f4"} Jan 28 16:10:13 crc kubenswrapper[4811]: I0128 16:10:13.875819 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"15d6fdf2-a4f1-4f30-b621-31877f96868f","Type":"ContainerStarted","Data":"f1c697d3014cf2adc2ad5baf6f4f5abebc7a014d2d73bdad9756ab37a9f1de10"} Jan 28 16:10:13 crc kubenswrapper[4811]: I0128 16:10:13.899421 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.899400924 podStartE2EDuration="2.899400924s" podCreationTimestamp="2026-01-28 16:10:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 16:10:13.892873058 +0000 UTC m=+1506.647236661" watchObservedRunningTime="2026-01-28 16:10:13.899400924 +0000 UTC m=+1506.653764507" Jan 28 16:10:14 crc kubenswrapper[4811]: I0128 16:10:14.326523 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Jan 28 16:10:15 crc kubenswrapper[4811]: I0128 16:10:15.340245 4811 scope.go:117] "RemoveContainer" containerID="8a7a828294dde0bd8dbd211cdc8b81d51144a40905535ed75ebac45ca7ba49c2" Jan 28 16:10:15 crc kubenswrapper[4811]: E0128 16:10:15.340652 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:10:17 crc kubenswrapper[4811]: I0128 16:10:17.276248 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 28 16:10:17 crc kubenswrapper[4811]: I0128 16:10:17.276784 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 28 16:10:19 crc kubenswrapper[4811]: I0128 16:10:19.263351 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 28 16:10:19 crc kubenswrapper[4811]: I0128 16:10:19.263803 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 28 16:10:19 crc kubenswrapper[4811]: I0128 16:10:19.325934 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openstack/nova-scheduler-0" Jan 28 16:10:19 crc kubenswrapper[4811]: I0128 16:10:19.355541 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Jan 28 16:10:19 crc kubenswrapper[4811]: I0128 16:10:19.980956 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jan 28 16:10:20 crc kubenswrapper[4811]: I0128 16:10:20.277878 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d75f7765-1a6b-4bb5-819a-6891694a29c8" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.207:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 28 16:10:20 crc kubenswrapper[4811]: I0128 16:10:20.278362 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d75f7765-1a6b-4bb5-819a-6891694a29c8" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.207:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 28 16:10:22 crc kubenswrapper[4811]: I0128 16:10:22.275515 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 28 16:10:22 crc kubenswrapper[4811]: I0128 16:10:22.275859 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 28 16:10:23 crc kubenswrapper[4811]: I0128 16:10:23.332659 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="15d6fdf2-a4f1-4f30-b621-31877f96868f" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.209:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 28 16:10:23 crc kubenswrapper[4811]: I0128 16:10:23.332691 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="15d6fdf2-a4f1-4f30-b621-31877f96868f" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.209:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 28 16:10:29 crc kubenswrapper[4811]: I0128 16:10:29.269590 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 28 16:10:29 crc kubenswrapper[4811]: I0128 16:10:29.271602 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 28 16:10:29 crc kubenswrapper[4811]: I0128 16:10:29.272621 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 28 16:10:29 crc kubenswrapper[4811]: I0128 16:10:29.275853 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 28 16:10:30 crc kubenswrapper[4811]: I0128 16:10:30.042876 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 28 16:10:30 crc kubenswrapper[4811]: I0128 16:10:30.049668 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 28 16:10:30 crc kubenswrapper[4811]: I0128 16:10:30.340557 4811 scope.go:117] "RemoveContainer" containerID="8a7a828294dde0bd8dbd211cdc8b81d51144a40905535ed75ebac45ca7ba49c2" Jan 28 16:10:30 crc kubenswrapper[4811]: E0128 16:10:30.341046 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:10:32 crc kubenswrapper[4811]: I0128 16:10:32.286616 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 28 16:10:32 crc kubenswrapper[4811]: I0128 16:10:32.286965 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 28 16:10:32 crc kubenswrapper[4811]: I0128 16:10:32.291659 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 28 16:10:32 crc kubenswrapper[4811]: I0128 16:10:32.292718 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 28 16:10:36 crc kubenswrapper[4811]: I0128 16:10:36.301470 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Jan 28 16:10:45 crc kubenswrapper[4811]: I0128 16:10:45.339327 4811 scope.go:117] "RemoveContainer" containerID="8a7a828294dde0bd8dbd211cdc8b81d51144a40905535ed75ebac45ca7ba49c2" Jan 28 16:10:45 crc kubenswrapper[4811]: E0128 16:10:45.340000 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.481781 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.482458 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="e9d8f73c-afb3-4e7e-a69a-e8899bf051cc" containerName="openstackclient" containerID="cri-o://17b26e241f71fd0bba9cd473dd072e8e999cd250ef30a45cea6cacdf8de99886" gracePeriod=2 Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.507926 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.515101 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-89d7-account-create-update-7tjkx"] Jan 28 16:10:55 crc kubenswrapper[4811]: E0128 16:10:55.515565 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9d8f73c-afb3-4e7e-a69a-e8899bf051cc" containerName="openstackclient" Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.515586 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9d8f73c-afb3-4e7e-a69a-e8899bf051cc" containerName="openstackclient" Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.515784 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9d8f73c-afb3-4e7e-a69a-e8899bf051cc" containerName="openstackclient" Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.516366 4811 util.go:30] "No sandbox for pod can be found. 
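
The prober entries above show both failure shapes for these HTTPS probes: a hard client timeout (Client.Timeout exceeded while awaiting headers) while nova-api-0 warms up, and connection reset by peer while nova-metadata-0 restarts. A standalone approximation of such a probe; the 1s timeout stands in for the probe's timeoutSeconds, skipping TLS verification mirrors default probe behavior, and kubelet counts any 2xx or 3xx status as success:

    package main

    import (
        "crypto/tls"
        "fmt"
        "net/http"
        "time"
    )

    func probe(url string, timeout time.Duration) error {
        client := &http.Client{
            Timeout: timeout, // a timeout here surfaces as "Client.Timeout exceeded while awaiting headers"
            Transport: &http.Transport{
                TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
            },
        }
        resp, err := client.Get(url)
        if err != nil {
            return err
        }
        defer resp.Body.Close()
        if resp.StatusCode < 200 || resp.StatusCode >= 400 {
            return fmt.Errorf("unexpected status %d", resp.StatusCode)
        }
        return nil
    }

    func main() {
        // Endpoint copied from the nova-api-0 startup probe above.
        if err := probe("https://10.217.0.207:8774/", time.Second); err != nil {
            fmt.Println("Probe failed:", err)
        } else {
            fmt.Println("probe ok")
        }
    }
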
Need to start a new one" pod="openstack/cinder-89d7-account-create-update-7tjkx" Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.530203 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.540605 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-89d7-account-create-update-mc8pj"] Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.571553 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-89d7-account-create-update-mc8pj"] Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.596853 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-89d7-account-create-update-7tjkx"] Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.630264 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64z2q\" (UniqueName: \"kubernetes.io/projected/4815adcf-373b-4460-9f8c-d6dfaf471196-kube-api-access-64z2q\") pod \"cinder-89d7-account-create-update-7tjkx\" (UID: \"4815adcf-373b-4460-9f8c-d6dfaf471196\") " pod="openstack/cinder-89d7-account-create-update-7tjkx" Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.630574 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4815adcf-373b-4460-9f8c-d6dfaf471196-operator-scripts\") pod \"cinder-89d7-account-create-update-7tjkx\" (UID: \"4815adcf-373b-4460-9f8c-d6dfaf471196\") " pod="openstack/cinder-89d7-account-create-update-7tjkx" Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.658313 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-917c-account-create-update-slsg4"] Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.659619 4811 util.go:30] "No sandbox for pod can be found. 
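
The machine-config-daemon errors repeating above (back-off 5m0s restarting failed container) are CrashLoopBackOff at its ceiling: the restart delay doubles per crash until it pins at five minutes. A sketch of the schedule, assuming the upstream defaults of a 10s base and factor 2:

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        delay := 10 * time.Second       // assumed base delay
        const maxDelay = 5 * time.Minute // the "back-off 5m0s" cap reported above
        for restart := 1; restart <= 7; restart++ {
            fmt.Printf("restart %d: wait %v\n", restart, delay)
            delay *= 2
            if delay > maxDelay {
                delay = maxDelay
            }
        }
        // 10s, 20s, 40s, 1m20s, 2m40s, 5m0s, 5m0s, ...
    }
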
Need to start a new one" pod="openstack/barbican-917c-account-create-update-slsg4" Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.663983 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.699713 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-917c-account-create-update-slsg4"] Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.731482 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-t9s25"] Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.732969 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64z2q\" (UniqueName: \"kubernetes.io/projected/4815adcf-373b-4460-9f8c-d6dfaf471196-kube-api-access-64z2q\") pod \"cinder-89d7-account-create-update-7tjkx\" (UID: \"4815adcf-373b-4460-9f8c-d6dfaf471196\") " pod="openstack/cinder-89d7-account-create-update-7tjkx" Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.733026 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13f72e20-02af-43d9-8400-8180b175795e-operator-scripts\") pod \"barbican-917c-account-create-update-slsg4\" (UID: \"13f72e20-02af-43d9-8400-8180b175795e\") " pod="openstack/barbican-917c-account-create-update-slsg4" Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.733089 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4815adcf-373b-4460-9f8c-d6dfaf471196-operator-scripts\") pod \"cinder-89d7-account-create-update-7tjkx\" (UID: \"4815adcf-373b-4460-9f8c-d6dfaf471196\") " pod="openstack/cinder-89d7-account-create-update-7tjkx" Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.733113 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgc76\" (UniqueName: \"kubernetes.io/projected/13f72e20-02af-43d9-8400-8180b175795e-kube-api-access-kgc76\") pod \"barbican-917c-account-create-update-slsg4\" (UID: \"13f72e20-02af-43d9-8400-8180b175795e\") " pod="openstack/barbican-917c-account-create-update-slsg4" Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.734076 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4815adcf-373b-4460-9f8c-d6dfaf471196-operator-scripts\") pod \"cinder-89d7-account-create-update-7tjkx\" (UID: \"4815adcf-373b-4460-9f8c-d6dfaf471196\") " pod="openstack/cinder-89d7-account-create-update-7tjkx" Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.771709 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-917c-account-create-update-kfdqq"] Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.788514 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64z2q\" (UniqueName: \"kubernetes.io/projected/4815adcf-373b-4460-9f8c-d6dfaf471196-kube-api-access-64z2q\") pod \"cinder-89d7-account-create-update-7tjkx\" (UID: \"4815adcf-373b-4460-9f8c-d6dfaf471196\") " pod="openstack/cinder-89d7-account-create-update-7tjkx" Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.823624 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-t9s25"] Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.840199 4811 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgc76\" (UniqueName: \"kubernetes.io/projected/13f72e20-02af-43d9-8400-8180b175795e-kube-api-access-kgc76\") pod \"barbican-917c-account-create-update-slsg4\" (UID: \"13f72e20-02af-43d9-8400-8180b175795e\") " pod="openstack/barbican-917c-account-create-update-slsg4" Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.840329 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13f72e20-02af-43d9-8400-8180b175795e-operator-scripts\") pod \"barbican-917c-account-create-update-slsg4\" (UID: \"13f72e20-02af-43d9-8400-8180b175795e\") " pod="openstack/barbican-917c-account-create-update-slsg4" Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.841021 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13f72e20-02af-43d9-8400-8180b175795e-operator-scripts\") pod \"barbican-917c-account-create-update-slsg4\" (UID: \"13f72e20-02af-43d9-8400-8180b175795e\") " pod="openstack/barbican-917c-account-create-update-slsg4" Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.856842 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-89d7-account-create-update-7tjkx" Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.861099 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-917c-account-create-update-kfdqq"] Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.920794 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.988008 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-k598l"] Jan 28 16:10:55 crc kubenswrapper[4811]: I0128 16:10:55.989208 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-k598l" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.003647 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.020118 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kgc76\" (UniqueName: \"kubernetes.io/projected/13f72e20-02af-43d9-8400-8180b175795e-kube-api-access-kgc76\") pod \"barbican-917c-account-create-update-slsg4\" (UID: \"13f72e20-02af-43d9-8400-8180b175795e\") " pod="openstack/barbican-917c-account-create-update-slsg4" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.022976 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-k598l"] Jan 28 16:10:56 crc kubenswrapper[4811]: E0128 16:10:56.057293 4811 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 28 16:10:56 crc kubenswrapper[4811]: E0128 16:10:56.057341 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/3b7599aa-7114-46c9-bf38-b6d735b75326-config-data podName:3b7599aa-7114-46c9-bf38-b6d735b75326 nodeName:}" failed. No retries permitted until 2026-01-28 16:10:56.557328664 +0000 UTC m=+1549.311692247 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/3b7599aa-7114-46c9-bf38-b6d735b75326-config-data") pod "rabbitmq-cell1-server-0" (UID: "3b7599aa-7114-46c9-bf38-b6d735b75326") : configmap "rabbitmq-cell1-config-data" not found Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.102212 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-2952-account-create-update-df9kg"] Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.103484 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-2952-account-create-update-df9kg" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.112328 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.125274 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-2952-account-create-update-df9kg"] Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.147070 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-2952-account-create-update-8fmfb"] Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.194733 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbhv4\" (UniqueName: \"kubernetes.io/projected/a2ab1b57-166e-4409-8b71-0bb922787623-kube-api-access-bbhv4\") pod \"root-account-create-update-k598l\" (UID: \"a2ab1b57-166e-4409-8b71-0bb922787623\") " pod="openstack/root-account-create-update-k598l" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.196451 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2ab1b57-166e-4409-8b71-0bb922787623-operator-scripts\") pod \"root-account-create-update-k598l\" (UID: \"a2ab1b57-166e-4409-8b71-0bb922787623\") " pod="openstack/root-account-create-update-k598l" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.311985 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-917c-account-create-update-slsg4" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.312483 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-99f2-account-create-update-hzmxs"] Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.313710 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-99f2-account-create-update-hzmxs" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.314814 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwctl\" (UniqueName: \"kubernetes.io/projected/7b876d22-ae70-479d-82b1-113a9d760a0f-kube-api-access-fwctl\") pod \"nova-api-2952-account-create-update-df9kg\" (UID: \"7b876d22-ae70-479d-82b1-113a9d760a0f\") " pod="openstack/nova-api-2952-account-create-update-df9kg" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.314883 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2ab1b57-166e-4409-8b71-0bb922787623-operator-scripts\") pod \"root-account-create-update-k598l\" (UID: \"a2ab1b57-166e-4409-8b71-0bb922787623\") " pod="openstack/root-account-create-update-k598l" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.314911 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7b876d22-ae70-479d-82b1-113a9d760a0f-operator-scripts\") pod \"nova-api-2952-account-create-update-df9kg\" (UID: \"7b876d22-ae70-479d-82b1-113a9d760a0f\") " pod="openstack/nova-api-2952-account-create-update-df9kg" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.314975 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbhv4\" (UniqueName: \"kubernetes.io/projected/a2ab1b57-166e-4409-8b71-0bb922787623-kube-api-access-bbhv4\") pod \"root-account-create-update-k598l\" (UID: \"a2ab1b57-166e-4409-8b71-0bb922787623\") " pod="openstack/root-account-create-update-k598l" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.316265 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2ab1b57-166e-4409-8b71-0bb922787623-operator-scripts\") pod \"root-account-create-update-k598l\" (UID: \"a2ab1b57-166e-4409-8b71-0bb922787623\") " pod="openstack/root-account-create-update-k598l" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.328573 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.398192 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbhv4\" (UniqueName: \"kubernetes.io/projected/a2ab1b57-166e-4409-8b71-0bb922787623-kube-api-access-bbhv4\") pod \"root-account-create-update-k598l\" (UID: \"a2ab1b57-166e-4409-8b71-0bb922787623\") " pod="openstack/root-account-create-update-k598l" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.423762 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8df573b1-31df-4d78-9e7a-66cf18d81aae-operator-scripts\") pod \"nova-cell0-99f2-account-create-update-hzmxs\" (UID: \"8df573b1-31df-4d78-9e7a-66cf18d81aae\") " pod="openstack/nova-cell0-99f2-account-create-update-hzmxs" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.423837 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmvtx\" (UniqueName: \"kubernetes.io/projected/8df573b1-31df-4d78-9e7a-66cf18d81aae-kube-api-access-hmvtx\") pod \"nova-cell0-99f2-account-create-update-hzmxs\" (UID: 
\"8df573b1-31df-4d78-9e7a-66cf18d81aae\") " pod="openstack/nova-cell0-99f2-account-create-update-hzmxs" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.423957 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwctl\" (UniqueName: \"kubernetes.io/projected/7b876d22-ae70-479d-82b1-113a9d760a0f-kube-api-access-fwctl\") pod \"nova-api-2952-account-create-update-df9kg\" (UID: \"7b876d22-ae70-479d-82b1-113a9d760a0f\") " pod="openstack/nova-api-2952-account-create-update-df9kg" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.424045 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7b876d22-ae70-479d-82b1-113a9d760a0f-operator-scripts\") pod \"nova-api-2952-account-create-update-df9kg\" (UID: \"7b876d22-ae70-479d-82b1-113a9d760a0f\") " pod="openstack/nova-api-2952-account-create-update-df9kg" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.426804 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7b876d22-ae70-479d-82b1-113a9d760a0f-operator-scripts\") pod \"nova-api-2952-account-create-update-df9kg\" (UID: \"7b876d22-ae70-479d-82b1-113a9d760a0f\") " pod="openstack/nova-api-2952-account-create-update-df9kg" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.431191 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="505639ba-81cf-4cae-b494-e88d944457ea" path="/var/lib/kubelet/pods/505639ba-81cf-4cae-b494-e88d944457ea/volumes" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.432059 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b151453e-b13d-4bbe-b156-c7d264aefd8a" path="/var/lib/kubelet/pods/b151453e-b13d-4bbe-b156-c7d264aefd8a/volumes" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.432572 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4a0a450-f81c-41d2-8400-b12baf800baa" path="/var/lib/kubelet/pods/b4a0a450-f81c-41d2-8400-b12baf800baa/volumes" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.433110 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-2952-account-create-update-8fmfb"] Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.519477 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-99f2-account-create-update-hzmxs"] Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.531713 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8df573b1-31df-4d78-9e7a-66cf18d81aae-operator-scripts\") pod \"nova-cell0-99f2-account-create-update-hzmxs\" (UID: \"8df573b1-31df-4d78-9e7a-66cf18d81aae\") " pod="openstack/nova-cell0-99f2-account-create-update-hzmxs" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.531760 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmvtx\" (UniqueName: \"kubernetes.io/projected/8df573b1-31df-4d78-9e7a-66cf18d81aae-kube-api-access-hmvtx\") pod \"nova-cell0-99f2-account-create-update-hzmxs\" (UID: \"8df573b1-31df-4d78-9e7a-66cf18d81aae\") " pod="openstack/nova-cell0-99f2-account-create-update-hzmxs" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.532607 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/8df573b1-31df-4d78-9e7a-66cf18d81aae-operator-scripts\") pod \"nova-cell0-99f2-account-create-update-hzmxs\" (UID: \"8df573b1-31df-4d78-9e7a-66cf18d81aae\") " pod="openstack/nova-cell0-99f2-account-create-update-hzmxs" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.533056 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwctl\" (UniqueName: \"kubernetes.io/projected/7b876d22-ae70-479d-82b1-113a9d760a0f-kube-api-access-fwctl\") pod \"nova-api-2952-account-create-update-df9kg\" (UID: \"7b876d22-ae70-479d-82b1-113a9d760a0f\") " pod="openstack/nova-api-2952-account-create-update-df9kg" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.556474 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-99f2-account-create-update-xdmdm"] Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.572469 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.573053 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="ad905f29-3683-466d-8acd-192cc5ac0960" containerName="openstack-network-exporter" containerID="cri-o://fa6828f22b3be25a45cd83e03b0c456790ff1d215506294c5c624f55eae5a0fc" gracePeriod=300 Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.587091 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-99f2-account-create-update-xdmdm"] Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.610930 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmvtx\" (UniqueName: \"kubernetes.io/projected/8df573b1-31df-4d78-9e7a-66cf18d81aae-kube-api-access-hmvtx\") pod \"nova-cell0-99f2-account-create-update-hzmxs\" (UID: \"8df573b1-31df-4d78-9e7a-66cf18d81aae\") " pod="openstack/nova-cell0-99f2-account-create-update-hzmxs" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.611563 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 28 16:10:56 crc kubenswrapper[4811]: E0128 16:10:56.635239 4811 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 28 16:10:56 crc kubenswrapper[4811]: E0128 16:10:56.635296 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/3b7599aa-7114-46c9-bf38-b6d735b75326-config-data podName:3b7599aa-7114-46c9-bf38-b6d735b75326 nodeName:}" failed. No retries permitted until 2026-01-28 16:10:57.635281136 +0000 UTC m=+1550.389644719 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/3b7599aa-7114-46c9-bf38-b6d735b75326-config-data") pod "rabbitmq-cell1-server-0" (UID: "3b7599aa-7114-46c9-bf38-b6d735b75326") : configmap "rabbitmq-cell1-config-data" not found Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.673272 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-c7ca-account-create-update-mmwlc"] Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.684782 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.685521 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="bc54361e-5f34-4253-b91f-b1683e944191" containerName="openstack-network-exporter" containerID="cri-o://9dbb3f5632b8a35166eed8631e1ad8ce2fea3e6580b10ef5e0e18c13b29de1d4" gracePeriod=300 Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.686233 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-k598l" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.720866 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-hdczs"] Jan 28 16:10:56 crc kubenswrapper[4811]: E0128 16:10:56.738337 4811 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 28 16:10:56 crc kubenswrapper[4811]: E0128 16:10:56.738401 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/23095127-8b86-445a-8c32-1e6bc14bf05e-config-data podName:23095127-8b86-445a-8c32-1e6bc14bf05e nodeName:}" failed. No retries permitted until 2026-01-28 16:10:57.238377194 +0000 UTC m=+1549.992740777 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/23095127-8b86-445a-8c32-1e6bc14bf05e-config-data") pod "rabbitmq-server-0" (UID: "23095127-8b86-445a-8c32-1e6bc14bf05e") : configmap "rabbitmq-config-data" not found Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.745737 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-hdczs"] Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.761260 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-99f2-account-create-update-hzmxs" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.766925 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-c7ca-account-create-update-mmwlc"] Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.797873 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-2952-account-create-update-df9kg" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.798486 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-c25e-account-create-update-r58t5"] Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.813688 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-395c-account-create-update-gl7x6"] Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.818611 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-395c-account-create-update-gl7x6"] Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.835601 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/openstack-galera-0" podUID="156027c6-3cec-4317-8267-eb234c90af40" containerName="galera" probeResult="failure" output="command timed out" Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.844793 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-c25e-account-create-update-r58t5"] Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.853123 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.853400 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="6dd44d16-5b30-493c-9dd2-1ba856a4393a" containerName="openstack-network-exporter" containerID="cri-o://1112ae05c1cf02d134ea38159d36c4ca6bf5c3e27f26f8a7397f9fd2b51e726b" gracePeriod=30 Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.853813 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="6dd44d16-5b30-493c-9dd2-1ba856a4393a" containerName="ovn-northd" containerID="cri-o://4bf48c2e9b3287834317b579380166a3b7e88ed93df7ceeb2163cfc00138fa94" gracePeriod=30 Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.869165 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-e221-account-create-update-6bntv"] Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.918490 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-e221-account-create-update-6bntv"] Jan 28 16:10:56 crc kubenswrapper[4811]: I0128 16:10:56.989563 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-bsvjv"] Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.009757 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-bsvjv"] Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.035655 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-4bw6q"] Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.055492 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-nh6rs"] Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.074188 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-nh6rs"] Jan 28 16:10:57 crc kubenswrapper[4811]: W0128 16:10:57.100543 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4815adcf_373b_4460_9f8c_d6dfaf471196.slice/crio-b532553aef6d0c89b3606b4c1d13a53f2558f9e48a7a31264a5870ab31b91da5 WatchSource:0}: Error finding container b532553aef6d0c89b3606b4c1d13a53f2558f9e48a7a31264a5870ab31b91da5: Status 404 returned error can't find the container with id 
b532553aef6d0c89b3606b4c1d13a53f2558f9e48a7a31264a5870ab31b91da5
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.102373 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-4bw6q"]
Jan 28 16:10:57 crc kubenswrapper[4811]: E0128 16:10:57.136142 4811 kuberuntime_manager.go:1274] "Unhandled Error" err=<
Jan 28 16:10:57 crc kubenswrapper[4811]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash
Jan 28 16:10:57 crc kubenswrapper[4811]: 
Jan 28 16:10:57 crc kubenswrapper[4811]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh
Jan 28 16:10:57 crc kubenswrapper[4811]: 
Jan 28 16:10:57 crc kubenswrapper[4811]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."}
Jan 28 16:10:57 crc kubenswrapper[4811]: 
Jan 28 16:10:57 crc kubenswrapper[4811]: MYSQL_CMD="mysql -h -u root -P 3306"
Jan 28 16:10:57 crc kubenswrapper[4811]: 
Jan 28 16:10:57 crc kubenswrapper[4811]: if [ -n "cinder" ]; then
Jan 28 16:10:57 crc kubenswrapper[4811]: GRANT_DATABASE="cinder"
Jan 28 16:10:57 crc kubenswrapper[4811]: else
Jan 28 16:10:57 crc kubenswrapper[4811]: GRANT_DATABASE="*"
Jan 28 16:10:57 crc kubenswrapper[4811]: fi
Jan 28 16:10:57 crc kubenswrapper[4811]: 
Jan 28 16:10:57 crc kubenswrapper[4811]: # going for maximum compatibility here:
Jan 28 16:10:57 crc kubenswrapper[4811]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used
Jan 28 16:10:57 crc kubenswrapper[4811]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not
Jan 28 16:10:57 crc kubenswrapper[4811]: # 3. create user with CREATE but then do all password and TLS with ALTER to
Jan 28 16:10:57 crc kubenswrapper[4811]: # support updates
Jan 28 16:10:57 crc kubenswrapper[4811]: 
Jan 28 16:10:57 crc kubenswrapper[4811]: $MYSQL_CMD < logger="UnhandledError"
Jan 28 16:10:57 crc kubenswrapper[4811]: E0128 16:10:57.138934 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"cinder-db-secret\\\" not found\"" pod="openstack/cinder-89d7-account-create-update-7tjkx" podUID="4815adcf-373b-4460-9f8c-d6dfaf471196"
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.194859 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-vhxrm"]
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.229201 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="ad905f29-3683-466d-8acd-192cc5ac0960" containerName="ovsdbserver-sb" containerID="cri-o://a7b75f68bd55325c1892a8a68699e72299412f605399d3727dc7a8c78169fd1c" gracePeriod=300
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.253103 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="bc54361e-5f34-4253-b91f-b1683e944191" containerName="ovsdbserver-nb" containerID="cri-o://dbed8cd5c1ccabc67fa742e55af4b04f80e26bca8d2979d43e905bb4d63ec6a5" gracePeriod=300
Jan 28 16:10:57 crc kubenswrapper[4811]: E0128 16:10:57.257305 4811 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found
Jan 28 16:10:57 crc kubenswrapper[4811]: E0128 16:10:57.257372 4811 nestedpendingoperations.go:348] Operation for
"{volumeName:kubernetes.io/configmap/23095127-8b86-445a-8c32-1e6bc14bf05e-config-data podName:23095127-8b86-445a-8c32-1e6bc14bf05e nodeName:}" failed. No retries permitted until 2026-01-28 16:10:58.257357516 +0000 UTC m=+1551.011721099 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/23095127-8b86-445a-8c32-1e6bc14bf05e-config-data") pod "rabbitmq-server-0" (UID: "23095127-8b86-445a-8c32-1e6bc14bf05e") : configmap "rabbitmq-config-data" not found Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.267347 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-vhxrm"] Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.292252 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-89d7-account-create-update-7tjkx"] Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.311714 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-vmzwm"] Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.341302 4811 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/ovn-controller-fxxlf" secret="" err="secret \"ovncontroller-ovncontroller-dockercfg-z7vl2\" not found" Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.344059 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-vmzwm"] Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.346654 4811 scope.go:117] "RemoveContainer" containerID="8a7a828294dde0bd8dbd211cdc8b81d51144a40905535ed75ebac45ca7ba49c2" Jan 28 16:10:57 crc kubenswrapper[4811]: E0128 16:10:57.378563 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:10:57 crc kubenswrapper[4811]: E0128 16:10:57.470672 4811 configmap.go:193] Couldn't get configMap openstack/ovncontroller-scripts: configmap "ovncontroller-scripts" not found Jan 28 16:10:57 crc kubenswrapper[4811]: E0128 16:10:57.470743 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-scripts podName:1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e nodeName:}" failed. No retries permitted until 2026-01-28 16:10:57.970721266 +0000 UTC m=+1550.725084849 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/configmap/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-scripts") pod "ovn-controller-fxxlf" (UID: "1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e") : configmap "ovncontroller-scripts" not found Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.474707 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-fxxlf"] Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.485276 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-d62hg"] Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.485599 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-metrics-d62hg" podUID="3e79a2a6-54aa-4533-95ee-9deb5c46811b" containerName="openstack-network-exporter" containerID="cri-o://2e79233cf89f05e85e48d2caa1daae0928b9b4d3e75da1ee62c6549871edbf80" gracePeriod=30 Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.505798 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-wzxwb"] Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.531798 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_ad905f29-3683-466d-8acd-192cc5ac0960/ovsdbserver-sb/0.log" Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.531857 4811 generic.go:334] "Generic (PLEG): container finished" podID="ad905f29-3683-466d-8acd-192cc5ac0960" containerID="fa6828f22b3be25a45cd83e03b0c456790ff1d215506294c5c624f55eae5a0fc" exitCode=2 Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.531879 4811 generic.go:334] "Generic (PLEG): container finished" podID="ad905f29-3683-466d-8acd-192cc5ac0960" containerID="a7b75f68bd55325c1892a8a68699e72299412f605399d3727dc7a8c78169fd1c" exitCode=143 Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.532000 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"ad905f29-3683-466d-8acd-192cc5ac0960","Type":"ContainerDied","Data":"fa6828f22b3be25a45cd83e03b0c456790ff1d215506294c5c624f55eae5a0fc"} Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.532035 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"ad905f29-3683-466d-8acd-192cc5ac0960","Type":"ContainerDied","Data":"a7b75f68bd55325c1892a8a68699e72299412f605399d3727dc7a8c78169fd1c"} Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.533970 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-62xln"] Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.567571 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-62xln"] Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.589338 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.589678 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="ff73f2e3-0c2c-4008-bc61-36e65a0ad776" containerName="cinder-scheduler" containerID="cri-o://86828187d9cd31239028cb631f45f10cb80918e0ec369e86d8c2a82f84f3b061" gracePeriod=30 Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.590021 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="ff73f2e3-0c2c-4008-bc61-36e65a0ad776" containerName="probe" 
containerID="cri-o://d1bc0298c4d4006930b0858e1c0d43af29a7daf4a41eaa0feae8d160b91e3120" gracePeriod=30
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.597887 4811 generic.go:334] "Generic (PLEG): container finished" podID="6dd44d16-5b30-493c-9dd2-1ba856a4393a" containerID="1112ae05c1cf02d134ea38159d36c4ca6bf5c3e27f26f8a7397f9fd2b51e726b" exitCode=2
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.598006 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"6dd44d16-5b30-493c-9dd2-1ba856a4393a","Type":"ContainerDied","Data":"1112ae05c1cf02d134ea38159d36c4ca6bf5c3e27f26f8a7397f9fd2b51e726b"}
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.605536 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-89d7-account-create-update-7tjkx" event={"ID":"4815adcf-373b-4460-9f8c-d6dfaf471196","Type":"ContainerStarted","Data":"b532553aef6d0c89b3606b4c1d13a53f2558f9e48a7a31264a5870ab31b91da5"}
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.607513 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-867cd545c7-s5tc2"]
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.607858 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-867cd545c7-s5tc2" podUID="bfd184d9-d9d2-4a4b-a672-bfb76837eaca" containerName="dnsmasq-dns" containerID="cri-o://2737a1ce65612a85e61445090a2668841ba19af66f524535b6accf0e61ca2d75" gracePeriod=10
Jan 28 16:10:57 crc kubenswrapper[4811]: E0128 16:10:57.617160 4811 kuberuntime_manager.go:1274] "Unhandled Error" err=<
Jan 28 16:10:57 crc kubenswrapper[4811]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash
Jan 28 16:10:57 crc kubenswrapper[4811]: 
Jan 28 16:10:57 crc kubenswrapper[4811]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh
Jan 28 16:10:57 crc kubenswrapper[4811]: 
Jan 28 16:10:57 crc kubenswrapper[4811]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."}
Jan 28 16:10:57 crc kubenswrapper[4811]: 
Jan 28 16:10:57 crc kubenswrapper[4811]: MYSQL_CMD="mysql -h -u root -P 3306"
Jan 28 16:10:57 crc kubenswrapper[4811]: 
Jan 28 16:10:57 crc kubenswrapper[4811]: if [ -n "cinder" ]; then
Jan 28 16:10:57 crc kubenswrapper[4811]: GRANT_DATABASE="cinder"
Jan 28 16:10:57 crc kubenswrapper[4811]: else
Jan 28 16:10:57 crc kubenswrapper[4811]: GRANT_DATABASE="*"
Jan 28 16:10:57 crc kubenswrapper[4811]: fi
Jan 28 16:10:57 crc kubenswrapper[4811]: 
Jan 28 16:10:57 crc kubenswrapper[4811]: # going for maximum compatibility here:
Jan 28 16:10:57 crc kubenswrapper[4811]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used
Jan 28 16:10:57 crc kubenswrapper[4811]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not
Jan 28 16:10:57 crc kubenswrapper[4811]: # 3. create user with CREATE but then do all password and TLS with ALTER to
Jan 28 16:10:57 crc kubenswrapper[4811]: # support updates
Jan 28 16:10:57 crc kubenswrapper[4811]: 
Jan 28 16:10:57 crc kubenswrapper[4811]: $MYSQL_CMD < logger="UnhandledError"
Jan 28 16:10:57 crc kubenswrapper[4811]: E0128 16:10:57.618871 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"cinder-db-secret\\\" not found\"" pod="openstack/cinder-89d7-account-create-update-7tjkx" podUID="4815adcf-373b-4460-9f8c-d6dfaf471196"
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.654621 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.654913 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_bc54361e-5f34-4253-b91f-b1683e944191/ovsdbserver-nb/0.log"
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.654959 4811 generic.go:334] "Generic (PLEG): container finished" podID="bc54361e-5f34-4253-b91f-b1683e944191" containerID="9dbb3f5632b8a35166eed8631e1ad8ce2fea3e6580b10ef5e0e18c13b29de1d4" exitCode=2
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.655158 4811 generic.go:334] "Generic (PLEG): container finished" podID="bc54361e-5f34-4253-b91f-b1683e944191" containerID="dbed8cd5c1ccabc67fa742e55af4b04f80e26bca8d2979d43e905bb4d63ec6a5" exitCode=143
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.655196 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"bc54361e-5f34-4253-b91f-b1683e944191","Type":"ContainerDied","Data":"9dbb3f5632b8a35166eed8631e1ad8ce2fea3e6580b10ef5e0e18c13b29de1d4"}
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.655222 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"bc54361e-5f34-4253-b91f-b1683e944191","Type":"ContainerDied","Data":"dbed8cd5c1ccabc67fa742e55af4b04f80e26bca8d2979d43e905bb4d63ec6a5"}
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.657735 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="39ddd574-84b9-4065-9d72-5183fe430d4d" containerName="cinder-api-log" containerID="cri-o://1e0db5f5c865912285b18890df2afbfd3dcd0f3d6c9ece1eee5e5a4b85b96528" gracePeriod=30
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.658279 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="39ddd574-84b9-4065-9d72-5183fe430d4d" containerName="cinder-api" containerID="cri-o://f0a91b4e447ff1a52135fb776145bb39ff121699ece20c0eab9dbf3f1f9735a1" gracePeriod=30
Jan 28 16:10:57 crc kubenswrapper[4811]: E0128 16:10:57.671268 4811 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found
Jan 28 16:10:57 crc kubenswrapper[4811]: E0128 16:10:57.671334 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/3b7599aa-7114-46c9-bf38-b6d735b75326-config-data podName:3b7599aa-7114-46c9-bf38-b6d735b75326 nodeName:}" failed. No retries permitted until 2026-01-28 16:10:59.671317229 +0000 UTC m=+1552.425680812 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/3b7599aa-7114-46c9-bf38-b6d735b75326-config-data") pod "rabbitmq-cell1-server-0" (UID: "3b7599aa-7114-46c9-bf38-b6d735b75326") : configmap "rabbitmq-cell1-config-data" not found
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.697515 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.697779 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="f35795d5-ffac-4851-914a-00dc84496f91" containerName="glance-log" containerID="cri-o://84f6ea378d0b165999f2606c252834dba8614670922d49e48387042f20f1cf01" gracePeriod=30
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.698225 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="f35795d5-ffac-4851-914a-00dc84496f91" containerName="glance-httpd" containerID="cri-o://a156c42e817f2655b9c4afbf1946c7359670560b8886dbd5e1f1475d498cce7d" gracePeriod=30
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.723096 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-z7x67"]
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.787023 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-z7x67"]
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.833571 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-7b7c5df494-2hz6m"]
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.833891 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-7b7c5df494-2hz6m" podUID="a1a4fd4e-29a2-464f-aca7-3f856ed15221" containerName="placement-log" containerID="cri-o://f494267049de0afa9aadacc83552d047aaa6b559a44a9bc8ab504e3eb508fb84" gracePeriod=30
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.834351 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-7b7c5df494-2hz6m" podUID="a1a4fd4e-29a2-464f-aca7-3f856ed15221" containerName="placement-api" containerID="cri-o://6cba40ddbfcb2754d34d28f3d64877131d7be93e51d4f11947d0c71c0b815b3f" gracePeriod=30
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.872554 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.872835 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="54d8044d-232b-4d32-a2ed-fa2520b6513f" containerName="glance-log" containerID="cri-o://039d7a3660d21b25f1bc174e77b3130cc47d2cdf0049929c92da48d3adb0d4b0" gracePeriod=30
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.873291 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="54d8044d-232b-4d32-a2ed-fa2520b6513f" containerName="glance-httpd" containerID="cri-o://d388772dd1846d95d234d7f0fb94958aae8f34f960a0d56e2efdfefc4e673f8d" gracePeriod=30
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.922267 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-89d7-account-create-update-7tjkx"]
Jan 28 16:10:57 crc kubenswrapper[4811]: E0128 16:10:57.931215 4811 kuberuntime_manager.go:1274] "Unhandled Error" err=<
Jan 28 16:10:57 crc kubenswrapper[4811]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash
Jan 28 16:10:57 crc kubenswrapper[4811]: 
Jan 28 16:10:57 crc kubenswrapper[4811]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh
Jan 28 16:10:57 crc kubenswrapper[4811]: 
Jan 28 16:10:57 crc kubenswrapper[4811]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."}
Jan 28 16:10:57 crc kubenswrapper[4811]: 
Jan 28 16:10:57 crc kubenswrapper[4811]: MYSQL_CMD="mysql -h -u root -P 3306"
Jan 28 16:10:57 crc kubenswrapper[4811]: 
Jan 28 16:10:57 crc kubenswrapper[4811]: if [ -n "barbican" ]; then
Jan 28 16:10:57 crc kubenswrapper[4811]: GRANT_DATABASE="barbican"
Jan 28 16:10:57 crc kubenswrapper[4811]: else
Jan 28 16:10:57 crc kubenswrapper[4811]: GRANT_DATABASE="*"
Jan 28 16:10:57 crc kubenswrapper[4811]: fi
Jan 28 16:10:57 crc kubenswrapper[4811]: 
Jan 28 16:10:57 crc kubenswrapper[4811]: # going for maximum compatibility here:
Jan 28 16:10:57 crc kubenswrapper[4811]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used
Jan 28 16:10:57 crc kubenswrapper[4811]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not
Jan 28 16:10:57 crc kubenswrapper[4811]: # 3. create user with CREATE but then do all password and TLS with ALTER to
Jan 28 16:10:57 crc kubenswrapper[4811]: # support updates
Jan 28 16:10:57 crc kubenswrapper[4811]: 
Jan 28 16:10:57 crc kubenswrapper[4811]: $MYSQL_CMD < logger="UnhandledError"
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.931546 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-2f6fc"]
Jan 28 16:10:57 crc kubenswrapper[4811]: E0128 16:10:57.935366 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"barbican-db-secret\\\" not found\"" pod="openstack/barbican-917c-account-create-update-slsg4" podUID="13f72e20-02af-43d9-8400-8180b175795e"
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.938110 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.964440 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-2f6fc"]
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.981801 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-86846dcc5c-hkg4d"]
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.982073 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-86846dcc5c-hkg4d" podUID="73a0ad8a-2cb9-466e-b3e7-251823ea4528" containerName="neutron-api" containerID="cri-o://51a598763a4f9f6e93146ac5d33c8ce2be97bf7c452813033779bf52b0e6819b" gracePeriod=30
Jan 28 16:10:57 crc kubenswrapper[4811]: I0128 16:10:57.985417 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-86846dcc5c-hkg4d" podUID="73a0ad8a-2cb9-466e-b3e7-251823ea4528" containerName="neutron-httpd" containerID="cri-o://d30f3af4b0041c5e2a1bda1036b2e0f19543d876ef7630815ac0f86fd2173605" gracePeriod=30
Jan 28 16:10:57 crc kubenswrapper[4811]: E0128 16:10:57.986842 4811 configmap.go:193] Couldn't get configMap openstack/ovncontroller-scripts: configmap "ovncontroller-scripts" not
found Jan 28 16:10:57 crc kubenswrapper[4811]: E0128 16:10:57.986893 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-scripts podName:1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e nodeName:}" failed. No retries permitted until 2026-01-28 16:10:58.986877051 +0000 UTC m=+1551.741240634 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/configmap/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-scripts") pod "ovn-controller-fxxlf" (UID: "1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e") : configmap "ovncontroller-scripts" not found Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.014247 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-k2v5v"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.066264 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="3b7599aa-7114-46c9-bf38-b6d735b75326" containerName="rabbitmq" containerID="cri-o://e7577902b3c5f2e0c6da1d3f007db93314f77eb274a3406fc13d0cf0a0a1d27e" gracePeriod=604800 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.086557 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-k2v5v"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.128866 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-9sxhc"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.149127 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-9sxhc"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.172561 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.173064 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="account-server" containerID="cri-o://fa14b738e3c3edb62ca81416e8099d28a73bafc1416ebecc67cb280bd16a3afb" gracePeriod=30 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.173082 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="object-updater" containerID="cri-o://a455d95149982d555c46bea1485d4f01300e6c7c316898ea0120eeec661bed44" gracePeriod=30 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.173178 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="object-auditor" containerID="cri-o://df884a847f6dadbb8aca00b6654043b6c0784215f4e1b939b394aa4eb681e365" gracePeriod=30 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.173217 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="object-replicator" containerID="cri-o://b2aab4d8e8aa65a46a59daae5fc92084fdb3171e37ce4f82301d54bc92d83bac" gracePeriod=30 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.173248 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="object-server" containerID="cri-o://037d8c9004a401b696bf1926d9cfc0d899e6a6f211a5a2b6dab9e90a49238a12" gracePeriod=30 Jan 28 16:10:58 crc 
kubenswrapper[4811]: I0128 16:10:58.173276 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="container-updater" containerID="cri-o://de17a3ae96cd44c5e7539204c9ecb4a99908a1d59c0cf7b1b30906afe8cca2ea" gracePeriod=30 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.173304 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="container-auditor" containerID="cri-o://4c641018196714d8caf4ab6b48967eb4bb8ff1d06fab1301e8b3ab46913539dd" gracePeriod=30 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.173331 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="container-replicator" containerID="cri-o://edf4c2725cabb62cf88b0f7f7190df88433ba5987d5a8be69c4b5a42e602f8aa" gracePeriod=30 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.173359 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="container-server" containerID="cri-o://bbba77e73ba1d8f7f46e97a42a8636a5c7ea1703f7da10b9daf21ef5e23f03e9" gracePeriod=30 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.173385 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="account-reaper" containerID="cri-o://91e208e8d04b20770d870040f31d89e907527f347a650e9696cbc4d272f573aa" gracePeriod=30 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.173413 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="account-auditor" containerID="cri-o://37e7bde4db551b38403da3cfc7c146a5e67d421d13ffa40d87ce60bbfdaef222" gracePeriod=30 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.173459 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="account-replicator" containerID="cri-o://417e8d88bc175d3ce7bec37395bdbe9c0f310c931fbd100484ebab310a34e41a" gracePeriod=30 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.173493 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="swift-recon-cron" containerID="cri-o://4816fdef8ee72288ed6f44ab40270249e2109c09ec8ad6ae0094d790712afe47" gracePeriod=30 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.173616 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="rsync" containerID="cri-o://07584449cf1e8492fdd30850781850c18a6beb2ad58ae4c5d57dde647dfa08c3" gracePeriod=30 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.173696 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="object-expirer" containerID="cri-o://2ff7e313dd106d84ce405602fc4e33fb5da9f55f4e92c636152ab812ff49a6b8" gracePeriod=30 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.187547 4811 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.187857 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="15d6fdf2-a4f1-4f30-b621-31877f96868f" containerName="nova-metadata-log" containerID="cri-o://f1c697d3014cf2adc2ad5baf6f4f5abebc7a014d2d73bdad9756ab37a9f1de10" gracePeriod=30 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.188340 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="15d6fdf2-a4f1-4f30-b621-31877f96868f" containerName="nova-metadata-metadata" containerID="cri-o://751f6099de1911b3d0549631c9b2b444b7a26a8a63ae6b18819217bbb07da0f4" gracePeriod=30 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.211732 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.212008 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d75f7765-1a6b-4bb5-819a-6891694a29c8" containerName="nova-api-log" containerID="cri-o://7bb42b27e1f14ad69f131fe0e78ac3ea542276272ff3e8a4de4b7fc31e901928" gracePeriod=30 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.212186 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d75f7765-1a6b-4bb5-819a-6891694a29c8" containerName="nova-api-api" containerID="cri-o://7f7face6d95fe804267fa4f30c1e01cfdaa41613853274a35697b679666f13c6" gracePeriod=30 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.286388 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-917c-account-create-update-slsg4"] Jan 28 16:10:58 crc kubenswrapper[4811]: E0128 16:10:58.296515 4811 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 28 16:10:58 crc kubenswrapper[4811]: E0128 16:10:58.296628 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/23095127-8b86-445a-8c32-1e6bc14bf05e-config-data podName:23095127-8b86-445a-8c32-1e6bc14bf05e nodeName:}" failed. No retries permitted until 2026-01-28 16:11:00.296613145 +0000 UTC m=+1553.050976728 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/23095127-8b86-445a-8c32-1e6bc14bf05e-config-data") pod "rabbitmq-server-0" (UID: "23095127-8b86-445a-8c32-1e6bc14bf05e") : configmap "rabbitmq-config-data" not found Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.296957 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-56fd96c5b5-d2r9l"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.296991 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-h47z9"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.297202 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-56fd96c5b5-d2r9l" podUID="f087b3b8-82d6-4cb1-a883-21a2f0c40580" containerName="proxy-httpd" containerID="cri-o://0a27d600e6b6c3ccf2865906d7a304e9db45620c7979a493a59166dfb8cd6b04" gracePeriod=30 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.297536 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-56fd96c5b5-d2r9l" podUID="f087b3b8-82d6-4cb1-a883-21a2f0c40580" containerName="proxy-server" containerID="cri-o://a235c2b4225ce39f8a09e2a45966350b20607db9b21d8ff27ca73be5d353cadd" gracePeriod=30 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.314611 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-78bd467b68-h25ls"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.314911 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-78bd467b68-h25ls" podUID="622ebcc7-b645-4db6-86c0-3546523fb7c7" containerName="barbican-api-log" containerID="cri-o://656706d31855991db104dc63417a4a2dad789a13059f74bc9caee3f4a9998a35" gracePeriod=30 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.315071 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-78bd467b68-h25ls" podUID="622ebcc7-b645-4db6-86c0-3546523fb7c7" containerName="barbican-api" containerID="cri-o://f3194b493d5ddf32d8f9d2a436bb598bd51e3c3d501ca80d66d47ad0bff2fffb" gracePeriod=30 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.381918 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-wzxwb" podUID="8bfbbf41-033c-479e-b625-396378f8afa2" containerName="ovs-vswitchd" containerID="cri-o://0ff340a32e4bc187020c5b051ade227cf56575c72a89db98472087024a25e2bf" gracePeriod=30 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.387221 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f54b0ba-3a97-41d2-a048-01d6dd6daeca" path="/var/lib/kubelet/pods/2f54b0ba-3a97-41d2-a048-01d6dd6daeca/volumes" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.388396 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3dd67fd5-42d7-409d-8c89-3b2e9fe323c8" path="/var/lib/kubelet/pods/3dd67fd5-42d7-409d-8c89-3b2e9fe323c8/volumes" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.388906 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c1b1a49-b786-45bb-8f17-d29c10d099bc" path="/var/lib/kubelet/pods/4c1b1a49-b786-45bb-8f17-d29c10d099bc/volumes" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.389475 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464" path="/var/lib/kubelet/pods/4e8d4fb1-4141-4cb1-b9a9-2d2adf4a0464/volumes" Jan 
28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.389940 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55b5b721-9672-447d-b4d8-b53b7de5ce2f" path="/var/lib/kubelet/pods/55b5b721-9672-447d-b4d8-b53b7de5ce2f/volumes" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.391742 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="677fa641-ca48-40fe-8b91-40147f1376e5" path="/var/lib/kubelet/pods/677fa641-ca48-40fe-8b91-40147f1376e5/volumes" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.392265 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7acf4fe9-fb7e-4260-9976-04deecc1264d" path="/var/lib/kubelet/pods/7acf4fe9-fb7e-4260-9976-04deecc1264d/volumes" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.393336 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7db6998b-1c77-4473-8656-6e64c5f14688" path="/var/lib/kubelet/pods/7db6998b-1c77-4473-8656-6e64c5f14688/volumes" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.394283 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91de0bcd-1c43-42d8-8582-ba86e33b81c2" path="/var/lib/kubelet/pods/91de0bcd-1c43-42d8-8582-ba86e33b81c2/volumes" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.394980 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6f56fef-7365-409e-beaa-1b91d5ea44c4" path="/var/lib/kubelet/pods/a6f56fef-7365-409e-beaa-1b91d5ea44c4/volumes" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.395688 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1" path="/var/lib/kubelet/pods/b01c05f3-e9c1-4fcc-9f2c-c5ea4fb82ff1/volumes" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.397287 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b38aa2ec-ec93-453c-8a6f-7943114f5047" path="/var/lib/kubelet/pods/b38aa2ec-ec93-453c-8a6f-7943114f5047/volumes" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.397837 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce03e462-76ab-488d-81d9-578a4105320c" path="/var/lib/kubelet/pods/ce03e462-76ab-488d-81d9-578a4105320c/volumes" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.398328 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d00b71d9-55f0-4ce4-9c27-1471372ef467" path="/var/lib/kubelet/pods/d00b71d9-55f0-4ce4-9c27-1471372ef467/volumes" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.398818 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d853eecf-d953-46ff-bf52-92c60a9213f6" path="/var/lib/kubelet/pods/d853eecf-d953-46ff-bf52-92c60a9213f6/volumes" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.400281 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e06e289b-fb43-4e7d-999a-38d8a4fc2178" path="/var/lib/kubelet/pods/e06e289b-fb43-4e7d-999a-38d8a4fc2178/volumes" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.400786 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa06bb82-ce18-4cad-b539-b9ec87700437" path="/var/lib/kubelet/pods/fa06bb82-ce18-4cad-b539-b9ec87700437/volumes" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.403349 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_ad905f29-3683-466d-8acd-192cc5ac0960/ovsdbserver-sb/0.log" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.403545 4811 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.403988 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-h47z9"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.404026 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.404046 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-5bd9c6c974-98zpp"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.404059 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.404071 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-kqxgl"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.404594 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-5bd9c6c974-98zpp" podUID="1761893b-d911-4596-b0b3-ce2d25d0384f" containerName="barbican-keystone-listener-log" containerID="cri-o://29723b95d76a6ab96074381d9690ad4cd770a48e64bafab8c23717a36eaddbf2" gracePeriod=30 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.404636 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-5bd9c6c974-98zpp" podUID="1761893b-d911-4596-b0b3-ce2d25d0384f" containerName="barbican-keystone-listener" containerID="cri-o://536a91cb8574f375dfd0ebc16db3f46162b78ee4bd2c334c6a012f5ff6e1ed7a" gracePeriod=30 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.406883 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-lb2cx"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.463237 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-kqxgl"] Jan 28 16:10:58 crc kubenswrapper[4811]: E0128 16:10:58.490231 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dbed8cd5c1ccabc67fa742e55af4b04f80e26bca8d2979d43e905bb4d63ec6a5 is running failed: container process not found" containerID="dbed8cd5c1ccabc67fa742e55af4b04f80e26bca8d2979d43e905bb4d63ec6a5" cmd=["/usr/bin/pidof","ovsdb-server"] Jan 28 16:10:58 crc kubenswrapper[4811]: E0128 16:10:58.503346 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dbed8cd5c1ccabc67fa742e55af4b04f80e26bca8d2979d43e905bb4d63ec6a5 is running failed: container process not found" containerID="dbed8cd5c1ccabc67fa742e55af4b04f80e26bca8d2979d43e905bb4d63ec6a5" cmd=["/usr/bin/pidof","ovsdb-server"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.503501 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-69cfb4f585-8qngp"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.503760 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-69cfb4f585-8qngp" podUID="0138ad61-fb17-46c7-bdd3-c65f15e2e186" containerName="barbican-worker-log" containerID="cri-o://89af73945fbafecef0415a87ddc471cbf18d61957ff14ef2a2681bac6cbcc415" gracePeriod=30 Jan 28 16:10:58 crc kubenswrapper[4811]: E0128 16:10:58.504241 4811 log.go:32] "ExecSync cmd from runtime service 
failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dbed8cd5c1ccabc67fa742e55af4b04f80e26bca8d2979d43e905bb4d63ec6a5 is running failed: container process not found" containerID="dbed8cd5c1ccabc67fa742e55af4b04f80e26bca8d2979d43e905bb4d63ec6a5" cmd=["/usr/bin/pidof","ovsdb-server"] Jan 28 16:10:58 crc kubenswrapper[4811]: E0128 16:10:58.504324 4811 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dbed8cd5c1ccabc67fa742e55af4b04f80e26bca8d2979d43e905bb4d63ec6a5 is running failed: container process not found" probeType="Readiness" pod="openstack/ovsdbserver-nb-0" podUID="bc54361e-5f34-4253-b91f-b1683e944191" containerName="ovsdbserver-nb" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.506122 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-69cfb4f585-8qngp" podUID="0138ad61-fb17-46c7-bdd3-c65f15e2e186" containerName="barbican-worker" containerID="cri-o://fcc510fe7aab0a06fcaa0e25c25b295dadfdee8c254acdff99aa414590067770" gracePeriod=30 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.514343 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="23095127-8b86-445a-8c32-1e6bc14bf05e" containerName="rabbitmq" containerID="cri-o://783bfe55389f6600eee189683b41f4d30b0ec2595be48aebc9462d419fec02fc" gracePeriod=604800 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.533253 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.555573 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_bc54361e-5f34-4253-b91f-b1683e944191/ovsdbserver-nb/0.log" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.555691 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.563262 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-d62hg_3e79a2a6-54aa-4533-95ee-9deb5c46811b/openstack-network-exporter/0.log" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.563333 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-d62hg" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.572408 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-lb2cx"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.598478 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-917c-account-create-update-slsg4"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.605834 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-x2dtb"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.612992 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-mdvqk"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.610900 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad905f29-3683-466d-8acd-192cc5ac0960-metrics-certs-tls-certs\") pod \"ad905f29-3683-466d-8acd-192cc5ac0960\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") " Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.616771 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad905f29-3683-466d-8acd-192cc5ac0960-combined-ca-bundle\") pod \"ad905f29-3683-466d-8acd-192cc5ac0960\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") " Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.616823 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e79a2a6-54aa-4533-95ee-9deb5c46811b-config\") pod \"3e79a2a6-54aa-4533-95ee-9deb5c46811b\" (UID: \"3e79a2a6-54aa-4533-95ee-9deb5c46811b\") " Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.616842 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bc54361e-5f34-4253-b91f-b1683e944191-ovsdbserver-nb-tls-certs\") pod \"bc54361e-5f34-4253-b91f-b1683e944191\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") " Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.616866 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"bc54361e-5f34-4253-b91f-b1683e944191\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") " Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.616915 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bc54361e-5f34-4253-b91f-b1683e944191-metrics-certs-tls-certs\") pod \"bc54361e-5f34-4253-b91f-b1683e944191\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") " Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.616949 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ad905f29-3683-466d-8acd-192cc5ac0960-scripts\") pod \"ad905f29-3683-466d-8acd-192cc5ac0960\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") " Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.616989 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9d8f73c-afb3-4e7e-a69a-e8899bf051cc-combined-ca-bundle\") pod \"e9d8f73c-afb3-4e7e-a69a-e8899bf051cc\" (UID: 
\"e9d8f73c-afb3-4e7e-a69a-e8899bf051cc\") " Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.617008 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cn88r\" (UniqueName: \"kubernetes.io/projected/bc54361e-5f34-4253-b91f-b1683e944191-kube-api-access-cn88r\") pod \"bc54361e-5f34-4253-b91f-b1683e944191\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") " Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.617037 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/3e79a2a6-54aa-4533-95ee-9deb5c46811b-ovs-rundir\") pod \"3e79a2a6-54aa-4533-95ee-9deb5c46811b\" (UID: \"3e79a2a6-54aa-4533-95ee-9deb5c46811b\") " Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.617081 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bc54361e-5f34-4253-b91f-b1683e944191-scripts\") pod \"bc54361e-5f34-4253-b91f-b1683e944191\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") " Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.617147 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e79a2a6-54aa-4533-95ee-9deb5c46811b-metrics-certs-tls-certs\") pod \"3e79a2a6-54aa-4533-95ee-9deb5c46811b\" (UID: \"3e79a2a6-54aa-4533-95ee-9deb5c46811b\") " Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.617163 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e9d8f73c-afb3-4e7e-a69a-e8899bf051cc-openstack-config-secret\") pod \"e9d8f73c-afb3-4e7e-a69a-e8899bf051cc\" (UID: \"e9d8f73c-afb3-4e7e-a69a-e8899bf051cc\") " Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.617181 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h6t26\" (UniqueName: \"kubernetes.io/projected/e9d8f73c-afb3-4e7e-a69a-e8899bf051cc-kube-api-access-h6t26\") pod \"e9d8f73c-afb3-4e7e-a69a-e8899bf051cc\" (UID: \"e9d8f73c-afb3-4e7e-a69a-e8899bf051cc\") " Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.617197 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rp5zq\" (UniqueName: \"kubernetes.io/projected/ad905f29-3683-466d-8acd-192cc5ac0960-kube-api-access-rp5zq\") pod \"ad905f29-3683-466d-8acd-192cc5ac0960\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") " Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.617215 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc54361e-5f34-4253-b91f-b1683e944191-config\") pod \"bc54361e-5f34-4253-b91f-b1683e944191\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") " Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.617239 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc54361e-5f34-4253-b91f-b1683e944191-combined-ca-bundle\") pod \"bc54361e-5f34-4253-b91f-b1683e944191\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") " Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.617257 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad905f29-3683-466d-8acd-192cc5ac0960-ovsdbserver-sb-tls-certs\") pod 
\"ad905f29-3683-466d-8acd-192cc5ac0960\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") " Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.617281 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bc54361e-5f34-4253-b91f-b1683e944191-ovsdb-rundir\") pod \"bc54361e-5f34-4253-b91f-b1683e944191\" (UID: \"bc54361e-5f34-4253-b91f-b1683e944191\") " Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.617326 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ad905f29-3683-466d-8acd-192cc5ac0960\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") " Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.617342 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/3e79a2a6-54aa-4533-95ee-9deb5c46811b-ovn-rundir\") pod \"3e79a2a6-54aa-4533-95ee-9deb5c46811b\" (UID: \"3e79a2a6-54aa-4533-95ee-9deb5c46811b\") " Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.617363 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e9d8f73c-afb3-4e7e-a69a-e8899bf051cc-openstack-config\") pod \"e9d8f73c-afb3-4e7e-a69a-e8899bf051cc\" (UID: \"e9d8f73c-afb3-4e7e-a69a-e8899bf051cc\") " Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.617387 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ad905f29-3683-466d-8acd-192cc5ac0960-ovsdb-rundir\") pod \"ad905f29-3683-466d-8acd-192cc5ac0960\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") " Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.617410 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ad905f29-3683-466d-8acd-192cc5ac0960-config\") pod \"ad905f29-3683-466d-8acd-192cc5ac0960\" (UID: \"ad905f29-3683-466d-8acd-192cc5ac0960\") " Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.617444 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mtr46\" (UniqueName: \"kubernetes.io/projected/3e79a2a6-54aa-4533-95ee-9deb5c46811b-kube-api-access-mtr46\") pod \"3e79a2a6-54aa-4533-95ee-9deb5c46811b\" (UID: \"3e79a2a6-54aa-4533-95ee-9deb5c46811b\") " Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.617462 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e79a2a6-54aa-4533-95ee-9deb5c46811b-combined-ca-bundle\") pod \"3e79a2a6-54aa-4533-95ee-9deb5c46811b\" (UID: \"3e79a2a6-54aa-4533-95ee-9deb5c46811b\") " Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.620150 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3e79a2a6-54aa-4533-95ee-9deb5c46811b-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "3e79a2a6-54aa-4533-95ee-9deb5c46811b" (UID: "3e79a2a6-54aa-4533-95ee-9deb5c46811b"). InnerVolumeSpecName "ovn-rundir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.620330 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad905f29-3683-466d-8acd-192cc5ac0960-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "ad905f29-3683-466d-8acd-192cc5ac0960" (UID: "ad905f29-3683-466d-8acd-192cc5ac0960"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.621248 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc54361e-5f34-4253-b91f-b1683e944191-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "bc54361e-5f34-4253-b91f-b1683e944191" (UID: "bc54361e-5f34-4253-b91f-b1683e944191"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.621307 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad905f29-3683-466d-8acd-192cc5ac0960-config" (OuterVolumeSpecName: "config") pod "ad905f29-3683-466d-8acd-192cc5ac0960" (UID: "ad905f29-3683-466d-8acd-192cc5ac0960"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.621551 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e79a2a6-54aa-4533-95ee-9deb5c46811b-config" (OuterVolumeSpecName: "config") pod "3e79a2a6-54aa-4533-95ee-9deb5c46811b" (UID: "3e79a2a6-54aa-4533-95ee-9deb5c46811b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.621924 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc54361e-5f34-4253-b91f-b1683e944191-config" (OuterVolumeSpecName: "config") pod "bc54361e-5f34-4253-b91f-b1683e944191" (UID: "bc54361e-5f34-4253-b91f-b1683e944191"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.621973 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-2952-account-create-update-df9kg"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.622491 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3e79a2a6-54aa-4533-95ee-9deb5c46811b-ovs-rundir" (OuterVolumeSpecName: "ovs-rundir") pod "3e79a2a6-54aa-4533-95ee-9deb5c46811b" (UID: "3e79a2a6-54aa-4533-95ee-9deb5c46811b"). InnerVolumeSpecName "ovs-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.623626 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "ad905f29-3683-466d-8acd-192cc5ac0960" (UID: "ad905f29-3683-466d-8acd-192cc5ac0960"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.626806 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc54361e-5f34-4253-b91f-b1683e944191-scripts" (OuterVolumeSpecName: "scripts") pod "bc54361e-5f34-4253-b91f-b1683e944191" (UID: "bc54361e-5f34-4253-b91f-b1683e944191"). 
InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.630997 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad905f29-3683-466d-8acd-192cc5ac0960-scripts" (OuterVolumeSpecName: "scripts") pod "ad905f29-3683-466d-8acd-192cc5ac0960" (UID: "ad905f29-3683-466d-8acd-192cc5ac0960"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.632100 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-x2dtb"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.641527 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-mdvqk"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.656543 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-99f2-account-create-update-hzmxs"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.662640 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "bc54361e-5f34-4253-b91f-b1683e944191" (UID: "bc54361e-5f34-4253-b91f-b1683e944191"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.666918 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.667273 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="0bb2deb3-c0dd-4102-96dd-b21dd187bf89" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://48932a3a225c09d53791cf3601a19fdee760a4b3988f860040cd3de5ac4f1b6c" gracePeriod=30 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.672720 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9d8f73c-afb3-4e7e-a69a-e8899bf051cc-kube-api-access-h6t26" (OuterVolumeSpecName: "kube-api-access-h6t26") pod "e9d8f73c-afb3-4e7e-a69a-e8899bf051cc" (UID: "e9d8f73c-afb3-4e7e-a69a-e8899bf051cc"). InnerVolumeSpecName "kube-api-access-h6t26". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.673759 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad905f29-3683-466d-8acd-192cc5ac0960-kube-api-access-rp5zq" (OuterVolumeSpecName: "kube-api-access-rp5zq") pod "ad905f29-3683-466d-8acd-192cc5ac0960" (UID: "ad905f29-3683-466d-8acd-192cc5ac0960"). InnerVolumeSpecName "kube-api-access-rp5zq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.680116 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_bc54361e-5f34-4253-b91f-b1683e944191/ovsdbserver-nb/0.log" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.680578 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.681506 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-7kgb2"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.681558 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"bc54361e-5f34-4253-b91f-b1683e944191","Type":"ContainerDied","Data":"e6713f981b12ee8623fc24a3159310d213021263913657306c24da8a342ccd1a"} Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.681594 4811 scope.go:117] "RemoveContainer" containerID="9dbb3f5632b8a35166eed8631e1ad8ce2fea3e6580b10ef5e0e18c13b29de1d4" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.687262 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e79a2a6-54aa-4533-95ee-9deb5c46811b-kube-api-access-mtr46" (OuterVolumeSpecName: "kube-api-access-mtr46") pod "3e79a2a6-54aa-4533-95ee-9deb5c46811b" (UID: "3e79a2a6-54aa-4533-95ee-9deb5c46811b"). InnerVolumeSpecName "kube-api-access-mtr46". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.693212 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.695218 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="94afc9c1-3ddc-4ad1-9df2-03c593f1d536" containerName="nova-cell1-conductor-conductor" containerID="cri-o://16ecbb402df17bba24033b41f310dbf0a19457f1f7fd2b1097a18cc03319751f" gracePeriod=30 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.701411 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc54361e-5f34-4253-b91f-b1683e944191-kube-api-access-cn88r" (OuterVolumeSpecName: "kube-api-access-cn88r") pod "bc54361e-5f34-4253-b91f-b1683e944191" (UID: "bc54361e-5f34-4253-b91f-b1683e944191"). InnerVolumeSpecName "kube-api-access-cn88r". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.704705 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_ad905f29-3683-466d-8acd-192cc5ac0960/ovsdbserver-sb/0.log" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.705506 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"ad905f29-3683-466d-8acd-192cc5ac0960","Type":"ContainerDied","Data":"3f95d4f8426c76a308eeb892a389d11c6c75e84393c6b08b5c16880d9aed5118"} Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.705678 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.712780 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9d8f73c-afb3-4e7e-a69a-e8899bf051cc-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "e9d8f73c-afb3-4e7e-a69a-e8899bf051cc" (UID: "e9d8f73c-afb3-4e7e-a69a-e8899bf051cc"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.714267 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-7kgb2"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.715315 4811 generic.go:334] "Generic (PLEG): container finished" podID="0138ad61-fb17-46c7-bdd3-c65f15e2e186" containerID="89af73945fbafecef0415a87ddc471cbf18d61957ff14ef2a2681bac6cbcc415" exitCode=143 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.715394 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-69cfb4f585-8qngp" event={"ID":"0138ad61-fb17-46c7-bdd3-c65f15e2e186","Type":"ContainerDied","Data":"89af73945fbafecef0415a87ddc471cbf18d61957ff14ef2a2681bac6cbcc415"} Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.719363 4811 generic.go:334] "Generic (PLEG): container finished" podID="15d6fdf2-a4f1-4f30-b621-31877f96868f" containerID="f1c697d3014cf2adc2ad5baf6f4f5abebc7a014d2d73bdad9756ab37a9f1de10" exitCode=143 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.719609 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"15d6fdf2-a4f1-4f30-b621-31877f96868f","Type":"ContainerDied","Data":"f1c697d3014cf2adc2ad5baf6f4f5abebc7a014d2d73bdad9756ab37a9f1de10"} Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.719858 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cn88r\" (UniqueName: \"kubernetes.io/projected/bc54361e-5f34-4253-b91f-b1683e944191-kube-api-access-cn88r\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.719879 4811 reconciler_common.go:293] "Volume detached for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/3e79a2a6-54aa-4533-95ee-9deb5c46811b-ovs-rundir\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.719889 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bc54361e-5f34-4253-b91f-b1683e944191-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.719898 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h6t26\" (UniqueName: \"kubernetes.io/projected/e9d8f73c-afb3-4e7e-a69a-e8899bf051cc-kube-api-access-h6t26\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.719907 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc54361e-5f34-4253-b91f-b1683e944191-config\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.719916 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rp5zq\" (UniqueName: \"kubernetes.io/projected/ad905f29-3683-466d-8acd-192cc5ac0960-kube-api-access-rp5zq\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.719924 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bc54361e-5f34-4253-b91f-b1683e944191-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.719945 4811 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.719954 4811 
reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/3e79a2a6-54aa-4533-95ee-9deb5c46811b-ovn-rundir\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.719962 4811 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e9d8f73c-afb3-4e7e-a69a-e8899bf051cc-openstack-config\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.719970 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ad905f29-3683-466d-8acd-192cc5ac0960-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.719978 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ad905f29-3683-466d-8acd-192cc5ac0960-config\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.719985 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mtr46\" (UniqueName: \"kubernetes.io/projected/3e79a2a6-54aa-4533-95ee-9deb5c46811b-kube-api-access-mtr46\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.719993 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e79a2a6-54aa-4533-95ee-9deb5c46811b-config\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.720005 4811 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.720014 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ad905f29-3683-466d-8acd-192cc5ac0960-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.727311 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-cell1-galera-0" podUID="9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b" containerName="galera" containerID="cri-o://b0391cee65f30aa023e4b8a6992aae9b85400f523a386a5570a75825a412741e" gracePeriod=30 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.728979 4811 generic.go:334] "Generic (PLEG): container finished" podID="39ddd574-84b9-4065-9d72-5183fe430d4d" containerID="1e0db5f5c865912285b18890df2afbfd3dcd0f3d6c9ece1eee5e5a4b85b96528" exitCode=143 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.729047 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"39ddd574-84b9-4065-9d72-5183fe430d4d","Type":"ContainerDied","Data":"1e0db5f5c865912285b18890df2afbfd3dcd0f3d6c9ece1eee5e5a4b85b96528"} Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.733281 4811 generic.go:334] "Generic (PLEG): container finished" podID="54d8044d-232b-4d32-a2ed-fa2520b6513f" containerID="039d7a3660d21b25f1bc174e77b3130cc47d2cdf0049929c92da48d3adb0d4b0" exitCode=143 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.733351 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"54d8044d-232b-4d32-a2ed-fa2520b6513f","Type":"ContainerDied","Data":"039d7a3660d21b25f1bc174e77b3130cc47d2cdf0049929c92da48d3adb0d4b0"} Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 
16:10:58.748000 4811 generic.go:334] "Generic (PLEG): container finished" podID="d75f7765-1a6b-4bb5-819a-6891694a29c8" containerID="7bb42b27e1f14ad69f131fe0e78ac3ea542276272ff3e8a4de4b7fc31e901928" exitCode=143 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.748067 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d75f7765-1a6b-4bb5-819a-6891694a29c8","Type":"ContainerDied","Data":"7bb42b27e1f14ad69f131fe0e78ac3ea542276272ff3e8a4de4b7fc31e901928"} Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.757378 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc54361e-5f34-4253-b91f-b1683e944191-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bc54361e-5f34-4253-b91f-b1683e944191" (UID: "bc54361e-5f34-4253-b91f-b1683e944191"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.758827 4811 generic.go:334] "Generic (PLEG): container finished" podID="f35795d5-ffac-4851-914a-00dc84496f91" containerID="84f6ea378d0b165999f2606c252834dba8614670922d49e48387042f20f1cf01" exitCode=143 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.758893 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f35795d5-ffac-4851-914a-00dc84496f91","Type":"ContainerDied","Data":"84f6ea378d0b165999f2606c252834dba8614670922d49e48387042f20f1cf01"} Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.760620 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-k98kw"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.761957 4811 scope.go:117] "RemoveContainer" containerID="dbed8cd5c1ccabc67fa742e55af4b04f80e26bca8d2979d43e905bb4d63ec6a5" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.770315 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.770608 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="f76f6c0a-7254-4c76-86c3-e1004cf38303" containerName="nova-cell0-conductor-conductor" containerID="cri-o://8e97f5379d59be0d7d51607bfdc713017cc666c439edf2b55720cc1ee7a94ec6" gracePeriod=30 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.776235 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-k98kw"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.777786 4811 generic.go:334] "Generic (PLEG): container finished" podID="e9d8f73c-afb3-4e7e-a69a-e8899bf051cc" containerID="17b26e241f71fd0bba9cd473dd072e8e999cd250ef30a45cea6cacdf8de99886" exitCode=137 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.777966 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.784908 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9d8f73c-afb3-4e7e-a69a-e8899bf051cc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e9d8f73c-afb3-4e7e-a69a-e8899bf051cc" (UID: "e9d8f73c-afb3-4e7e-a69a-e8899bf051cc"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.789984 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e79a2a6-54aa-4533-95ee-9deb5c46811b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3e79a2a6-54aa-4533-95ee-9deb5c46811b" (UID: "3e79a2a6-54aa-4533-95ee-9deb5c46811b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.791516 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.792086 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="f5158a65-ce89-4a2b-9a19-0e4a6070562a" containerName="nova-scheduler-scheduler" containerID="cri-o://d9d98a9ef617e0914e95c49e3f4209449a918bf828d1812a874f55e9669c0fbe" gracePeriod=30
Jan 28 16:10:58 crc kubenswrapper[4811]: E0128 16:10:58.803605 4811 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=<
Jan 28 16:10:58 crc kubenswrapper[4811]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh
Jan 28 16:10:58 crc kubenswrapper[4811]: + source /usr/local/bin/container-scripts/functions
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ OVNBridge=br-int
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ OVNRemote=tcp:localhost:6642
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ OVNEncapType=geneve
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ OVNAvailabilityZones=
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ EnableChassisAsGateway=true
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ PhysicalNetworks=
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ OVNHostName=
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ DB_FILE=/etc/openvswitch/conf.db
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ ovs_dir=/var/lib/openvswitch
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server
Jan 28 16:10:58 crc kubenswrapper[4811]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']'
Jan 28 16:10:58 crc kubenswrapper[4811]: + sleep 0.5
Jan 28 16:10:58 crc kubenswrapper[4811]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']'
Jan 28 16:10:58 crc kubenswrapper[4811]: + sleep 0.5
Jan 28 16:10:58 crc kubenswrapper[4811]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']'
Jan 28 16:10:58 crc kubenswrapper[4811]: + cleanup_ovsdb_server_semaphore
Jan 28 16:10:58 crc kubenswrapper[4811]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server
Jan 28 16:10:58 crc kubenswrapper[4811]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd
Jan 28 16:10:58 crc kubenswrapper[4811]: > execCommand=["/usr/local/bin/container-scripts/stop-ovsdb-server.sh"] containerName="ovsdb-server" pod="openstack/ovn-controller-ovs-wzxwb" message=<
Jan 28 16:10:58 crc kubenswrapper[4811]: Exiting ovsdb-server (5) ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh
Jan 28 16:10:58 crc kubenswrapper[4811]: + source /usr/local/bin/container-scripts/functions
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ OVNBridge=br-int
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ OVNRemote=tcp:localhost:6642
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ OVNEncapType=geneve
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ OVNAvailabilityZones=
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ EnableChassisAsGateway=true
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ PhysicalNetworks=
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ OVNHostName=
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ DB_FILE=/etc/openvswitch/conf.db
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ ovs_dir=/var/lib/openvswitch
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server
Jan 28 16:10:58 crc kubenswrapper[4811]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']'
Jan 28 16:10:58 crc kubenswrapper[4811]: + sleep 0.5
Jan 28 16:10:58 crc kubenswrapper[4811]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']'
Jan 28 16:10:58 crc kubenswrapper[4811]: + sleep 0.5
Jan 28 16:10:58 crc kubenswrapper[4811]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']'
Jan 28 16:10:58 crc kubenswrapper[4811]: + cleanup_ovsdb_server_semaphore
Jan 28 16:10:58 crc kubenswrapper[4811]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server
Jan 28 16:10:58 crc kubenswrapper[4811]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd
Jan 28 16:10:58 crc kubenswrapper[4811]: >
Jan 28 16:10:58 crc kubenswrapper[4811]: E0128 16:10:58.803642 4811 kuberuntime_container.go:691] "PreStop hook failed" err=<
Jan 28 16:10:58 crc kubenswrapper[4811]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh
Jan 28 16:10:58 crc kubenswrapper[4811]: + source /usr/local/bin/container-scripts/functions
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ OVNBridge=br-int
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ OVNRemote=tcp:localhost:6642
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ OVNEncapType=geneve
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ OVNAvailabilityZones=
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ EnableChassisAsGateway=true
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ PhysicalNetworks=
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ OVNHostName=
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ DB_FILE=/etc/openvswitch/conf.db
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ ovs_dir=/var/lib/openvswitch
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows
Jan 28 16:10:58 crc kubenswrapper[4811]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server
Jan 28 16:10:58 crc kubenswrapper[4811]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']'
Jan 28 16:10:58 crc kubenswrapper[4811]: + sleep 0.5
Jan 28 16:10:58 crc kubenswrapper[4811]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']'
Jan 28 16:10:58 crc kubenswrapper[4811]: + sleep 0.5
Jan 28 16:10:58 crc kubenswrapper[4811]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']'
Jan 28 16:10:58 crc kubenswrapper[4811]: + cleanup_ovsdb_server_semaphore
Jan 28 16:10:58 crc kubenswrapper[4811]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server
Jan 28 16:10:58 crc kubenswrapper[4811]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd
Jan 28 16:10:58 crc kubenswrapper[4811]: > pod="openstack/ovn-controller-ovs-wzxwb" podUID="8bfbbf41-033c-479e-b625-396378f8afa2" containerName="ovsdb-server" containerID="cri-o://d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21"
Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.803671 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-wzxwb" podUID="8bfbbf41-033c-479e-b625-396378f8afa2" containerName="ovsdb-server" containerID="cri-o://d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21" gracePeriod=29
Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.808909 4811 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc"
Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.809023 4811 generic.go:334] "Generic (PLEG): container finished" podID="1761893b-d911-4596-b0b3-ce2d25d0384f" containerID="29723b95d76a6ab96074381d9690ad4cd770a48e64bafab8c23717a36eaddbf2" exitCode=143
Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.809162 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5bd9c6c974-98zpp" event={"ID":"1761893b-d911-4596-b0b3-ce2d25d0384f","Type":"ContainerDied","Data":"29723b95d76a6ab96074381d9690ad4cd770a48e64bafab8c23717a36eaddbf2"}
Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.821500 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9d8f73c-afb3-4e7e-a69a-e8899bf051cc-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.821545 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc54361e-5f34-4253-b91f-b1683e944191-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.821556 4811 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\""
Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.821565 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e79a2a6-54aa-4533-95ee-9deb5c46811b-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.840335 4811 generic.go:334] "Generic (PLEG): container finished" podID="ff73f2e3-0c2c-4008-bc61-36e65a0ad776" containerID="d1bc0298c4d4006930b0858e1c0d43af29a7daf4a41eaa0feae8d160b91e3120" exitCode=0
Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.840400 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ff73f2e3-0c2c-4008-bc61-36e65a0ad776","Type":"ContainerDied","Data":"d1bc0298c4d4006930b0858e1c0d43af29a7daf4a41eaa0feae8d160b91e3120"}
Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.850445 4811 generic.go:334] "Generic (PLEG): container finished" podID="622ebcc7-b645-4db6-86c0-3546523fb7c7"
containerID="656706d31855991db104dc63417a4a2dad789a13059f74bc9caee3f4a9998a35" exitCode=143 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.850520 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-78bd467b68-h25ls" event={"ID":"622ebcc7-b645-4db6-86c0-3546523fb7c7","Type":"ContainerDied","Data":"656706d31855991db104dc63417a4a2dad789a13059f74bc9caee3f4a9998a35"} Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.858069 4811 generic.go:334] "Generic (PLEG): container finished" podID="bfd184d9-d9d2-4a4b-a672-bfb76837eaca" containerID="2737a1ce65612a85e61445090a2668841ba19af66f524535b6accf0e61ca2d75" exitCode=0 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.858141 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-867cd545c7-s5tc2" event={"ID":"bfd184d9-d9d2-4a4b-a672-bfb76837eaca","Type":"ContainerDied","Data":"2737a1ce65612a85e61445090a2668841ba19af66f524535b6accf0e61ca2d75"} Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.867974 4811 generic.go:334] "Generic (PLEG): container finished" podID="a1a4fd4e-29a2-464f-aca7-3f856ed15221" containerID="f494267049de0afa9aadacc83552d047aaa6b559a44a9bc8ab504e3eb508fb84" exitCode=143 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.868033 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7b7c5df494-2hz6m" event={"ID":"a1a4fd4e-29a2-464f-aca7-3f856ed15221","Type":"ContainerDied","Data":"f494267049de0afa9aadacc83552d047aaa6b559a44a9bc8ab504e3eb508fb84"} Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.871657 4811 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.876868 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-99f2-account-create-update-hzmxs"] Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.878029 4811 generic.go:334] "Generic (PLEG): container finished" podID="73a0ad8a-2cb9-466e-b3e7-251823ea4528" containerID="d30f3af4b0041c5e2a1bda1036b2e0f19543d876ef7630815ac0f86fd2173605" exitCode=0 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.878524 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-86846dcc5c-hkg4d" event={"ID":"73a0ad8a-2cb9-466e-b3e7-251823ea4528","Type":"ContainerDied","Data":"d30f3af4b0041c5e2a1bda1036b2e0f19543d876ef7630815ac0f86fd2173605"} Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.881674 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9d8f73c-afb3-4e7e-a69a-e8899bf051cc-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "e9d8f73c-afb3-4e7e-a69a-e8899bf051cc" (UID: "e9d8f73c-afb3-4e7e-a69a-e8899bf051cc"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.882083 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-d62hg_3e79a2a6-54aa-4533-95ee-9deb5c46811b/openstack-network-exporter/0.log" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.882117 4811 generic.go:334] "Generic (PLEG): container finished" podID="3e79a2a6-54aa-4533-95ee-9deb5c46811b" containerID="2e79233cf89f05e85e48d2caa1daae0928b9b4d3e75da1ee62c6549871edbf80" exitCode=2 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.882163 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-d62hg" event={"ID":"3e79a2a6-54aa-4533-95ee-9deb5c46811b","Type":"ContainerDied","Data":"2e79233cf89f05e85e48d2caa1daae0928b9b4d3e75da1ee62c6549871edbf80"} Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.882193 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-d62hg" event={"ID":"3e79a2a6-54aa-4533-95ee-9deb5c46811b","Type":"ContainerDied","Data":"47666d69e88477ec11bb6fd81a9729829eb8a83752e3d8e0d01393bcf0093d9f"} Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.882244 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-d62hg" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.886162 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-917c-account-create-update-slsg4" event={"ID":"13f72e20-02af-43d9-8400-8180b175795e","Type":"ContainerStarted","Data":"fb7d5e61f53c019ac1a1c55760774d6a07b10f7c68beb7d1afb0423c7928d68c"} Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.896338 4811 generic.go:334] "Generic (PLEG): container finished" podID="f087b3b8-82d6-4cb1-a883-21a2f0c40580" containerID="0a27d600e6b6c3ccf2865906d7a304e9db45620c7979a493a59166dfb8cd6b04" exitCode=0 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.896394 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-56fd96c5b5-d2r9l" event={"ID":"f087b3b8-82d6-4cb1-a883-21a2f0c40580","Type":"ContainerDied","Data":"0a27d600e6b6c3ccf2865906d7a304e9db45620c7979a493a59166dfb8cd6b04"} Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.924139 4811 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e9d8f73c-afb3-4e7e-a69a-e8899bf051cc-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.924497 4811 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.936887 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad905f29-3683-466d-8acd-192cc5ac0960-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ad905f29-3683-466d-8acd-192cc5ac0960" (UID: "ad905f29-3683-466d-8acd-192cc5ac0960"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:10:58 crc kubenswrapper[4811]: E0128 16:10:58.937659 4811 kuberuntime_manager.go:1274] "Unhandled Error" err=<
Jan 28 16:10:58 crc kubenswrapper[4811]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash
Jan 28 16:10:58 crc kubenswrapper[4811]:
Jan 28 16:10:58 crc kubenswrapper[4811]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh
Jan 28 16:10:58 crc kubenswrapper[4811]:
Jan 28 16:10:58 crc kubenswrapper[4811]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."}
Jan 28 16:10:58 crc kubenswrapper[4811]:
Jan 28 16:10:58 crc kubenswrapper[4811]: MYSQL_CMD="mysql -h -u root -P 3306"
Jan 28 16:10:58 crc kubenswrapper[4811]:
Jan 28 16:10:58 crc kubenswrapper[4811]: if [ -n "barbican" ]; then
Jan 28 16:10:58 crc kubenswrapper[4811]: GRANT_DATABASE="barbican"
Jan 28 16:10:58 crc kubenswrapper[4811]: else
Jan 28 16:10:58 crc kubenswrapper[4811]: GRANT_DATABASE="*"
Jan 28 16:10:58 crc kubenswrapper[4811]: fi
Jan 28 16:10:58 crc kubenswrapper[4811]:
Jan 28 16:10:58 crc kubenswrapper[4811]: # going for maximum compatibility here:
Jan 28 16:10:58 crc kubenswrapper[4811]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used
Jan 28 16:10:58 crc kubenswrapper[4811]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not
Jan 28 16:10:58 crc kubenswrapper[4811]: # 3. create user with CREATE but then do all password and TLS with ALTER to
Jan 28 16:10:58 crc kubenswrapper[4811]: # support updates
Jan 28 16:10:58 crc kubenswrapper[4811]:
Jan 28 16:10:58 crc kubenswrapper[4811]: $MYSQL_CMD < logger="UnhandledError"
Jan 28 16:10:58 crc kubenswrapper[4811]: E0128 16:10:58.938774 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"barbican-db-secret\\\" not found\"" pod="openstack/barbican-917c-account-create-update-slsg4" podUID="13f72e20-02af-43d9-8400-8180b175795e"
Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.944787 4811 generic.go:334] "Generic (PLEG): container finished" podID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerID="2ff7e313dd106d84ce405602fc4e33fb5da9f55f4e92c636152ab812ff49a6b8" exitCode=0
Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.944813 4811 generic.go:334] "Generic (PLEG): container finished" podID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerID="a455d95149982d555c46bea1485d4f01300e6c7c316898ea0120eeec661bed44" exitCode=0
Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.944822 4811 generic.go:334] "Generic (PLEG): container finished" podID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerID="df884a847f6dadbb8aca00b6654043b6c0784215f4e1b939b394aa4eb681e365" exitCode=0
Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.944829 4811 generic.go:334] "Generic (PLEG): container finished" podID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerID="b2aab4d8e8aa65a46a59daae5fc92084fdb3171e37ce4f82301d54bc92d83bac" exitCode=0
Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.944837 4811 generic.go:334] "Generic (PLEG): container finished" podID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerID="de17a3ae96cd44c5e7539204c9ecb4a99908a1d59c0cf7b1b30906afe8cca2ea" exitCode=0
Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.944843
4811 generic.go:334] "Generic (PLEG): container finished" podID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerID="4c641018196714d8caf4ab6b48967eb4bb8ff1d06fab1301e8b3ab46913539dd" exitCode=0 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.944848 4811 generic.go:334] "Generic (PLEG): container finished" podID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerID="edf4c2725cabb62cf88b0f7f7190df88433ba5987d5a8be69c4b5a42e602f8aa" exitCode=0 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.944855 4811 generic.go:334] "Generic (PLEG): container finished" podID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerID="91e208e8d04b20770d870040f31d89e907527f347a650e9696cbc4d272f573aa" exitCode=0 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.944861 4811 generic.go:334] "Generic (PLEG): container finished" podID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerID="37e7bde4db551b38403da3cfc7c146a5e67d421d13ffa40d87ce60bbfdaef222" exitCode=0 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.944867 4811 generic.go:334] "Generic (PLEG): container finished" podID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerID="417e8d88bc175d3ce7bec37395bdbe9c0f310c931fbd100484ebab310a34e41a" exitCode=0 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.944874 4811 generic.go:334] "Generic (PLEG): container finished" podID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerID="fa14b738e3c3edb62ca81416e8099d28a73bafc1416ebecc67cb280bd16a3afb" exitCode=0 Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.945312 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b","Type":"ContainerDied","Data":"2ff7e313dd106d84ce405602fc4e33fb5da9f55f4e92c636152ab812ff49a6b8"} Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.945380 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b","Type":"ContainerDied","Data":"a455d95149982d555c46bea1485d4f01300e6c7c316898ea0120eeec661bed44"} Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.945398 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b","Type":"ContainerDied","Data":"df884a847f6dadbb8aca00b6654043b6c0784215f4e1b939b394aa4eb681e365"} Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.945415 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b","Type":"ContainerDied","Data":"b2aab4d8e8aa65a46a59daae5fc92084fdb3171e37ce4f82301d54bc92d83bac"} Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.945446 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b","Type":"ContainerDied","Data":"de17a3ae96cd44c5e7539204c9ecb4a99908a1d59c0cf7b1b30906afe8cca2ea"} Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.945461 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b","Type":"ContainerDied","Data":"4c641018196714d8caf4ab6b48967eb4bb8ff1d06fab1301e8b3ab46913539dd"} Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.945473 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b","Type":"ContainerDied","Data":"edf4c2725cabb62cf88b0f7f7190df88433ba5987d5a8be69c4b5a42e602f8aa"} Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.945488 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b","Type":"ContainerDied","Data":"91e208e8d04b20770d870040f31d89e907527f347a650e9696cbc4d272f573aa"} Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.945502 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b","Type":"ContainerDied","Data":"37e7bde4db551b38403da3cfc7c146a5e67d421d13ffa40d87ce60bbfdaef222"} Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.945514 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b","Type":"ContainerDied","Data":"417e8d88bc175d3ce7bec37395bdbe9c0f310c931fbd100484ebab310a34e41a"} Jan 28 16:10:58 crc kubenswrapper[4811]: I0128 16:10:58.945529 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b","Type":"ContainerDied","Data":"fa14b738e3c3edb62ca81416e8099d28a73bafc1416ebecc67cb280bd16a3afb"} Jan 28 16:10:58 crc kubenswrapper[4811]: E0128 16:10:58.956870 4811 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 28 16:10:58 crc kubenswrapper[4811]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 28 16:10:58 crc kubenswrapper[4811]: Jan 28 16:10:58 crc kubenswrapper[4811]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 28 16:10:58 crc kubenswrapper[4811]: Jan 28 16:10:58 crc kubenswrapper[4811]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 28 16:10:58 crc kubenswrapper[4811]: Jan 28 16:10:58 crc kubenswrapper[4811]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 28 16:10:58 crc kubenswrapper[4811]: Jan 28 16:10:58 crc kubenswrapper[4811]: if [ -n "nova_cell0" ]; then Jan 28 16:10:58 crc kubenswrapper[4811]: GRANT_DATABASE="nova_cell0" Jan 28 16:10:58 crc kubenswrapper[4811]: else Jan 28 16:10:58 crc kubenswrapper[4811]: GRANT_DATABASE="*" Jan 28 16:10:58 crc kubenswrapper[4811]: fi Jan 28 16:10:58 crc kubenswrapper[4811]: Jan 28 16:10:58 crc kubenswrapper[4811]: # going for maximum compatibility here: Jan 28 16:10:58 crc kubenswrapper[4811]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 28 16:10:58 crc kubenswrapper[4811]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 28 16:10:58 crc kubenswrapper[4811]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 28 16:10:58 crc kubenswrapper[4811]: # support updates Jan 28 16:10:58 crc kubenswrapper[4811]: Jan 28 16:10:58 crc kubenswrapper[4811]: $MYSQL_CMD < logger="UnhandledError" Jan 28 16:10:58 crc kubenswrapper[4811]: E0128 16:10:58.959690 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-cell0-db-secret\\\" not found\"" pod="openstack/nova-cell0-99f2-account-create-update-hzmxs" podUID="8df573b1-31df-4d78-9e7a-66cf18d81aae" Jan 28 16:10:58 crc kubenswrapper[4811]: E0128 16:10:58.972543 4811 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 28 16:10:58 crc kubenswrapper[4811]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 28 16:10:58 crc kubenswrapper[4811]: Jan 28 16:10:58 crc kubenswrapper[4811]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 28 16:10:58 crc kubenswrapper[4811]: Jan 28 16:10:58 crc kubenswrapper[4811]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 28 16:10:58 crc kubenswrapper[4811]: Jan 28 16:10:58 crc kubenswrapper[4811]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 28 16:10:58 crc kubenswrapper[4811]: Jan 28 16:10:58 crc kubenswrapper[4811]: if [ -n "cinder" ]; then Jan 28 16:10:58 crc kubenswrapper[4811]: GRANT_DATABASE="cinder" Jan 28 16:10:58 crc kubenswrapper[4811]: else Jan 28 16:10:58 crc kubenswrapper[4811]: GRANT_DATABASE="*" Jan 28 16:10:58 crc kubenswrapper[4811]: fi Jan 28 16:10:58 crc kubenswrapper[4811]: Jan 28 16:10:58 crc kubenswrapper[4811]: # going for maximum compatibility here: Jan 28 16:10:58 crc kubenswrapper[4811]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 28 16:10:58 crc kubenswrapper[4811]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 28 16:10:58 crc kubenswrapper[4811]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 28 16:10:58 crc kubenswrapper[4811]: # support updates Jan 28 16:10:58 crc kubenswrapper[4811]: Jan 28 16:10:58 crc kubenswrapper[4811]: $MYSQL_CMD < logger="UnhandledError" Jan 28 16:10:58 crc kubenswrapper[4811]: E0128 16:10:58.973649 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"cinder-db-secret\\\" not found\"" pod="openstack/cinder-89d7-account-create-update-7tjkx" podUID="4815adcf-373b-4460-9f8c-d6dfaf471196" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.003347 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad905f29-3683-466d-8acd-192cc5ac0960-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "ad905f29-3683-466d-8acd-192cc5ac0960" (UID: "ad905f29-3683-466d-8acd-192cc5ac0960"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". 
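
The two "Error syncing pod" entries above are CreateContainerConfigError, not script failures: the kubelet could not assemble the container's environment because the referenced secrets ("nova-cell0-db-secret", "cinder-db-secret") do not exist yet, so the mariadb-account-create-update containers never started. A minimal check, assuming access to the same cluster and namespace (the oc client is an assumption; kubectl behaves identically):

    # Both names come straight from the errors above; a NotFound reply
    # confirms the kubelet is simply waiting for the secrets to appear.
    oc -n openstack get secret nova-cell0-db-secret cinder-db-secret
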
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.005072 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad905f29-3683-466d-8acd-192cc5ac0960-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "ad905f29-3683-466d-8acd-192cc5ac0960" (UID: "ad905f29-3683-466d-8acd-192cc5ac0960"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.027558 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad905f29-3683-466d-8acd-192cc5ac0960-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.027592 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad905f29-3683-466d-8acd-192cc5ac0960-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.027606 4811 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad905f29-3683-466d-8acd-192cc5ac0960-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:59 crc kubenswrapper[4811]: E0128 16:10:59.030039 4811 configmap.go:193] Couldn't get configMap openstack/ovncontroller-scripts: configmap "ovncontroller-scripts" not found Jan 28 16:10:59 crc kubenswrapper[4811]: E0128 16:10:59.030906 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-scripts podName:1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e nodeName:}" failed. No retries permitted until 2026-01-28 16:11:01.03088143 +0000 UTC m=+1553.785245073 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/configmap/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-scripts") pod "ovn-controller-fxxlf" (UID: "1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e") : configmap "ovncontroller-scripts" not found Jan 28 16:10:59 crc kubenswrapper[4811]: E0128 16:10:59.046789 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4bf48c2e9b3287834317b579380166a3b7e88ed93df7ceeb2163cfc00138fa94" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.048566 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-k598l"] Jan 28 16:10:59 crc kubenswrapper[4811]: E0128 16:10:59.056190 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4bf48c2e9b3287834317b579380166a3b7e88ed93df7ceeb2163cfc00138fa94" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 28 16:10:59 crc kubenswrapper[4811]: E0128 16:10:59.060695 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4bf48c2e9b3287834317b579380166a3b7e88ed93df7ceeb2163cfc00138fa94" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 28 16:10:59 crc kubenswrapper[4811]: E0128 16:10:59.060747 4811 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="6dd44d16-5b30-493c-9dd2-1ba856a4393a" containerName="ovn-northd" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.061359 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-2952-account-create-update-df9kg"] Jan 28 16:10:59 crc kubenswrapper[4811]: E0128 16:10:59.062117 4811 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 28 16:10:59 crc kubenswrapper[4811]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 28 16:10:59 crc kubenswrapper[4811]: Jan 28 16:10:59 crc kubenswrapper[4811]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 28 16:10:59 crc kubenswrapper[4811]: Jan 28 16:10:59 crc kubenswrapper[4811]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 28 16:10:59 crc kubenswrapper[4811]: Jan 28 16:10:59 crc kubenswrapper[4811]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 28 16:10:59 crc kubenswrapper[4811]: Jan 28 16:10:59 crc kubenswrapper[4811]: if [ -n "nova_api" ]; then Jan 28 16:10:59 crc kubenswrapper[4811]: GRANT_DATABASE="nova_api" Jan 28 16:10:59 crc kubenswrapper[4811]: else Jan 28 16:10:59 crc kubenswrapper[4811]: GRANT_DATABASE="*" Jan 28 16:10:59 crc kubenswrapper[4811]: fi Jan 28 16:10:59 crc kubenswrapper[4811]: Jan 28 16:10:59 crc kubenswrapper[4811]: # going for maximum compatibility here: Jan 28 16:10:59 crc kubenswrapper[4811]: # 1. 
MySQL 8 no longer allows implicit create user when GRANT is used Jan 28 16:10:59 crc kubenswrapper[4811]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 28 16:10:59 crc kubenswrapper[4811]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 28 16:10:59 crc kubenswrapper[4811]: # support updates Jan 28 16:10:59 crc kubenswrapper[4811]: Jan 28 16:10:59 crc kubenswrapper[4811]: $MYSQL_CMD < logger="UnhandledError" Jan 28 16:10:59 crc kubenswrapper[4811]: E0128 16:10:59.065421 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-api-db-secret\\\" not found\"" pod="openstack/nova-api-2952-account-create-update-df9kg" podUID="7b876d22-ae70-479d-82b1-113a9d760a0f" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.101652 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc54361e-5f34-4253-b91f-b1683e944191-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "bc54361e-5f34-4253-b91f-b1683e944191" (UID: "bc54361e-5f34-4253-b91f-b1683e944191"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.102615 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc54361e-5f34-4253-b91f-b1683e944191-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "bc54361e-5f34-4253-b91f-b1683e944191" (UID: "bc54361e-5f34-4253-b91f-b1683e944191"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.113286 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e79a2a6-54aa-4533-95ee-9deb5c46811b-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "3e79a2a6-54aa-4533-95ee-9deb5c46811b" (UID: "3e79a2a6-54aa-4533-95ee-9deb5c46811b"). InnerVolumeSpecName "metrics-certs-tls-certs". 
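
All three mariadb-account-create-update blocks above embed the same bash script, and in each one the capture truncates the heredoc that follows "$MYSQL_CMD <", leaving only the script's comments. A sketch of the pattern those comments describe, not the truncated body itself, using nova_api as a stand-in for the templated account name:

    # Illustrative reconstruction based only on the comments above:
    # CREATE first (MySQL 8 forbids implicit create-on-GRANT, and
    # CREATE OR REPLACE USER is MariaDB-only), then ALTER for the
    # password so a re-run behaves as an update, then GRANT.
    $MYSQL_CMD <<EOF
    CREATE USER IF NOT EXISTS 'nova_api'@'%';
    ALTER USER 'nova_api'@'%' IDENTIFIED BY '${DatabasePassword}';
    GRANT ALL PRIVILEGES ON \`${GRANT_DATABASE}\`.* TO 'nova_api'@'%';
    EOF
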
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.131354 4811 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bc54361e-5f34-4253-b91f-b1683e944191-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.131382 4811 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e79a2a6-54aa-4533-95ee-9deb5c46811b-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.131391 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bc54361e-5f34-4253-b91f-b1683e944191-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:59 crc kubenswrapper[4811]: E0128 16:10:59.142089 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8e97f5379d59be0d7d51607bfdc713017cc666c439edf2b55720cc1ee7a94ec6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 28 16:10:59 crc kubenswrapper[4811]: E0128 16:10:59.145964 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8e97f5379d59be0d7d51607bfdc713017cc666c439edf2b55720cc1ee7a94ec6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 28 16:10:59 crc kubenswrapper[4811]: E0128 16:10:59.148076 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8e97f5379d59be0d7d51607bfdc713017cc666c439edf2b55720cc1ee7a94ec6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 28 16:10:59 crc kubenswrapper[4811]: E0128 16:10:59.148108 4811 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="f76f6c0a-7254-4c76-86c3-e1004cf38303" containerName="nova-cell0-conductor-conductor" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.214231 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.214775 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-867cd545c7-s5tc2" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.229577 4811 scope.go:117] "RemoveContainer" containerID="fa6828f22b3be25a45cd83e03b0c456790ff1d215506294c5c624f55eae5a0fc" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.232910 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-dns-swift-storage-0\") pod \"bfd184d9-d9d2-4a4b-a672-bfb76837eaca\" (UID: \"bfd184d9-d9d2-4a4b-a672-bfb76837eaca\") " Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.232958 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-ovsdbserver-sb\") pod \"bfd184d9-d9d2-4a4b-a672-bfb76837eaca\" (UID: \"bfd184d9-d9d2-4a4b-a672-bfb76837eaca\") " Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.232989 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7hk7k\" (UniqueName: \"kubernetes.io/projected/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-kube-api-access-7hk7k\") pod \"bfd184d9-d9d2-4a4b-a672-bfb76837eaca\" (UID: \"bfd184d9-d9d2-4a4b-a672-bfb76837eaca\") " Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.233019 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-dns-svc\") pod \"bfd184d9-d9d2-4a4b-a672-bfb76837eaca\" (UID: \"bfd184d9-d9d2-4a4b-a672-bfb76837eaca\") " Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.233046 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-ovsdbserver-nb\") pod \"bfd184d9-d9d2-4a4b-a672-bfb76837eaca\" (UID: \"bfd184d9-d9d2-4a4b-a672-bfb76837eaca\") " Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.233070 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-config\") pod \"bfd184d9-d9d2-4a4b-a672-bfb76837eaca\" (UID: \"bfd184d9-d9d2-4a4b-a672-bfb76837eaca\") " Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.248099 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-kube-api-access-7hk7k" (OuterVolumeSpecName: "kube-api-access-7hk7k") pod "bfd184d9-d9d2-4a4b-a672-bfb76837eaca" (UID: "bfd184d9-d9d2-4a4b-a672-bfb76837eaca"). InnerVolumeSpecName "kube-api-access-7hk7k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.249187 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.270543 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-d62hg"] Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.273222 4811 scope.go:117] "RemoveContainer" containerID="a7b75f68bd55325c1892a8a68699e72299412f605399d3727dc7a8c78169fd1c" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.279517 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-metrics-d62hg"] Jan 28 16:10:59 crc kubenswrapper[4811]: E0128 16:10:59.328350 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d9d98a9ef617e0914e95c49e3f4209449a918bf828d1812a874f55e9669c0fbe" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 28 16:10:59 crc kubenswrapper[4811]: E0128 16:10:59.330907 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d9d98a9ef617e0914e95c49e3f4209449a918bf828d1812a874f55e9669c0fbe" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 28 16:10:59 crc kubenswrapper[4811]: E0128 16:10:59.332409 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d9d98a9ef617e0914e95c49e3f4209449a918bf828d1812a874f55e9669c0fbe" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 28 16:10:59 crc kubenswrapper[4811]: E0128 16:10:59.332492 4811 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="f5158a65-ce89-4a2b-9a19-0e4a6070562a" containerName="nova-scheduler-scheduler" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.352185 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7hk7k\" (UniqueName: \"kubernetes.io/projected/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-kube-api-access-7hk7k\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.367568 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.376589 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.394052 4811 scope.go:117] "RemoveContainer" containerID="17b26e241f71fd0bba9cd473dd072e8e999cd250ef30a45cea6cacdf8de99886" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.426383 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-config" (OuterVolumeSpecName: "config") pod "bfd184d9-d9d2-4a4b-a672-bfb76837eaca" (UID: "bfd184d9-d9d2-4a4b-a672-bfb76837eaca"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.426951 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "bfd184d9-d9d2-4a4b-a672-bfb76837eaca" (UID: "bfd184d9-d9d2-4a4b-a672-bfb76837eaca"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.429415 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bfd184d9-d9d2-4a4b-a672-bfb76837eaca" (UID: "bfd184d9-d9d2-4a4b-a672-bfb76837eaca"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.429729 4811 scope.go:117] "RemoveContainer" containerID="17b26e241f71fd0bba9cd473dd072e8e999cd250ef30a45cea6cacdf8de99886" Jan 28 16:10:59 crc kubenswrapper[4811]: E0128 16:10:59.430676 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"17b26e241f71fd0bba9cd473dd072e8e999cd250ef30a45cea6cacdf8de99886\": container with ID starting with 17b26e241f71fd0bba9cd473dd072e8e999cd250ef30a45cea6cacdf8de99886 not found: ID does not exist" containerID="17b26e241f71fd0bba9cd473dd072e8e999cd250ef30a45cea6cacdf8de99886" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.430714 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17b26e241f71fd0bba9cd473dd072e8e999cd250ef30a45cea6cacdf8de99886"} err="failed to get container status \"17b26e241f71fd0bba9cd473dd072e8e999cd250ef30a45cea6cacdf8de99886\": rpc error: code = NotFound desc = could not find container \"17b26e241f71fd0bba9cd473dd072e8e999cd250ef30a45cea6cacdf8de99886\": container with ID starting with 17b26e241f71fd0bba9cd473dd072e8e999cd250ef30a45cea6cacdf8de99886 not found: ID does not exist" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.430739 4811 scope.go:117] "RemoveContainer" containerID="2e79233cf89f05e85e48d2caa1daae0928b9b4d3e75da1ee62c6549871edbf80" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.443619 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "bfd184d9-d9d2-4a4b-a672-bfb76837eaca" (UID: "bfd184d9-d9d2-4a4b-a672-bfb76837eaca"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.447689 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "bfd184d9-d9d2-4a4b-a672-bfb76837eaca" (UID: "bfd184d9-d9d2-4a4b-a672-bfb76837eaca"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.456840 4811 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.456868 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.456878 4811 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.456887 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.456895 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfd184d9-d9d2-4a4b-a672-bfb76837eaca-config\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.458149 4811 scope.go:117] "RemoveContainer" containerID="2e79233cf89f05e85e48d2caa1daae0928b9b4d3e75da1ee62c6549871edbf80" Jan 28 16:10:59 crc kubenswrapper[4811]: E0128 16:10:59.461540 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e79233cf89f05e85e48d2caa1daae0928b9b4d3e75da1ee62c6549871edbf80\": container with ID starting with 2e79233cf89f05e85e48d2caa1daae0928b9b4d3e75da1ee62c6549871edbf80 not found: ID does not exist" containerID="2e79233cf89f05e85e48d2caa1daae0928b9b4d3e75da1ee62c6549871edbf80" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.461571 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e79233cf89f05e85e48d2caa1daae0928b9b4d3e75da1ee62c6549871edbf80"} err="failed to get container status \"2e79233cf89f05e85e48d2caa1daae0928b9b4d3e75da1ee62c6549871edbf80\": rpc error: code = NotFound desc = could not find container \"2e79233cf89f05e85e48d2caa1daae0928b9b4d3e75da1ee62c6549871edbf80\": container with ID starting with 2e79233cf89f05e85e48d2caa1daae0928b9b4d3e75da1ee62c6549871edbf80 not found: ID does not exist" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.583317 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-56fd96c5b5-d2r9l" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.660281 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f087b3b8-82d6-4cb1-a883-21a2f0c40580-log-httpd\") pod \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\" (UID: \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\") " Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.660329 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f087b3b8-82d6-4cb1-a883-21a2f0c40580-config-data\") pod \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\" (UID: \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\") " Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.660413 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f087b3b8-82d6-4cb1-a883-21a2f0c40580-etc-swift\") pod \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\" (UID: \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\") " Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.660481 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f087b3b8-82d6-4cb1-a883-21a2f0c40580-run-httpd\") pod \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\" (UID: \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\") " Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.660547 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f087b3b8-82d6-4cb1-a883-21a2f0c40580-combined-ca-bundle\") pod \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\" (UID: \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\") " Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.660609 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ws7lj\" (UniqueName: \"kubernetes.io/projected/f087b3b8-82d6-4cb1-a883-21a2f0c40580-kube-api-access-ws7lj\") pod \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\" (UID: \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\") " Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.660633 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f087b3b8-82d6-4cb1-a883-21a2f0c40580-internal-tls-certs\") pod \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\" (UID: \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\") " Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.660674 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f087b3b8-82d6-4cb1-a883-21a2f0c40580-public-tls-certs\") pod \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\" (UID: \"f087b3b8-82d6-4cb1-a883-21a2f0c40580\") " Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.660726 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f087b3b8-82d6-4cb1-a883-21a2f0c40580-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f087b3b8-82d6-4cb1-a883-21a2f0c40580" (UID: "f087b3b8-82d6-4cb1-a883-21a2f0c40580"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.660865 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f087b3b8-82d6-4cb1-a883-21a2f0c40580-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f087b3b8-82d6-4cb1-a883-21a2f0c40580" (UID: "f087b3b8-82d6-4cb1-a883-21a2f0c40580"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.661574 4811 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f087b3b8-82d6-4cb1-a883-21a2f0c40580-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.661590 4811 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f087b3b8-82d6-4cb1-a883-21a2f0c40580-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.667135 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f087b3b8-82d6-4cb1-a883-21a2f0c40580-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "f087b3b8-82d6-4cb1-a883-21a2f0c40580" (UID: "f087b3b8-82d6-4cb1-a883-21a2f0c40580"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.674184 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f087b3b8-82d6-4cb1-a883-21a2f0c40580-kube-api-access-ws7lj" (OuterVolumeSpecName: "kube-api-access-ws7lj") pod "f087b3b8-82d6-4cb1-a883-21a2f0c40580" (UID: "f087b3b8-82d6-4cb1-a883-21a2f0c40580"). InnerVolumeSpecName "kube-api-access-ws7lj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.749669 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f087b3b8-82d6-4cb1-a883-21a2f0c40580-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "f087b3b8-82d6-4cb1-a883-21a2f0c40580" (UID: "f087b3b8-82d6-4cb1-a883-21a2f0c40580"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.759475 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f087b3b8-82d6-4cb1-a883-21a2f0c40580-config-data" (OuterVolumeSpecName: "config-data") pod "f087b3b8-82d6-4cb1-a883-21a2f0c40580" (UID: "f087b3b8-82d6-4cb1-a883-21a2f0c40580"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.764392 4811 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f087b3b8-82d6-4cb1-a883-21a2f0c40580-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.764445 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ws7lj\" (UniqueName: \"kubernetes.io/projected/f087b3b8-82d6-4cb1-a883-21a2f0c40580-kube-api-access-ws7lj\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.764460 4811 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f087b3b8-82d6-4cb1-a883-21a2f0c40580-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.764472 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f087b3b8-82d6-4cb1-a883-21a2f0c40580-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:59 crc kubenswrapper[4811]: E0128 16:10:59.764545 4811 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 28 16:10:59 crc kubenswrapper[4811]: E0128 16:10:59.764599 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/3b7599aa-7114-46c9-bf38-b6d735b75326-config-data podName:3b7599aa-7114-46c9-bf38-b6d735b75326 nodeName:}" failed. No retries permitted until 2026-01-28 16:11:03.764580158 +0000 UTC m=+1556.518943741 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/3b7599aa-7114-46c9-bf38-b6d735b75326-config-data") pod "rabbitmq-cell1-server-0" (UID: "3b7599aa-7114-46c9-bf38-b6d735b75326") : configmap "rabbitmq-cell1-config-data" not found Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.794737 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f087b3b8-82d6-4cb1-a883-21a2f0c40580-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "f087b3b8-82d6-4cb1-a883-21a2f0c40580" (UID: "f087b3b8-82d6-4cb1-a883-21a2f0c40580"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.796168 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f087b3b8-82d6-4cb1-a883-21a2f0c40580-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f087b3b8-82d6-4cb1-a883-21a2f0c40580" (UID: "f087b3b8-82d6-4cb1-a883-21a2f0c40580"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.884581 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f087b3b8-82d6-4cb1-a883-21a2f0c40580-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.884613 4811 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f087b3b8-82d6-4cb1-a883-21a2f0c40580-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.932840 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.985953 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/0bb2deb3-c0dd-4102-96dd-b21dd187bf89-nova-novncproxy-tls-certs\") pod \"0bb2deb3-c0dd-4102-96dd-b21dd187bf89\" (UID: \"0bb2deb3-c0dd-4102-96dd-b21dd187bf89\") " Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.985998 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0bb2deb3-c0dd-4102-96dd-b21dd187bf89-config-data\") pod \"0bb2deb3-c0dd-4102-96dd-b21dd187bf89\" (UID: \"0bb2deb3-c0dd-4102-96dd-b21dd187bf89\") " Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.986178 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/0bb2deb3-c0dd-4102-96dd-b21dd187bf89-vencrypt-tls-certs\") pod \"0bb2deb3-c0dd-4102-96dd-b21dd187bf89\" (UID: \"0bb2deb3-c0dd-4102-96dd-b21dd187bf89\") " Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.986278 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9bn5z\" (UniqueName: \"kubernetes.io/projected/0bb2deb3-c0dd-4102-96dd-b21dd187bf89-kube-api-access-9bn5z\") pod \"0bb2deb3-c0dd-4102-96dd-b21dd187bf89\" (UID: \"0bb2deb3-c0dd-4102-96dd-b21dd187bf89\") " Jan 28 16:10:59 crc kubenswrapper[4811]: I0128 16:10:59.986297 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bb2deb3-c0dd-4102-96dd-b21dd187bf89-combined-ca-bundle\") pod \"0bb2deb3-c0dd-4102-96dd-b21dd187bf89\" (UID: \"0bb2deb3-c0dd-4102-96dd-b21dd187bf89\") " Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.010976 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0bb2deb3-c0dd-4102-96dd-b21dd187bf89-kube-api-access-9bn5z" (OuterVolumeSpecName: "kube-api-access-9bn5z") pod "0bb2deb3-c0dd-4102-96dd-b21dd187bf89" (UID: "0bb2deb3-c0dd-4102-96dd-b21dd187bf89"). InnerVolumeSpecName "kube-api-access-9bn5z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.011193 4811 generic.go:334] "Generic (PLEG): container finished" podID="a2ab1b57-166e-4409-8b71-0bb922787623" containerID="03205b6d59e1ada02c7d3945a11dd43315f762964b49cd64bc2729dd34ccdbf7" exitCode=1 Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.011261 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-k598l" event={"ID":"a2ab1b57-166e-4409-8b71-0bb922787623","Type":"ContainerDied","Data":"03205b6d59e1ada02c7d3945a11dd43315f762964b49cd64bc2729dd34ccdbf7"} Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.011281 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-k598l" event={"ID":"a2ab1b57-166e-4409-8b71-0bb922787623","Type":"ContainerStarted","Data":"093d125098a6804dc699d75f4d7b3713eeb1b57a33cde60d8b85f2bc1403177a"} Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.012082 4811 scope.go:117] "RemoveContainer" containerID="03205b6d59e1ada02c7d3945a11dd43315f762964b49cd64bc2729dd34ccdbf7" Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.014809 4811 generic.go:334] "Generic (PLEG): container finished" podID="f087b3b8-82d6-4cb1-a883-21a2f0c40580" containerID="a235c2b4225ce39f8a09e2a45966350b20607db9b21d8ff27ca73be5d353cadd" exitCode=0 Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.014861 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-56fd96c5b5-d2r9l" event={"ID":"f087b3b8-82d6-4cb1-a883-21a2f0c40580","Type":"ContainerDied","Data":"a235c2b4225ce39f8a09e2a45966350b20607db9b21d8ff27ca73be5d353cadd"} Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.014890 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-56fd96c5b5-d2r9l" event={"ID":"f087b3b8-82d6-4cb1-a883-21a2f0c40580","Type":"ContainerDied","Data":"2cae1699d6600a62abe5afb6ed86fe1873f7f678d8129a154b95f481914171e1"} Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.014904 4811 scope.go:117] "RemoveContainer" containerID="a235c2b4225ce39f8a09e2a45966350b20607db9b21d8ff27ca73be5d353cadd" Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.014979 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-56fd96c5b5-d2r9l" Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.017734 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0bb2deb3-c0dd-4102-96dd-b21dd187bf89-config-data" (OuterVolumeSpecName: "config-data") pod "0bb2deb3-c0dd-4102-96dd-b21dd187bf89" (UID: "0bb2deb3-c0dd-4102-96dd-b21dd187bf89"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.027487 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-867cd545c7-s5tc2" event={"ID":"bfd184d9-d9d2-4a4b-a672-bfb76837eaca","Type":"ContainerDied","Data":"6eb827e232c933ee3a3117528575820dd17b0085464efb3cbedaee14dc25f970"} Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.027737 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-867cd545c7-s5tc2" Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.043906 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-2952-account-create-update-df9kg" event={"ID":"7b876d22-ae70-479d-82b1-113a9d760a0f","Type":"ContainerStarted","Data":"e291178e43d515b5144ff7ee4fa996a985a42844a923284bc56358789d2e5755"} Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.051865 4811 generic.go:334] "Generic (PLEG): container finished" podID="8bfbbf41-033c-479e-b625-396378f8afa2" containerID="d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21" exitCode=0 Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.051920 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-wzxwb" event={"ID":"8bfbbf41-033c-479e-b625-396378f8afa2","Type":"ContainerDied","Data":"d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21"} Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.053999 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0bb2deb3-c0dd-4102-96dd-b21dd187bf89-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "0bb2deb3-c0dd-4102-96dd-b21dd187bf89" (UID: "0bb2deb3-c0dd-4102-96dd-b21dd187bf89"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.067152 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-99f2-account-create-update-hzmxs" event={"ID":"8df573b1-31df-4d78-9e7a-66cf18d81aae","Type":"ContainerStarted","Data":"d5bc1bd427c68235218cdae0b0ad750eedb4d42929ac26886b17e8eb0cafcb27"} Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.071624 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0bb2deb3-c0dd-4102-96dd-b21dd187bf89-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0bb2deb3-c0dd-4102-96dd-b21dd187bf89" (UID: "0bb2deb3-c0dd-4102-96dd-b21dd187bf89"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.080080 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0bb2deb3-c0dd-4102-96dd-b21dd187bf89-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "0bb2deb3-c0dd-4102-96dd-b21dd187bf89" (UID: "0bb2deb3-c0dd-4102-96dd-b21dd187bf89"). InnerVolumeSpecName "vencrypt-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.089355 4811 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/0bb2deb3-c0dd-4102-96dd-b21dd187bf89-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.089589 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0bb2deb3-c0dd-4102-96dd-b21dd187bf89-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.089690 4811 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/0bb2deb3-c0dd-4102-96dd-b21dd187bf89-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.089913 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bb2deb3-c0dd-4102-96dd-b21dd187bf89-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.090011 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9bn5z\" (UniqueName: \"kubernetes.io/projected/0bb2deb3-c0dd-4102-96dd-b21dd187bf89-kube-api-access-9bn5z\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.133746 4811 generic.go:334] "Generic (PLEG): container finished" podID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerID="07584449cf1e8492fdd30850781850c18a6beb2ad58ae4c5d57dde647dfa08c3" exitCode=0 Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.134044 4811 generic.go:334] "Generic (PLEG): container finished" podID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerID="037d8c9004a401b696bf1926d9cfc0d899e6a6f211a5a2b6dab9e90a49238a12" exitCode=0 Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.134053 4811 generic.go:334] "Generic (PLEG): container finished" podID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerID="bbba77e73ba1d8f7f46e97a42a8636a5c7ea1703f7da10b9daf21ef5e23f03e9" exitCode=0 Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.134108 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b","Type":"ContainerDied","Data":"07584449cf1e8492fdd30850781850c18a6beb2ad58ae4c5d57dde647dfa08c3"} Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.134133 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b","Type":"ContainerDied","Data":"037d8c9004a401b696bf1926d9cfc0d899e6a6f211a5a2b6dab9e90a49238a12"} Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.134146 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b","Type":"ContainerDied","Data":"bbba77e73ba1d8f7f46e97a42a8636a5c7ea1703f7da10b9daf21ef5e23f03e9"} Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.141260 4811 generic.go:334] "Generic (PLEG): container finished" podID="0bb2deb3-c0dd-4102-96dd-b21dd187bf89" containerID="48932a3a225c09d53791cf3601a19fdee760a4b3988f860040cd3de5ac4f1b6c" exitCode=0 Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.141590 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" 
event={"ID":"0bb2deb3-c0dd-4102-96dd-b21dd187bf89","Type":"ContainerDied","Data":"48932a3a225c09d53791cf3601a19fdee760a4b3988f860040cd3de5ac4f1b6c"} Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.141618 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"0bb2deb3-c0dd-4102-96dd-b21dd187bf89","Type":"ContainerDied","Data":"9a02085d03a698530a4112ea54ee18620633ea2518ef96a46e19a88eda1d0b9b"} Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.145738 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.197640 4811 scope.go:117] "RemoveContainer" containerID="0a27d600e6b6c3ccf2865906d7a304e9db45620c7979a493a59166dfb8cd6b04" Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.197413 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-867cd545c7-s5tc2"] Jan 28 16:11:00 crc kubenswrapper[4811]: I0128 16:11:00.198863 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-867cd545c7-s5tc2"] Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.247505 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-56fd96c5b5-d2r9l"] Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.249807 4811 scope.go:117] "RemoveContainer" containerID="a235c2b4225ce39f8a09e2a45966350b20607db9b21d8ff27ca73be5d353cadd" Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:00.252307 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a235c2b4225ce39f8a09e2a45966350b20607db9b21d8ff27ca73be5d353cadd\": container with ID starting with a235c2b4225ce39f8a09e2a45966350b20607db9b21d8ff27ca73be5d353cadd not found: ID does not exist" containerID="a235c2b4225ce39f8a09e2a45966350b20607db9b21d8ff27ca73be5d353cadd" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.252345 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a235c2b4225ce39f8a09e2a45966350b20607db9b21d8ff27ca73be5d353cadd"} err="failed to get container status \"a235c2b4225ce39f8a09e2a45966350b20607db9b21d8ff27ca73be5d353cadd\": rpc error: code = NotFound desc = could not find container \"a235c2b4225ce39f8a09e2a45966350b20607db9b21d8ff27ca73be5d353cadd\": container with ID starting with a235c2b4225ce39f8a09e2a45966350b20607db9b21d8ff27ca73be5d353cadd not found: ID does not exist" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.252374 4811 scope.go:117] "RemoveContainer" containerID="0a27d600e6b6c3ccf2865906d7a304e9db45620c7979a493a59166dfb8cd6b04" Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:00.253199 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a27d600e6b6c3ccf2865906d7a304e9db45620c7979a493a59166dfb8cd6b04\": container with ID starting with 0a27d600e6b6c3ccf2865906d7a304e9db45620c7979a493a59166dfb8cd6b04 not found: ID does not exist" containerID="0a27d600e6b6c3ccf2865906d7a304e9db45620c7979a493a59166dfb8cd6b04" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.253258 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a27d600e6b6c3ccf2865906d7a304e9db45620c7979a493a59166dfb8cd6b04"} err="failed to get container status \"0a27d600e6b6c3ccf2865906d7a304e9db45620c7979a493a59166dfb8cd6b04\": rpc error: code = 
NotFound desc = could not find container \"0a27d600e6b6c3ccf2865906d7a304e9db45620c7979a493a59166dfb8cd6b04\": container with ID starting with 0a27d600e6b6c3ccf2865906d7a304e9db45620c7979a493a59166dfb8cd6b04 not found: ID does not exist" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.253276 4811 scope.go:117] "RemoveContainer" containerID="2737a1ce65612a85e61445090a2668841ba19af66f524535b6accf0e61ca2d75" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.264530 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-proxy-56fd96c5b5-d2r9l"] Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.277730 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.290571 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.292258 4811 scope.go:117] "RemoveContainer" containerID="c407971373e14f6665ab21c12a8b6508395da0a3c8d48f45186867275344d4b2" Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:00.301778 4811 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:00.301871 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/23095127-8b86-445a-8c32-1e6bc14bf05e-config-data podName:23095127-8b86-445a-8c32-1e6bc14bf05e nodeName:}" failed. No retries permitted until 2026-01-28 16:11:04.301851186 +0000 UTC m=+1557.056214769 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/23095127-8b86-445a-8c32-1e6bc14bf05e-config-data") pod "rabbitmq-server-0" (UID: "23095127-8b86-445a-8c32-1e6bc14bf05e") : configmap "rabbitmq-config-data" not found Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.322694 4811 scope.go:117] "RemoveContainer" containerID="48932a3a225c09d53791cf3601a19fdee760a4b3988f860040cd3de5ac4f1b6c" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.372365 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0bb2deb3-c0dd-4102-96dd-b21dd187bf89" path="/var/lib/kubelet/pods/0bb2deb3-c0dd-4102-96dd-b21dd187bf89/volumes" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.372955 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10427a85-4d03-4473-8be9-a246485b3594" path="/var/lib/kubelet/pods/10427a85-4d03-4473-8be9-a246485b3594/volumes" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.375327 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="120fd687-33f8-4530-9a07-c0388d53fd4e" path="/var/lib/kubelet/pods/120fd687-33f8-4530-9a07-c0388d53fd4e/volumes" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.375812 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2cb09ee7-ca68-4a03-9190-242af91783b1" path="/var/lib/kubelet/pods/2cb09ee7-ca68-4a03-9190-242af91783b1/volumes" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.388333 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e79a2a6-54aa-4533-95ee-9deb5c46811b" path="/var/lib/kubelet/pods/3e79a2a6-54aa-4533-95ee-9deb5c46811b/volumes" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.389066 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69c315f0-b684-4e32-b680-ca6bc1823beb" 
path="/var/lib/kubelet/pods/69c315f0-b684-4e32-b680-ca6bc1823beb/volumes" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.389600 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e7de29a-2b04-4dc8-99f1-1d3568819adf" path="/var/lib/kubelet/pods/8e7de29a-2b04-4dc8-99f1-1d3568819adf/volumes" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.400608 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9dec1ac3-ea25-430b-8934-fda54a06648c" path="/var/lib/kubelet/pods/9dec1ac3-ea25-430b-8934-fda54a06648c/volumes" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.401843 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad905f29-3683-466d-8acd-192cc5ac0960" path="/var/lib/kubelet/pods/ad905f29-3683-466d-8acd-192cc5ac0960/volumes" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.404392 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc54361e-5f34-4253-b91f-b1683e944191" path="/var/lib/kubelet/pods/bc54361e-5f34-4253-b91f-b1683e944191/volumes" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.405089 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bfd184d9-d9d2-4a4b-a672-bfb76837eaca" path="/var/lib/kubelet/pods/bfd184d9-d9d2-4a4b-a672-bfb76837eaca/volumes" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.406899 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d41fbd0a-fb75-4597-a9ed-0dff56b7b635" path="/var/lib/kubelet/pods/d41fbd0a-fb75-4597-a9ed-0dff56b7b635/volumes" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.407391 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9d8f73c-afb3-4e7e-a69a-e8899bf051cc" path="/var/lib/kubelet/pods/e9d8f73c-afb3-4e7e-a69a-e8899bf051cc/volumes" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.407990 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f087b3b8-82d6-4cb1-a883-21a2f0c40580" path="/var/lib/kubelet/pods/f087b3b8-82d6-4cb1-a883-21a2f0c40580/volumes" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.409224 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.414733 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4fca8964-49f3-477e-8b8c-b64a5200e386" containerName="ceilometer-central-agent" containerID="cri-o://42f811bb5217eb77366d4a1fe6568a51fd0e36cc0d12bb37a95c3d8b5f28e287" gracePeriod=30 Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.415139 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4fca8964-49f3-477e-8b8c-b64a5200e386" containerName="proxy-httpd" containerID="cri-o://2d9c65b2bee43397b8955d9320943cb62ff0d6aec47af0978b1868f4d792a3d1" gracePeriod=30 Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.415181 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4fca8964-49f3-477e-8b8c-b64a5200e386" containerName="sg-core" containerID="cri-o://bf5ccb17a1cc4802cfc552544575c5ab0edf7a21d95651488e4c67c1e8d0e963" gracePeriod=30 Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.415215 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4fca8964-49f3-477e-8b8c-b64a5200e386" containerName="ceilometer-notification-agent" 
containerID="cri-o://261bf598712ddaaf57a16c8cec80e31e139366aba7c731d282911fc0bdab9088" gracePeriod=30 Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.481686 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.481872 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="f80333f7-9036-49d9-8c68-03e4ef8f9ee8" containerName="kube-state-metrics" containerID="cri-o://01de404d5b6e89fb138dbe6253054f8965d2469aab77f7679acb0d66fb85419b" gracePeriod=30 Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.544120 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.544374 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/memcached-0" podUID="84b57c00-a800-4b82-98c7-8ebcc04c2ab6" containerName="memcached" containerID="cri-o://9c930ce4914f5a68852c3724ae30e8283ace4f330ad36c534f8e4f690608fe20" gracePeriod=30 Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.555624 4811 scope.go:117] "RemoveContainer" containerID="48932a3a225c09d53791cf3601a19fdee760a4b3988f860040cd3de5ac4f1b6c" Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:00.560266 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48932a3a225c09d53791cf3601a19fdee760a4b3988f860040cd3de5ac4f1b6c\": container with ID starting with 48932a3a225c09d53791cf3601a19fdee760a4b3988f860040cd3de5ac4f1b6c not found: ID does not exist" containerID="48932a3a225c09d53791cf3601a19fdee760a4b3988f860040cd3de5ac4f1b6c" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.560303 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48932a3a225c09d53791cf3601a19fdee760a4b3988f860040cd3de5ac4f1b6c"} err="failed to get container status \"48932a3a225c09d53791cf3601a19fdee760a4b3988f860040cd3de5ac4f1b6c\": rpc error: code = NotFound desc = could not find container \"48932a3a225c09d53791cf3601a19fdee760a4b3988f860040cd3de5ac4f1b6c\": container with ID starting with 48932a3a225c09d53791cf3601a19fdee760a4b3988f860040cd3de5ac4f1b6c not found: ID does not exist" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.586500 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-9b5c-account-create-update-4frsd"] Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.616040 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-9b5c-account-create-update-4frsd"] Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.631341 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-9b5c-account-create-update-nfl79"] Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:00.631757 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad905f29-3683-466d-8acd-192cc5ac0960" containerName="ovsdbserver-sb" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.631768 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad905f29-3683-466d-8acd-192cc5ac0960" containerName="ovsdbserver-sb" Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:00.631779 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc54361e-5f34-4253-b91f-b1683e944191" containerName="ovsdbserver-nb" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.631788 4811 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="bc54361e-5f34-4253-b91f-b1683e944191" containerName="ovsdbserver-nb" Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:00.631800 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfd184d9-d9d2-4a4b-a672-bfb76837eaca" containerName="init" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.631812 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfd184d9-d9d2-4a4b-a672-bfb76837eaca" containerName="init" Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:00.631826 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f087b3b8-82d6-4cb1-a883-21a2f0c40580" containerName="proxy-server" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.631831 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f087b3b8-82d6-4cb1-a883-21a2f0c40580" containerName="proxy-server" Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:00.631841 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad905f29-3683-466d-8acd-192cc5ac0960" containerName="openstack-network-exporter" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.631847 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad905f29-3683-466d-8acd-192cc5ac0960" containerName="openstack-network-exporter" Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:00.631865 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bb2deb3-c0dd-4102-96dd-b21dd187bf89" containerName="nova-cell1-novncproxy-novncproxy" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.631870 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bb2deb3-c0dd-4102-96dd-b21dd187bf89" containerName="nova-cell1-novncproxy-novncproxy" Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:00.631884 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f087b3b8-82d6-4cb1-a883-21a2f0c40580" containerName="proxy-httpd" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.631890 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f087b3b8-82d6-4cb1-a883-21a2f0c40580" containerName="proxy-httpd" Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:00.631899 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e79a2a6-54aa-4533-95ee-9deb5c46811b" containerName="openstack-network-exporter" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.631905 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e79a2a6-54aa-4533-95ee-9deb5c46811b" containerName="openstack-network-exporter" Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:00.631921 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc54361e-5f34-4253-b91f-b1683e944191" containerName="openstack-network-exporter" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.631928 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc54361e-5f34-4253-b91f-b1683e944191" containerName="openstack-network-exporter" Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:00.631939 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfd184d9-d9d2-4a4b-a672-bfb76837eaca" containerName="dnsmasq-dns" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.631945 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfd184d9-d9d2-4a4b-a672-bfb76837eaca" containerName="dnsmasq-dns" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.632107 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc54361e-5f34-4253-b91f-b1683e944191" containerName="openstack-network-exporter" Jan 
28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.632119 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e79a2a6-54aa-4533-95ee-9deb5c46811b" containerName="openstack-network-exporter" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.632129 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfd184d9-d9d2-4a4b-a672-bfb76837eaca" containerName="dnsmasq-dns" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.632136 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc54361e-5f34-4253-b91f-b1683e944191" containerName="ovsdbserver-nb" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.632143 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="0bb2deb3-c0dd-4102-96dd-b21dd187bf89" containerName="nova-cell1-novncproxy-novncproxy" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.632154 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="f087b3b8-82d6-4cb1-a883-21a2f0c40580" containerName="proxy-server" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.632166 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="f087b3b8-82d6-4cb1-a883-21a2f0c40580" containerName="proxy-httpd" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.632174 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad905f29-3683-466d-8acd-192cc5ac0960" containerName="openstack-network-exporter" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.632181 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad905f29-3683-466d-8acd-192cc5ac0960" containerName="ovsdbserver-sb" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.632759 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9b5c-account-create-update-nfl79" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.640085 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.644771 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-ssv4k"] Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.664786 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-7b7bd9558c-kbczp"] Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.665071 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/keystone-7b7bd9558c-kbczp" podUID="e10c1c14-4c1a-445e-9c98-1f0b6b334802" containerName="keystone-api" containerID="cri-o://1408d806929ba9f320ef606ac871fcb79473917f47486e47e26772ffb153aa37" gracePeriod=30 Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.682812 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-gff7z"] Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.693386 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-9b5c-account-create-update-nfl79"] Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.706483 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-ssv4k"] Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.720156 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d6a4d1b5-4521-455f-9ed1-4a2de1b678ee-operator-scripts\") pod \"keystone-9b5c-account-create-update-nfl79\" (UID: \"d6a4d1b5-4521-455f-9ed1-4a2de1b678ee\") 
" pod="openstack/keystone-9b5c-account-create-update-nfl79" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.720355 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nlscw\" (UniqueName: \"kubernetes.io/projected/d6a4d1b5-4521-455f-9ed1-4a2de1b678ee-kube-api-access-nlscw\") pod \"keystone-9b5c-account-create-update-nfl79\" (UID: \"d6a4d1b5-4521-455f-9ed1-4a2de1b678ee\") " pod="openstack/keystone-9b5c-account-create-update-nfl79" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.733747 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-gff7z"] Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.740037 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.749199 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-k598l"] Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.758940 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-99f2-account-create-update-hzmxs" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.780635 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-dv6jj"] Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.787691 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-dv6jj"] Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.794280 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-9b5c-account-create-update-nfl79"] Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:00.794967 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-nlscw operator-scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/keystone-9b5c-account-create-update-nfl79" podUID="d6a4d1b5-4521-455f-9ed1-4a2de1b678ee" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.822030 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8df573b1-31df-4d78-9e7a-66cf18d81aae-operator-scripts\") pod \"8df573b1-31df-4d78-9e7a-66cf18d81aae\" (UID: \"8df573b1-31df-4d78-9e7a-66cf18d81aae\") " Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.822379 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hmvtx\" (UniqueName: \"kubernetes.io/projected/8df573b1-31df-4d78-9e7a-66cf18d81aae-kube-api-access-hmvtx\") pod \"8df573b1-31df-4d78-9e7a-66cf18d81aae\" (UID: \"8df573b1-31df-4d78-9e7a-66cf18d81aae\") " Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.822729 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nlscw\" (UniqueName: \"kubernetes.io/projected/d6a4d1b5-4521-455f-9ed1-4a2de1b678ee-kube-api-access-nlscw\") pod \"keystone-9b5c-account-create-update-nfl79\" (UID: \"d6a4d1b5-4521-455f-9ed1-4a2de1b678ee\") " pod="openstack/keystone-9b5c-account-create-update-nfl79" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.822830 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d6a4d1b5-4521-455f-9ed1-4a2de1b678ee-operator-scripts\") pod \"keystone-9b5c-account-create-update-nfl79\" (UID: 
\"d6a4d1b5-4521-455f-9ed1-4a2de1b678ee\") " pod="openstack/keystone-9b5c-account-create-update-nfl79" Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:00.823000 4811 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:00.823050 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d6a4d1b5-4521-455f-9ed1-4a2de1b678ee-operator-scripts podName:d6a4d1b5-4521-455f-9ed1-4a2de1b678ee nodeName:}" failed. No retries permitted until 2026-01-28 16:11:01.323034398 +0000 UTC m=+1554.077397981 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/d6a4d1b5-4521-455f-9ed1-4a2de1b678ee-operator-scripts") pod "keystone-9b5c-account-create-update-nfl79" (UID: "d6a4d1b5-4521-455f-9ed1-4a2de1b678ee") : configmap "openstack-scripts" not found Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.823806 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8df573b1-31df-4d78-9e7a-66cf18d81aae-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8df573b1-31df-4d78-9e7a-66cf18d81aae" (UID: "8df573b1-31df-4d78-9e7a-66cf18d81aae"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:00.828640 4811 projected.go:194] Error preparing data for projected volume kube-api-access-nlscw for pod openstack/keystone-9b5c-account-create-update-nfl79: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:00.828697 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d6a4d1b5-4521-455f-9ed1-4a2de1b678ee-kube-api-access-nlscw podName:d6a4d1b5-4521-455f-9ed1-4a2de1b678ee nodeName:}" failed. No retries permitted until 2026-01-28 16:11:01.328680061 +0000 UTC m=+1554.083043644 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-nlscw" (UniqueName: "kubernetes.io/projected/d6a4d1b5-4521-455f-9ed1-4a2de1b678ee-kube-api-access-nlscw") pod "keystone-9b5c-account-create-update-nfl79" (UID: "d6a4d1b5-4521-455f-9ed1-4a2de1b678ee") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.829094 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8df573b1-31df-4d78-9e7a-66cf18d81aae-kube-api-access-hmvtx" (OuterVolumeSpecName: "kube-api-access-hmvtx") pod "8df573b1-31df-4d78-9e7a-66cf18d81aae" (UID: "8df573b1-31df-4d78-9e7a-66cf18d81aae"). InnerVolumeSpecName "kube-api-access-hmvtx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.850021 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="39ddd574-84b9-4065-9d72-5183fe430d4d" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.162:8776/healthcheck\": read tcp 10.217.0.2:34996->10.217.0.162:8776: read: connection reset by peer" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.925604 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8df573b1-31df-4d78-9e7a-66cf18d81aae-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.925628 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hmvtx\" (UniqueName: \"kubernetes.io/projected/8df573b1-31df-4d78-9e7a-66cf18d81aae-kube-api-access-hmvtx\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:00.940035 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-galera-0" podUID="156027c6-3cec-4317-8267-eb234c90af40" containerName="galera" containerID="cri-o://6cc40c82aa6779ede3cdbda3f05c0f2657e35a4da3ee3cc5d95911b9941c4dcd" gracePeriod=30 Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:01.129366 4811 configmap.go:193] Couldn't get configMap openstack/ovncontroller-scripts: configmap "ovncontroller-scripts" not found Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:01.129455 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-scripts podName:1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e nodeName:}" failed. No retries permitted until 2026-01-28 16:11:05.129421241 +0000 UTC m=+1557.883784824 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/configmap/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-scripts") pod "ovn-controller-fxxlf" (UID: "1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e") : configmap "ovncontroller-scripts" not found Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:01.152830 4811 generic.go:334] "Generic (PLEG): container finished" podID="f80333f7-9036-49d9-8c68-03e4ef8f9ee8" containerID="01de404d5b6e89fb138dbe6253054f8965d2469aab77f7679acb0d66fb85419b" exitCode=2 Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:01.152880 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f80333f7-9036-49d9-8c68-03e4ef8f9ee8","Type":"ContainerDied","Data":"01de404d5b6e89fb138dbe6253054f8965d2469aab77f7679acb0d66fb85419b"} Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:01.165009 4811 generic.go:334] "Generic (PLEG): container finished" podID="a1a4fd4e-29a2-464f-aca7-3f856ed15221" containerID="6cba40ddbfcb2754d34d28f3d64877131d7be93e51d4f11947d0c71c0b815b3f" exitCode=0 Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:01.165093 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7b7c5df494-2hz6m" event={"ID":"a1a4fd4e-29a2-464f-aca7-3f856ed15221","Type":"ContainerDied","Data":"6cba40ddbfcb2754d34d28f3d64877131d7be93e51d4f11947d0c71c0b815b3f"} Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:01.168411 4811 generic.go:334] "Generic (PLEG): container finished" podID="4fca8964-49f3-477e-8b8c-b64a5200e386" containerID="2d9c65b2bee43397b8955d9320943cb62ff0d6aec47af0978b1868f4d792a3d1" exitCode=0 Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:01.168472 4811 generic.go:334] "Generic (PLEG): container finished" podID="4fca8964-49f3-477e-8b8c-b64a5200e386" containerID="bf5ccb17a1cc4802cfc552544575c5ab0edf7a21d95651488e4c67c1e8d0e963" exitCode=2 Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:01.168479 4811 generic.go:334] "Generic (PLEG): container finished" podID="4fca8964-49f3-477e-8b8c-b64a5200e386" containerID="42f811bb5217eb77366d4a1fe6568a51fd0e36cc0d12bb37a95c3d8b5f28e287" exitCode=0 Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:01.168514 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4fca8964-49f3-477e-8b8c-b64a5200e386","Type":"ContainerDied","Data":"2d9c65b2bee43397b8955d9320943cb62ff0d6aec47af0978b1868f4d792a3d1"} Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:01.168549 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4fca8964-49f3-477e-8b8c-b64a5200e386","Type":"ContainerDied","Data":"bf5ccb17a1cc4802cfc552544575c5ab0edf7a21d95651488e4c67c1e8d0e963"} Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:01.168561 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4fca8964-49f3-477e-8b8c-b64a5200e386","Type":"ContainerDied","Data":"42f811bb5217eb77366d4a1fe6568a51fd0e36cc0d12bb37a95c3d8b5f28e287"} Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:01.171322 4811 generic.go:334] "Generic (PLEG): container finished" podID="a2ab1b57-166e-4409-8b71-0bb922787623" containerID="a764cfb01d9db54ab17147acfefb019173650bcda8aafd295afa3a359d0f21a0" exitCode=1 Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:01.171416 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-k598l" 
event={"ID":"a2ab1b57-166e-4409-8b71-0bb922787623","Type":"ContainerDied","Data":"a764cfb01d9db54ab17147acfefb019173650bcda8aafd295afa3a359d0f21a0"} Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:01.171485 4811 scope.go:117] "RemoveContainer" containerID="03205b6d59e1ada02c7d3945a11dd43315f762964b49cd64bc2729dd34ccdbf7" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:01.184273 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-99f2-account-create-update-hzmxs" event={"ID":"8df573b1-31df-4d78-9e7a-66cf18d81aae","Type":"ContainerDied","Data":"d5bc1bd427c68235218cdae0b0ad750eedb4d42929ac26886b17e8eb0cafcb27"} Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:01.184376 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-99f2-account-create-update-hzmxs" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:01.207397 4811 generic.go:334] "Generic (PLEG): container finished" podID="f35795d5-ffac-4851-914a-00dc84496f91" containerID="a156c42e817f2655b9c4afbf1946c7359670560b8886dbd5e1f1475d498cce7d" exitCode=0 Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:01.207508 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f35795d5-ffac-4851-914a-00dc84496f91","Type":"ContainerDied","Data":"a156c42e817f2655b9c4afbf1946c7359670560b8886dbd5e1f1475d498cce7d"} Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:01.219640 4811 generic.go:334] "Generic (PLEG): container finished" podID="39ddd574-84b9-4065-9d72-5183fe430d4d" containerID="f0a91b4e447ff1a52135fb776145bb39ff121699ece20c0eab9dbf3f1f9735a1" exitCode=0 Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:01.219722 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9b5c-account-create-update-nfl79" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:01.220467 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"39ddd574-84b9-4065-9d72-5183fe430d4d","Type":"ContainerDied","Data":"f0a91b4e447ff1a52135fb776145bb39ff121699ece20c0eab9dbf3f1f9735a1"} Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:01.285548 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-9b5c-account-create-update-nfl79" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:01.324584 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-99f2-account-create-update-hzmxs"] Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:01.332934 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nlscw\" (UniqueName: \"kubernetes.io/projected/d6a4d1b5-4521-455f-9ed1-4a2de1b678ee-kube-api-access-nlscw\") pod \"keystone-9b5c-account-create-update-nfl79\" (UID: \"d6a4d1b5-4521-455f-9ed1-4a2de1b678ee\") " pod="openstack/keystone-9b5c-account-create-update-nfl79" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:01.333024 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d6a4d1b5-4521-455f-9ed1-4a2de1b678ee-operator-scripts\") pod \"keystone-9b5c-account-create-update-nfl79\" (UID: \"d6a4d1b5-4521-455f-9ed1-4a2de1b678ee\") " pod="openstack/keystone-9b5c-account-create-update-nfl79" Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:01.333159 4811 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:01.333208 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d6a4d1b5-4521-455f-9ed1-4a2de1b678ee-operator-scripts podName:d6a4d1b5-4521-455f-9ed1-4a2de1b678ee nodeName:}" failed. No retries permitted until 2026-01-28 16:11:02.33319296 +0000 UTC m=+1555.087556543 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/d6a4d1b5-4521-455f-9ed1-4a2de1b678ee-operator-scripts") pod "keystone-9b5c-account-create-update-nfl79" (UID: "d6a4d1b5-4521-455f-9ed1-4a2de1b678ee") : configmap "openstack-scripts" not found Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:01.334044 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-99f2-account-create-update-hzmxs"] Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:01.337237 4811 projected.go:194] Error preparing data for projected volume kube-api-access-nlscw for pod openstack/keystone-9b5c-account-create-update-nfl79: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:01.337355 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d6a4d1b5-4521-455f-9ed1-4a2de1b678ee-kube-api-access-nlscw podName:d6a4d1b5-4521-455f-9ed1-4a2de1b678ee nodeName:}" failed. No retries permitted until 2026-01-28 16:11:02.337286401 +0000 UTC m=+1555.091650064 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-nlscw" (UniqueName: "kubernetes.io/projected/d6a4d1b5-4521-455f-9ed1-4a2de1b678ee-kube-api-access-nlscw") pod "keystone-9b5c-account-create-update-nfl79" (UID: "d6a4d1b5-4521-455f-9ed1-4a2de1b678ee") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:01.477269 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-78bd467b68-h25ls" podUID="622ebcc7-b645-4db6-86c0-3546523fb7c7" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.156:9311/healthcheck\": read tcp 10.217.0.2:56440->10.217.0.156:9311: read: connection reset by peer" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:01.477269 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-78bd467b68-h25ls" podUID="622ebcc7-b645-4db6-86c0-3546523fb7c7" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.156:9311/healthcheck\": read tcp 10.217.0.2:56430->10.217.0.156:9311: read: connection reset by peer" Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:01.912194 4811 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod84b57c00_a800_4b82_98c7_8ebcc04c2ab6.slice/crio-conmon-9c930ce4914f5a68852c3724ae30e8283ace4f330ad36c534f8e4f690608fe20.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd75f7765_1a6b_4bb5_819a_6891694a29c8.slice/crio-conmon-7f7face6d95fe804267fa4f30c1e01cfdaa41613853274a35697b679666f13c6.scope\": RecentStats: unable to find data in memory cache]" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.096246 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/memcached-0" podUID="84b57c00-a800-4b82-98c7-8ebcc04c2ab6" containerName="memcached" probeResult="failure" output="dial tcp 10.217.0.106:11211: connect: connection refused" Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:02.173990 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21 is running failed: container process not found" containerID="d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:02.174822 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21 is running failed: container process not found" containerID="d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:02.175698 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21 is running failed: container process not found" containerID="d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 28 16:11:02 crc 
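[editor's note] The "Killing container with a grace period ... gracePeriod=30" records above reflect the usual TERM-then-KILL sequence: the runtime signals the container's process and escalates only after the grace period lapses. A simplified Go sketch of that sequence against an ordinary process, assuming a 2-second grace period for brevity; the real CRI-O path is more involved than this.

```go
package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

// stopWithGrace sends SIGTERM, waits up to grace, then SIGKILLs.
func stopWithGrace(cmd *exec.Cmd, grace time.Duration) error {
	if err := cmd.Process.Signal(syscall.SIGTERM); err != nil {
		return err
	}
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()
	select {
	case err := <-done:
		return err // exited within the grace period
	case <-time.After(grace):
		fmt.Println("grace period elapsed; sending SIGKILL")
		return cmd.Process.Kill()
	}
}

func main() {
	cmd := exec.Command("sleep", "60")
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	_ = stopWithGrace(cmd, 2*time.Second)
}
```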
Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:02.176177 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0ff340a32e4bc187020c5b051ade227cf56575c72a89db98472087024a25e2bf" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:02.178118 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0ff340a32e4bc187020c5b051ade227cf56575c72a89db98472087024a25e2bf" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:02.183729 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0ff340a32e4bc187020c5b051ade227cf56575c72a89db98472087024a25e2bf" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:02.183812 4811 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-wzxwb" podUID="8bfbbf41-033c-479e-b625-396378f8afa2" containerName="ovs-vswitchd"
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.247920 4811 generic.go:334] "Generic (PLEG): container finished" podID="d75f7765-1a6b-4bb5-819a-6891694a29c8" containerID="7f7face6d95fe804267fa4f30c1e01cfdaa41613853274a35697b679666f13c6" exitCode=0
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.248021 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d75f7765-1a6b-4bb5-819a-6891694a29c8","Type":"ContainerDied","Data":"7f7face6d95fe804267fa4f30c1e01cfdaa41613853274a35697b679666f13c6"}
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.256649 4811 generic.go:334] "Generic (PLEG): container finished" podID="15d6fdf2-a4f1-4f30-b621-31877f96868f" containerID="751f6099de1911b3d0549631c9b2b444b7a26a8a63ae6b18819217bbb07da0f4" exitCode=0
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.256737 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"15d6fdf2-a4f1-4f30-b621-31877f96868f","Type":"ContainerDied","Data":"751f6099de1911b3d0549631c9b2b444b7a26a8a63ae6b18819217bbb07da0f4"}
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.259196 4811 generic.go:334] "Generic (PLEG): container finished" podID="0138ad61-fb17-46c7-bdd3-c65f15e2e186" containerID="fcc510fe7aab0a06fcaa0e25c25b295dadfdee8c254acdff99aa414590067770" exitCode=0
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.259260 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-69cfb4f585-8qngp" event={"ID":"0138ad61-fb17-46c7-bdd3-c65f15e2e186","Type":"ContainerDied","Data":"fcc510fe7aab0a06fcaa0e25c25b295dadfdee8c254acdff99aa414590067770"}
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.267056 4811 generic.go:334] "Generic (PLEG): container finished" podID="1761893b-d911-4596-b0b3-ce2d25d0384f" containerID="536a91cb8574f375dfd0ebc16db3f46162b78ee4bd2c334c6a012f5ff6e1ed7a" exitCode=0
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.267098 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5bd9c6c974-98zpp" event={"ID":"1761893b-d911-4596-b0b3-ce2d25d0384f","Type":"ContainerDied","Data":"536a91cb8574f375dfd0ebc16db3f46162b78ee4bd2c334c6a012f5ff6e1ed7a"}
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.268952 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-fxxlf" podUID="1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e" containerName="ovn-controller" probeResult="failure" output=<
Jan 28 16:11:02 crc kubenswrapper[4811]: ERROR - Failed to get connection status from ovn-controller, ovn-appctl exit status: 0
Jan 28 16:11:02 crc kubenswrapper[4811]: >
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.270495 4811 generic.go:334] "Generic (PLEG): container finished" podID="622ebcc7-b645-4db6-86c0-3546523fb7c7" containerID="f3194b493d5ddf32d8f9d2a436bb598bd51e3c3d501ca80d66d47ad0bff2fffb" exitCode=0
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.270540 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-78bd467b68-h25ls" event={"ID":"622ebcc7-b645-4db6-86c0-3546523fb7c7","Type":"ContainerDied","Data":"f3194b493d5ddf32d8f9d2a436bb598bd51e3c3d501ca80d66d47ad0bff2fffb"}
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.271791 4811 generic.go:334] "Generic (PLEG): container finished" podID="84b57c00-a800-4b82-98c7-8ebcc04c2ab6" containerID="9c930ce4914f5a68852c3724ae30e8283ace4f330ad36c534f8e4f690608fe20" exitCode=0
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.271830 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"84b57c00-a800-4b82-98c7-8ebcc04c2ab6","Type":"ContainerDied","Data":"9c930ce4914f5a68852c3724ae30e8283ace4f330ad36c534f8e4f690608fe20"}
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.276551 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="15d6fdf2-a4f1-4f30-b621-31877f96868f" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.209:8775/\": dial tcp 10.217.0.209:8775: connect: connection refused"
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.276584 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="15d6fdf2-a4f1-4f30-b621-31877f96868f" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.209:8775/\": dial tcp 10.217.0.209:8775: connect: connection refused"
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.278329 4811 generic.go:334] "Generic (PLEG): container finished" podID="54d8044d-232b-4d32-a2ed-fa2520b6513f" containerID="d388772dd1846d95d234d7f0fb94958aae8f34f960a0d56e2efdfefc4e673f8d" exitCode=0
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.278381 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"54d8044d-232b-4d32-a2ed-fa2520b6513f","Type":"ContainerDied","Data":"d388772dd1846d95d234d7f0fb94958aae8f34f960a0d56e2efdfefc4e673f8d"}
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.278417 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9b5c-account-create-update-nfl79"
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.334960 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-9b5c-account-create-update-nfl79"]
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.354091 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nlscw\" (UniqueName: \"kubernetes.io/projected/d6a4d1b5-4521-455f-9ed1-4a2de1b678ee-kube-api-access-nlscw\") pod \"keystone-9b5c-account-create-update-nfl79\" (UID: \"d6a4d1b5-4521-455f-9ed1-4a2de1b678ee\") " pod="openstack/keystone-9b5c-account-create-update-nfl79"
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.354189 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d6a4d1b5-4521-455f-9ed1-4a2de1b678ee-operator-scripts\") pod \"keystone-9b5c-account-create-update-nfl79\" (UID: \"d6a4d1b5-4521-455f-9ed1-4a2de1b678ee\") " pod="openstack/keystone-9b5c-account-create-update-nfl79"
Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:02.354356 4811 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found
Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:02.354409 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d6a4d1b5-4521-455f-9ed1-4a2de1b678ee-operator-scripts podName:d6a4d1b5-4521-455f-9ed1-4a2de1b678ee nodeName:}" failed. No retries permitted until 2026-01-28 16:11:04.35439523 +0000 UTC m=+1557.108758813 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/d6a4d1b5-4521-455f-9ed1-4a2de1b678ee-operator-scripts") pod "keystone-9b5c-account-create-update-nfl79" (UID: "d6a4d1b5-4521-455f-9ed1-4a2de1b678ee") : configmap "openstack-scripts" not found
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.357283 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1fbf8a43-325c-4b8b-bdfa-8909e88b6b31" path="/var/lib/kubelet/pods/1fbf8a43-325c-4b8b-bdfa-8909e88b6b31/volumes"
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.357955 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="411c0e6f-b062-4538-8fda-434912e7c73d" path="/var/lib/kubelet/pods/411c0e6f-b062-4538-8fda-434912e7c73d/volumes"
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.358508 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63a654e3-54e7-4464-b0b8-683f5f809345" path="/var/lib/kubelet/pods/63a654e3-54e7-4464-b0b8-683f5f809345/volumes"
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.359019 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c7795b3-4e53-4e49-b296-8ed8fe0ab751" path="/var/lib/kubelet/pods/6c7795b3-4e53-4e49-b296-8ed8fe0ab751/volumes"
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.360996 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8df573b1-31df-4d78-9e7a-66cf18d81aae" path="/var/lib/kubelet/pods/8df573b1-31df-4d78-9e7a-66cf18d81aae/volumes"
Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:02.361235 4811 projected.go:194] Error preparing data for projected volume kube-api-access-nlscw for pod openstack/keystone-9b5c-account-create-update-nfl79: failed to fetch token: pod "keystone-9b5c-account-create-update-nfl79" not found
Jan 28 16:11:02 crc kubenswrapper[4811]: E0128 16:11:02.361315 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d6a4d1b5-4521-455f-9ed1-4a2de1b678ee-kube-api-access-nlscw podName:d6a4d1b5-4521-455f-9ed1-4a2de1b678ee nodeName:}" failed. No retries permitted until 2026-01-28 16:11:04.361295758 +0000 UTC m=+1557.115659381 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-nlscw" (UniqueName: "kubernetes.io/projected/d6a4d1b5-4521-455f-9ed1-4a2de1b678ee-kube-api-access-nlscw") pod "keystone-9b5c-account-create-update-nfl79" (UID: "d6a4d1b5-4521-455f-9ed1-4a2de1b678ee") : failed to fetch token: pod "keystone-9b5c-account-create-update-nfl79" not found
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.361541 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-9b5c-account-create-update-nfl79"]
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.456418 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d6a4d1b5-4521-455f-9ed1-4a2de1b678ee-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.456498 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nlscw\" (UniqueName: \"kubernetes.io/projected/d6a4d1b5-4521-455f-9ed1-4a2de1b678ee-kube-api-access-nlscw\") on node \"crc\" DevicePath \"\""
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.512415 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-2952-account-create-update-df9kg"
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.525765 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-89d7-account-create-update-7tjkx"
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.528985 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-917c-account-create-update-slsg4"
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.561846 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7b876d22-ae70-479d-82b1-113a9d760a0f-operator-scripts\") pod \"7b876d22-ae70-479d-82b1-113a9d760a0f\" (UID: \"7b876d22-ae70-479d-82b1-113a9d760a0f\") "
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.562455 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwctl\" (UniqueName: \"kubernetes.io/projected/7b876d22-ae70-479d-82b1-113a9d760a0f-kube-api-access-fwctl\") pod \"7b876d22-ae70-479d-82b1-113a9d760a0f\" (UID: \"7b876d22-ae70-479d-82b1-113a9d760a0f\") "
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.563831 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b876d22-ae70-479d-82b1-113a9d760a0f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7b876d22-ae70-479d-82b1-113a9d760a0f" (UID: "7b876d22-ae70-479d-82b1-113a9d760a0f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.570406 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b876d22-ae70-479d-82b1-113a9d760a0f-kube-api-access-fwctl" (OuterVolumeSpecName: "kube-api-access-fwctl") pod "7b876d22-ae70-479d-82b1-113a9d760a0f" (UID: "7b876d22-ae70-479d-82b1-113a9d760a0f"). InnerVolumeSpecName "kube-api-access-fwctl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.666668 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kgc76\" (UniqueName: \"kubernetes.io/projected/13f72e20-02af-43d9-8400-8180b175795e-kube-api-access-kgc76\") pod \"13f72e20-02af-43d9-8400-8180b175795e\" (UID: \"13f72e20-02af-43d9-8400-8180b175795e\") "
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.667668 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4815adcf-373b-4460-9f8c-d6dfaf471196-operator-scripts\") pod \"4815adcf-373b-4460-9f8c-d6dfaf471196\" (UID: \"4815adcf-373b-4460-9f8c-d6dfaf471196\") "
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.667787 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-64z2q\" (UniqueName: \"kubernetes.io/projected/4815adcf-373b-4460-9f8c-d6dfaf471196-kube-api-access-64z2q\") pod \"4815adcf-373b-4460-9f8c-d6dfaf471196\" (UID: \"4815adcf-373b-4460-9f8c-d6dfaf471196\") "
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.667903 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13f72e20-02af-43d9-8400-8180b175795e-operator-scripts\") pod \"13f72e20-02af-43d9-8400-8180b175795e\" (UID: \"13f72e20-02af-43d9-8400-8180b175795e\") "
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.668686 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7b876d22-ae70-479d-82b1-113a9d760a0f-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.668780 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwctl\" (UniqueName: \"kubernetes.io/projected/7b876d22-ae70-479d-82b1-113a9d760a0f-kube-api-access-fwctl\") on node \"crc\" DevicePath \"\""
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.669277 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4815adcf-373b-4460-9f8c-d6dfaf471196-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4815adcf-373b-4460-9f8c-d6dfaf471196" (UID: "4815adcf-373b-4460-9f8c-d6dfaf471196"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.677874 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4815adcf-373b-4460-9f8c-d6dfaf471196-kube-api-access-64z2q" (OuterVolumeSpecName: "kube-api-access-64z2q") pod "4815adcf-373b-4460-9f8c-d6dfaf471196" (UID: "4815adcf-373b-4460-9f8c-d6dfaf471196"). InnerVolumeSpecName "kube-api-access-64z2q". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.682141 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13f72e20-02af-43d9-8400-8180b175795e-kube-api-access-kgc76" (OuterVolumeSpecName: "kube-api-access-kgc76") pod "13f72e20-02af-43d9-8400-8180b175795e" (UID: "13f72e20-02af-43d9-8400-8180b175795e"). InnerVolumeSpecName "kube-api-access-kgc76". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.688404 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13f72e20-02af-43d9-8400-8180b175795e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "13f72e20-02af-43d9-8400-8180b175795e" (UID: "13f72e20-02af-43d9-8400-8180b175795e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.739564 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.798109 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4815adcf-373b-4460-9f8c-d6dfaf471196-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.798150 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-64z2q\" (UniqueName: \"kubernetes.io/projected/4815adcf-373b-4460-9f8c-d6dfaf471196-kube-api-access-64z2q\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.798164 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13f72e20-02af-43d9-8400-8180b175795e-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.798182 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kgc76\" (UniqueName: \"kubernetes.io/projected/13f72e20-02af-43d9-8400-8180b175795e-kube-api-access-kgc76\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.818614 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.858065 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.899425 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/54d8044d-232b-4d32-a2ed-fa2520b6513f-public-tls-certs\") pod \"54d8044d-232b-4d32-a2ed-fa2520b6513f\" (UID: \"54d8044d-232b-4d32-a2ed-fa2520b6513f\") " Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.899487 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d8044d-232b-4d32-a2ed-fa2520b6513f-combined-ca-bundle\") pod \"54d8044d-232b-4d32-a2ed-fa2520b6513f\" (UID: \"54d8044d-232b-4d32-a2ed-fa2520b6513f\") " Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.899550 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54d8044d-232b-4d32-a2ed-fa2520b6513f-config-data\") pod \"54d8044d-232b-4d32-a2ed-fa2520b6513f\" (UID: \"54d8044d-232b-4d32-a2ed-fa2520b6513f\") " Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.899580 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54d8044d-232b-4d32-a2ed-fa2520b6513f-logs\") pod \"54d8044d-232b-4d32-a2ed-fa2520b6513f\" (UID: \"54d8044d-232b-4d32-a2ed-fa2520b6513f\") " Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.899600 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f35795d5-ffac-4851-914a-00dc84496f91-internal-tls-certs\") pod \"f35795d5-ffac-4851-914a-00dc84496f91\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") " Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.899650 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35795d5-ffac-4851-914a-00dc84496f91-combined-ca-bundle\") pod \"f35795d5-ffac-4851-914a-00dc84496f91\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") " Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.899668 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f35795d5-ffac-4851-914a-00dc84496f91-httpd-run\") pod \"f35795d5-ffac-4851-914a-00dc84496f91\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") " Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.899706 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f35795d5-ffac-4851-914a-00dc84496f91-scripts\") pod \"f35795d5-ffac-4851-914a-00dc84496f91\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") " Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.899731 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/54d8044d-232b-4d32-a2ed-fa2520b6513f-httpd-run\") pod \"54d8044d-232b-4d32-a2ed-fa2520b6513f\" (UID: \"54d8044d-232b-4d32-a2ed-fa2520b6513f\") " Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.899768 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54d8044d-232b-4d32-a2ed-fa2520b6513f-scripts\") pod \"54d8044d-232b-4d32-a2ed-fa2520b6513f\" (UID: \"54d8044d-232b-4d32-a2ed-fa2520b6513f\") " Jan 28 16:11:02 crc 
kubenswrapper[4811]: I0128 16:11:02.899801 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-internal-tls-certs\") pod \"39ddd574-84b9-4065-9d72-5183fe430d4d\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.899825 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cq6c4\" (UniqueName: \"kubernetes.io/projected/54d8044d-232b-4d32-a2ed-fa2520b6513f-kube-api-access-cq6c4\") pod \"54d8044d-232b-4d32-a2ed-fa2520b6513f\" (UID: \"54d8044d-232b-4d32-a2ed-fa2520b6513f\") " Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.899849 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f35795d5-ffac-4851-914a-00dc84496f91-logs\") pod \"f35795d5-ffac-4851-914a-00dc84496f91\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") " Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.899882 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7tplp\" (UniqueName: \"kubernetes.io/projected/f35795d5-ffac-4851-914a-00dc84496f91-kube-api-access-7tplp\") pod \"f35795d5-ffac-4851-914a-00dc84496f91\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") " Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.900005 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"54d8044d-232b-4d32-a2ed-fa2520b6513f\" (UID: \"54d8044d-232b-4d32-a2ed-fa2520b6513f\") " Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.900072 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"f35795d5-ffac-4851-914a-00dc84496f91\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") " Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.900103 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f35795d5-ffac-4851-914a-00dc84496f91-config-data\") pod \"f35795d5-ffac-4851-914a-00dc84496f91\" (UID: \"f35795d5-ffac-4851-914a-00dc84496f91\") " Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.901964 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54d8044d-232b-4d32-a2ed-fa2520b6513f-logs" (OuterVolumeSpecName: "logs") pod "54d8044d-232b-4d32-a2ed-fa2520b6513f" (UID: "54d8044d-232b-4d32-a2ed-fa2520b6513f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.906301 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f35795d5-ffac-4851-914a-00dc84496f91-scripts" (OuterVolumeSpecName: "scripts") pod "f35795d5-ffac-4851-914a-00dc84496f91" (UID: "f35795d5-ffac-4851-914a-00dc84496f91"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.908104 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54d8044d-232b-4d32-a2ed-fa2520b6513f-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "54d8044d-232b-4d32-a2ed-fa2520b6513f" (UID: "54d8044d-232b-4d32-a2ed-fa2520b6513f"). 
InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.908269 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f35795d5-ffac-4851-914a-00dc84496f91-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "f35795d5-ffac-4851-914a-00dc84496f91" (UID: "f35795d5-ffac-4851-914a-00dc84496f91"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.908951 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f35795d5-ffac-4851-914a-00dc84496f91-logs" (OuterVolumeSpecName: "logs") pod "f35795d5-ffac-4851-914a-00dc84496f91" (UID: "f35795d5-ffac-4851-914a-00dc84496f91"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.914868 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f35795d5-ffac-4851-914a-00dc84496f91-kube-api-access-7tplp" (OuterVolumeSpecName: "kube-api-access-7tplp") pod "f35795d5-ffac-4851-914a-00dc84496f91" (UID: "f35795d5-ffac-4851-914a-00dc84496f91"). InnerVolumeSpecName "kube-api-access-7tplp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.915200 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54d8044d-232b-4d32-a2ed-fa2520b6513f-scripts" (OuterVolumeSpecName: "scripts") pod "54d8044d-232b-4d32-a2ed-fa2520b6513f" (UID: "54d8044d-232b-4d32-a2ed-fa2520b6513f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.915525 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54d8044d-232b-4d32-a2ed-fa2520b6513f-kube-api-access-cq6c4" (OuterVolumeSpecName: "kube-api-access-cq6c4") pod "54d8044d-232b-4d32-a2ed-fa2520b6513f" (UID: "54d8044d-232b-4d32-a2ed-fa2520b6513f"). InnerVolumeSpecName "kube-api-access-cq6c4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.917661 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "f35795d5-ffac-4851-914a-00dc84496f91" (UID: "f35795d5-ffac-4851-914a-00dc84496f91"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.927519 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "glance") pod "54d8044d-232b-4d32-a2ed-fa2520b6513f" (UID: "54d8044d-232b-4d32-a2ed-fa2520b6513f"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.944018 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54d8044d-232b-4d32-a2ed-fa2520b6513f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "54d8044d-232b-4d32-a2ed-fa2520b6513f" (UID: "54d8044d-232b-4d32-a2ed-fa2520b6513f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.949617 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f35795d5-ffac-4851-914a-00dc84496f91-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f35795d5-ffac-4851-914a-00dc84496f91" (UID: "f35795d5-ffac-4851-914a-00dc84496f91"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.971180 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f35795d5-ffac-4851-914a-00dc84496f91-config-data" (OuterVolumeSpecName: "config-data") pod "f35795d5-ffac-4851-914a-00dc84496f91" (UID: "f35795d5-ffac-4851-914a-00dc84496f91"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.977472 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "39ddd574-84b9-4065-9d72-5183fe430d4d" (UID: "39ddd574-84b9-4065-9d72-5183fe430d4d"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.977921 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54d8044d-232b-4d32-a2ed-fa2520b6513f-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "54d8044d-232b-4d32-a2ed-fa2520b6513f" (UID: "54d8044d-232b-4d32-a2ed-fa2520b6513f"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.982830 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f35795d5-ffac-4851-914a-00dc84496f91-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "f35795d5-ffac-4851-914a-00dc84496f91" (UID: "f35795d5-ffac-4851-914a-00dc84496f91"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:02 crc kubenswrapper[4811]: I0128 16:11:02.992980 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54d8044d-232b-4d32-a2ed-fa2520b6513f-config-data" (OuterVolumeSpecName: "config-data") pod "54d8044d-232b-4d32-a2ed-fa2520b6513f" (UID: "54d8044d-232b-4d32-a2ed-fa2520b6513f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.001772 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-combined-ca-bundle\") pod \"39ddd574-84b9-4065-9d72-5183fe430d4d\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.001886 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-public-tls-certs\") pod \"39ddd574-84b9-4065-9d72-5183fe430d4d\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.001955 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8vj2f\" (UniqueName: \"kubernetes.io/projected/39ddd574-84b9-4065-9d72-5183fe430d4d-kube-api-access-8vj2f\") pod \"39ddd574-84b9-4065-9d72-5183fe430d4d\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.001981 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-config-data-custom\") pod \"39ddd574-84b9-4065-9d72-5183fe430d4d\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.002060 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-scripts\") pod \"39ddd574-84b9-4065-9d72-5183fe430d4d\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.002115 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/39ddd574-84b9-4065-9d72-5183fe430d4d-logs\") pod \"39ddd574-84b9-4065-9d72-5183fe430d4d\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.002160 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-config-data\") pod \"39ddd574-84b9-4065-9d72-5183fe430d4d\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.002402 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/39ddd574-84b9-4065-9d72-5183fe430d4d-etc-machine-id\") pod \"39ddd574-84b9-4065-9d72-5183fe430d4d\" (UID: \"39ddd574-84b9-4065-9d72-5183fe430d4d\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.003546 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/39ddd574-84b9-4065-9d72-5183fe430d4d-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "39ddd574-84b9-4065-9d72-5183fe430d4d" (UID: "39ddd574-84b9-4065-9d72-5183fe430d4d"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.004704 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39ddd574-84b9-4065-9d72-5183fe430d4d-logs" (OuterVolumeSpecName: "logs") pod "39ddd574-84b9-4065-9d72-5183fe430d4d" (UID: "39ddd574-84b9-4065-9d72-5183fe430d4d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.005592 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35795d5-ffac-4851-914a-00dc84496f91-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.005620 4811 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/39ddd574-84b9-4065-9d72-5183fe430d4d-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.005633 4811 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f35795d5-ffac-4851-914a-00dc84496f91-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.005644 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f35795d5-ffac-4851-914a-00dc84496f91-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.005658 4811 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/54d8044d-232b-4d32-a2ed-fa2520b6513f-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.005668 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54d8044d-232b-4d32-a2ed-fa2520b6513f-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.005679 4811 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.005690 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cq6c4\" (UniqueName: \"kubernetes.io/projected/54d8044d-232b-4d32-a2ed-fa2520b6513f-kube-api-access-cq6c4\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.005703 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f35795d5-ffac-4851-914a-00dc84496f91-logs\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.005714 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7tplp\" (UniqueName: \"kubernetes.io/projected/f35795d5-ffac-4851-914a-00dc84496f91-kube-api-access-7tplp\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.005744 4811 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.005761 4811 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.005773 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f35795d5-ffac-4851-914a-00dc84496f91-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.005785 4811 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/54d8044d-232b-4d32-a2ed-fa2520b6513f-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.005797 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d8044d-232b-4d32-a2ed-fa2520b6513f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.005807 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54d8044d-232b-4d32-a2ed-fa2520b6513f-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.005817 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/39ddd574-84b9-4065-9d72-5183fe430d4d-logs\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.005826 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54d8044d-232b-4d32-a2ed-fa2520b6513f-logs\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.007283 4811 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f35795d5-ffac-4851-914a-00dc84496f91-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.008654 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39ddd574-84b9-4065-9d72-5183fe430d4d-kube-api-access-8vj2f" (OuterVolumeSpecName: "kube-api-access-8vj2f") pod "39ddd574-84b9-4065-9d72-5183fe430d4d" (UID: "39ddd574-84b9-4065-9d72-5183fe430d4d"). InnerVolumeSpecName "kube-api-access-8vj2f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.021735 4811 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.022073 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-scripts" (OuterVolumeSpecName: "scripts") pod "39ddd574-84b9-4065-9d72-5183fe430d4d" (UID: "39ddd574-84b9-4065-9d72-5183fe430d4d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.023084 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "39ddd574-84b9-4065-9d72-5183fe430d4d" (UID: "39ddd574-84b9-4065-9d72-5183fe430d4d"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.027263 4811 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.031676 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "39ddd574-84b9-4065-9d72-5183fe430d4d" (UID: "39ddd574-84b9-4065-9d72-5183fe430d4d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.055909 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-config-data" (OuterVolumeSpecName: "config-data") pod "39ddd574-84b9-4065-9d72-5183fe430d4d" (UID: "39ddd574-84b9-4065-9d72-5183fe430d4d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.056683 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "39ddd574-84b9-4065-9d72-5183fe430d4d" (UID: "39ddd574-84b9-4065-9d72-5183fe430d4d"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.109050 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.110566 4811 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.110658 4811 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.110733 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8vj2f\" (UniqueName: \"kubernetes.io/projected/39ddd574-84b9-4065-9d72-5183fe430d4d-kube-api-access-8vj2f\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.110846 4811 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.110920 4811 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.110992 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.111068 4811 reconciler_common.go:293] "Volume detached for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39ddd574-84b9-4065-9d72-5183fe430d4d-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.140644 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-k598l" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.165875 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.187131 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.196017 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-69cfb4f585-8qngp" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.200957 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-5bd9c6c974-98zpp" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.201787 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.212859 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.214942 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2ab1b57-166e-4409-8b71-0bb922787623-operator-scripts\") pod \"a2ab1b57-166e-4409-8b71-0bb922787623\" (UID: \"a2ab1b57-166e-4409-8b71-0bb922787623\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.214985 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bbhv4\" (UniqueName: \"kubernetes.io/projected/a2ab1b57-166e-4409-8b71-0bb922787623-kube-api-access-bbhv4\") pod \"a2ab1b57-166e-4409-8b71-0bb922787623\" (UID: \"a2ab1b57-166e-4409-8b71-0bb922787623\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.219125 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2ab1b57-166e-4409-8b71-0bb922787623-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a2ab1b57-166e-4409-8b71-0bb922787623" (UID: "a2ab1b57-166e-4409-8b71-0bb922787623"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.225887 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2ab1b57-166e-4409-8b71-0bb922787623-kube-api-access-bbhv4" (OuterVolumeSpecName: "kube-api-access-bbhv4") pod "a2ab1b57-166e-4409-8b71-0bb922787623" (UID: "a2ab1b57-166e-4409-8b71-0bb922787623"). InnerVolumeSpecName "kube-api-access-bbhv4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.227989 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-7b7c5df494-2hz6m" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.234764 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-78bd467b68-h25ls" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.298916 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"15d6fdf2-a4f1-4f30-b621-31877f96868f","Type":"ContainerDied","Data":"daa161bbe0a16e8c10dd09e6edf58ce3eca14f136962f92a4a1ed26d11d6b208"} Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.299227 4811 scope.go:117] "RemoveContainer" containerID="751f6099de1911b3d0549631c9b2b444b7a26a8a63ae6b18819217bbb07da0f4" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.299702 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.302202 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-89d7-account-create-update-7tjkx" event={"ID":"4815adcf-373b-4460-9f8c-d6dfaf471196","Type":"ContainerDied","Data":"b532553aef6d0c89b3606b4c1d13a53f2558f9e48a7a31264a5870ab31b91da5"} Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.302245 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-89d7-account-create-update-7tjkx" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.309879 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.310077 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"39ddd574-84b9-4065-9d72-5183fe430d4d","Type":"ContainerDied","Data":"99c00358645b86ca102061603eae530b38d31f10308e163274288cef1468eb1e"} Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317059 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x9tmj\" (UniqueName: \"kubernetes.io/projected/0138ad61-fb17-46c7-bdd3-c65f15e2e186-kube-api-access-x9tmj\") pod \"0138ad61-fb17-46c7-bdd3-c65f15e2e186\" (UID: \"0138ad61-fb17-46c7-bdd3-c65f15e2e186\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317106 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d75f7765-1a6b-4bb5-819a-6891694a29c8-combined-ca-bundle\") pod \"d75f7765-1a6b-4bb5-819a-6891694a29c8\" (UID: \"d75f7765-1a6b-4bb5-819a-6891694a29c8\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317139 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0138ad61-fb17-46c7-bdd3-c65f15e2e186-combined-ca-bundle\") pod \"0138ad61-fb17-46c7-bdd3-c65f15e2e186\" (UID: \"0138ad61-fb17-46c7-bdd3-c65f15e2e186\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317169 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1a4fd4e-29a2-464f-aca7-3f856ed15221-combined-ca-bundle\") pod \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\" (UID: \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317188 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/15d6fdf2-a4f1-4f30-b621-31877f96868f-nova-metadata-tls-certs\") pod \"15d6fdf2-a4f1-4f30-b621-31877f96868f\" (UID: \"15d6fdf2-a4f1-4f30-b621-31877f96868f\") " Jan 28 16:11:03 crc 
kubenswrapper[4811]: I0128 16:11:03.317207 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/622ebcc7-b645-4db6-86c0-3546523fb7c7-internal-tls-certs\") pod \"622ebcc7-b645-4db6-86c0-3546523fb7c7\" (UID: \"622ebcc7-b645-4db6-86c0-3546523fb7c7\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317228 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0138ad61-fb17-46c7-bdd3-c65f15e2e186-config-data-custom\") pod \"0138ad61-fb17-46c7-bdd3-c65f15e2e186\" (UID: \"0138ad61-fb17-46c7-bdd3-c65f15e2e186\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317268 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84b57c00-a800-4b82-98c7-8ebcc04c2ab6-combined-ca-bundle\") pod \"84b57c00-a800-4b82-98c7-8ebcc04c2ab6\" (UID: \"84b57c00-a800-4b82-98c7-8ebcc04c2ab6\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317288 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/622ebcc7-b645-4db6-86c0-3546523fb7c7-config-data-custom\") pod \"622ebcc7-b645-4db6-86c0-3546523fb7c7\" (UID: \"622ebcc7-b645-4db6-86c0-3546523fb7c7\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317307 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x92tf\" (UniqueName: \"kubernetes.io/projected/f80333f7-9036-49d9-8c68-03e4ef8f9ee8-kube-api-access-x92tf\") pod \"f80333f7-9036-49d9-8c68-03e4ef8f9ee8\" (UID: \"f80333f7-9036-49d9-8c68-03e4ef8f9ee8\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317359 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/84b57c00-a800-4b82-98c7-8ebcc04c2ab6-memcached-tls-certs\") pod \"84b57c00-a800-4b82-98c7-8ebcc04c2ab6\" (UID: \"84b57c00-a800-4b82-98c7-8ebcc04c2ab6\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317385 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/622ebcc7-b645-4db6-86c0-3546523fb7c7-config-data\") pod \"622ebcc7-b645-4db6-86c0-3546523fb7c7\" (UID: \"622ebcc7-b645-4db6-86c0-3546523fb7c7\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317416 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f80333f7-9036-49d9-8c68-03e4ef8f9ee8-kube-state-metrics-tls-config\") pod \"f80333f7-9036-49d9-8c68-03e4ef8f9ee8\" (UID: \"f80333f7-9036-49d9-8c68-03e4ef8f9ee8\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317472 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15d6fdf2-a4f1-4f30-b621-31877f96868f-logs\") pod \"15d6fdf2-a4f1-4f30-b621-31877f96868f\" (UID: \"15d6fdf2-a4f1-4f30-b621-31877f96868f\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317494 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15d6fdf2-a4f1-4f30-b621-31877f96868f-combined-ca-bundle\") pod \"15d6fdf2-a4f1-4f30-b621-31877f96868f\" (UID: 
\"15d6fdf2-a4f1-4f30-b621-31877f96868f\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317527 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/622ebcc7-b645-4db6-86c0-3546523fb7c7-logs\") pod \"622ebcc7-b645-4db6-86c0-3546523fb7c7\" (UID: \"622ebcc7-b645-4db6-86c0-3546523fb7c7\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317542 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d75f7765-1a6b-4bb5-819a-6891694a29c8-logs\") pod \"d75f7765-1a6b-4bb5-819a-6891694a29c8\" (UID: \"d75f7765-1a6b-4bb5-819a-6891694a29c8\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317565 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jg9mz\" (UniqueName: \"kubernetes.io/projected/84b57c00-a800-4b82-98c7-8ebcc04c2ab6-kube-api-access-jg9mz\") pod \"84b57c00-a800-4b82-98c7-8ebcc04c2ab6\" (UID: \"84b57c00-a800-4b82-98c7-8ebcc04c2ab6\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317585 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f80333f7-9036-49d9-8c68-03e4ef8f9ee8-combined-ca-bundle\") pod \"f80333f7-9036-49d9-8c68-03e4ef8f9ee8\" (UID: \"f80333f7-9036-49d9-8c68-03e4ef8f9ee8\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317627 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1761893b-d911-4596-b0b3-ce2d25d0384f-config-data\") pod \"1761893b-d911-4596-b0b3-ce2d25d0384f\" (UID: \"1761893b-d911-4596-b0b3-ce2d25d0384f\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317646 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15d6fdf2-a4f1-4f30-b621-31877f96868f-config-data\") pod \"15d6fdf2-a4f1-4f30-b621-31877f96868f\" (UID: \"15d6fdf2-a4f1-4f30-b621-31877f96868f\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317663 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1a4fd4e-29a2-464f-aca7-3f856ed15221-scripts\") pod \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\" (UID: \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317692 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1761893b-d911-4596-b0b3-ce2d25d0384f-config-data-custom\") pod \"1761893b-d911-4596-b0b3-ce2d25d0384f\" (UID: \"1761893b-d911-4596-b0b3-ce2d25d0384f\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317717 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1a4fd4e-29a2-464f-aca7-3f856ed15221-internal-tls-certs\") pod \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\" (UID: \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317745 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1a4fd4e-29a2-464f-aca7-3f856ed15221-config-data\") pod \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\" (UID: \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\") " Jan 28 16:11:03 crc 
kubenswrapper[4811]: I0128 16:11:03.317761 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f80333f7-9036-49d9-8c68-03e4ef8f9ee8-kube-state-metrics-tls-certs\") pod \"f80333f7-9036-49d9-8c68-03e4ef8f9ee8\" (UID: \"f80333f7-9036-49d9-8c68-03e4ef8f9ee8\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317791 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1a4fd4e-29a2-464f-aca7-3f856ed15221-logs\") pod \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\" (UID: \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317809 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d75f7765-1a6b-4bb5-819a-6891694a29c8-internal-tls-certs\") pod \"d75f7765-1a6b-4bb5-819a-6891694a29c8\" (UID: \"d75f7765-1a6b-4bb5-819a-6891694a29c8\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317853 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d75f7765-1a6b-4bb5-819a-6891694a29c8-public-tls-certs\") pod \"d75f7765-1a6b-4bb5-819a-6891694a29c8\" (UID: \"d75f7765-1a6b-4bb5-819a-6891694a29c8\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317881 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/622ebcc7-b645-4db6-86c0-3546523fb7c7-combined-ca-bundle\") pod \"622ebcc7-b645-4db6-86c0-3546523fb7c7\" (UID: \"622ebcc7-b645-4db6-86c0-3546523fb7c7\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317911 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2tpdv\" (UniqueName: \"kubernetes.io/projected/1761893b-d911-4596-b0b3-ce2d25d0384f-kube-api-access-2tpdv\") pod \"1761893b-d911-4596-b0b3-ce2d25d0384f\" (UID: \"1761893b-d911-4596-b0b3-ce2d25d0384f\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317935 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/84b57c00-a800-4b82-98c7-8ebcc04c2ab6-kolla-config\") pod \"84b57c00-a800-4b82-98c7-8ebcc04c2ab6\" (UID: \"84b57c00-a800-4b82-98c7-8ebcc04c2ab6\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317954 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/84b57c00-a800-4b82-98c7-8ebcc04c2ab6-config-data\") pod \"84b57c00-a800-4b82-98c7-8ebcc04c2ab6\" (UID: \"84b57c00-a800-4b82-98c7-8ebcc04c2ab6\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317970 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1761893b-d911-4596-b0b3-ce2d25d0384f-logs\") pod \"1761893b-d911-4596-b0b3-ce2d25d0384f\" (UID: \"1761893b-d911-4596-b0b3-ce2d25d0384f\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.317986 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/622ebcc7-b645-4db6-86c0-3546523fb7c7-public-tls-certs\") pod \"622ebcc7-b645-4db6-86c0-3546523fb7c7\" (UID: \"622ebcc7-b645-4db6-86c0-3546523fb7c7\") " Jan 28 16:11:03 crc 
kubenswrapper[4811]: I0128 16:11:03.318005 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1761893b-d911-4596-b0b3-ce2d25d0384f-combined-ca-bundle\") pod \"1761893b-d911-4596-b0b3-ce2d25d0384f\" (UID: \"1761893b-d911-4596-b0b3-ce2d25d0384f\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.318021 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wwqkn\" (UniqueName: \"kubernetes.io/projected/15d6fdf2-a4f1-4f30-b621-31877f96868f-kube-api-access-wwqkn\") pod \"15d6fdf2-a4f1-4f30-b621-31877f96868f\" (UID: \"15d6fdf2-a4f1-4f30-b621-31877f96868f\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.318043 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-45rmw\" (UniqueName: \"kubernetes.io/projected/a1a4fd4e-29a2-464f-aca7-3f856ed15221-kube-api-access-45rmw\") pod \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\" (UID: \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.318065 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0138ad61-fb17-46c7-bdd3-c65f15e2e186-logs\") pod \"0138ad61-fb17-46c7-bdd3-c65f15e2e186\" (UID: \"0138ad61-fb17-46c7-bdd3-c65f15e2e186\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.318083 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9vgnl\" (UniqueName: \"kubernetes.io/projected/d75f7765-1a6b-4bb5-819a-6891694a29c8-kube-api-access-9vgnl\") pod \"d75f7765-1a6b-4bb5-819a-6891694a29c8\" (UID: \"d75f7765-1a6b-4bb5-819a-6891694a29c8\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.318112 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n7bfb\" (UniqueName: \"kubernetes.io/projected/622ebcc7-b645-4db6-86c0-3546523fb7c7-kube-api-access-n7bfb\") pod \"622ebcc7-b645-4db6-86c0-3546523fb7c7\" (UID: \"622ebcc7-b645-4db6-86c0-3546523fb7c7\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.318131 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1a4fd4e-29a2-464f-aca7-3f856ed15221-public-tls-certs\") pod \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\" (UID: \"a1a4fd4e-29a2-464f-aca7-3f856ed15221\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.318152 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0138ad61-fb17-46c7-bdd3-c65f15e2e186-config-data\") pod \"0138ad61-fb17-46c7-bdd3-c65f15e2e186\" (UID: \"0138ad61-fb17-46c7-bdd3-c65f15e2e186\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.318175 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d75f7765-1a6b-4bb5-819a-6891694a29c8-config-data\") pod \"d75f7765-1a6b-4bb5-819a-6891694a29c8\" (UID: \"d75f7765-1a6b-4bb5-819a-6891694a29c8\") " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.318514 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2ab1b57-166e-4409-8b71-0bb922787623-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.318533 4811 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bbhv4\" (UniqueName: \"kubernetes.io/projected/a2ab1b57-166e-4409-8b71-0bb922787623-kube-api-access-bbhv4\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.319710 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/622ebcc7-b645-4db6-86c0-3546523fb7c7-logs" (OuterVolumeSpecName: "logs") pod "622ebcc7-b645-4db6-86c0-3546523fb7c7" (UID: "622ebcc7-b645-4db6-86c0-3546523fb7c7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.320289 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15d6fdf2-a4f1-4f30-b621-31877f96868f-logs" (OuterVolumeSpecName: "logs") pod "15d6fdf2-a4f1-4f30-b621-31877f96868f" (UID: "15d6fdf2-a4f1-4f30-b621-31877f96868f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.321141 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0138ad61-fb17-46c7-bdd3-c65f15e2e186-kube-api-access-x9tmj" (OuterVolumeSpecName: "kube-api-access-x9tmj") pod "0138ad61-fb17-46c7-bdd3-c65f15e2e186" (UID: "0138ad61-fb17-46c7-bdd3-c65f15e2e186"). InnerVolumeSpecName "kube-api-access-x9tmj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.323635 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-78bd467b68-h25ls" event={"ID":"622ebcc7-b645-4db6-86c0-3546523fb7c7","Type":"ContainerDied","Data":"56df644148d58959d35a4958b09728943bd6c0a1ebb2065d7d9466347764758c"} Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.323726 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-78bd467b68-h25ls" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.327708 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84b57c00-a800-4b82-98c7-8ebcc04c2ab6-config-data" (OuterVolumeSpecName: "config-data") pod "84b57c00-a800-4b82-98c7-8ebcc04c2ab6" (UID: "84b57c00-a800-4b82-98c7-8ebcc04c2ab6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.328845 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1761893b-d911-4596-b0b3-ce2d25d0384f-logs" (OuterVolumeSpecName: "logs") pod "1761893b-d911-4596-b0b3-ce2d25d0384f" (UID: "1761893b-d911-4596-b0b3-ce2d25d0384f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.328894 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1a4fd4e-29a2-464f-aca7-3f856ed15221-scripts" (OuterVolumeSpecName: "scripts") pod "a1a4fd4e-29a2-464f-aca7-3f856ed15221" (UID: "a1a4fd4e-29a2-464f-aca7-3f856ed15221"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.331561 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d75f7765-1a6b-4bb5-819a-6891694a29c8-logs" (OuterVolumeSpecName: "logs") pod "d75f7765-1a6b-4bb5-819a-6891694a29c8" (UID: "d75f7765-1a6b-4bb5-819a-6891694a29c8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.347237 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0138ad61-fb17-46c7-bdd3-c65f15e2e186-logs" (OuterVolumeSpecName: "logs") pod "0138ad61-fb17-46c7-bdd3-c65f15e2e186" (UID: "0138ad61-fb17-46c7-bdd3-c65f15e2e186"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.347875 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1761893b-d911-4596-b0b3-ce2d25d0384f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "1761893b-d911-4596-b0b3-ce2d25d0384f" (UID: "1761893b-d911-4596-b0b3-ce2d25d0384f"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.348171 4811 scope.go:117] "RemoveContainer" containerID="f1c697d3014cf2adc2ad5baf6f4f5abebc7a014d2d73bdad9756ab37a9f1de10" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.349851 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/622ebcc7-b645-4db6-86c0-3546523fb7c7-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "622ebcc7-b645-4db6-86c0-3546523fb7c7" (UID: "622ebcc7-b645-4db6-86c0-3546523fb7c7"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.351276 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.351305 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1a4fd4e-29a2-464f-aca7-3f856ed15221-logs" (OuterVolumeSpecName: "logs") pod "a1a4fd4e-29a2-464f-aca7-3f856ed15221" (UID: "a1a4fd4e-29a2-464f-aca7-3f856ed15221"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.351410 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"54d8044d-232b-4d32-a2ed-fa2520b6513f","Type":"ContainerDied","Data":"2e2808ccbcdbb41999dd2620b1eff19f3bdb7b5558d3f05fd590eda1d1e42396"} Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.352956 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84b57c00-a800-4b82-98c7-8ebcc04c2ab6-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "84b57c00-a800-4b82-98c7-8ebcc04c2ab6" (UID: "84b57c00-a800-4b82-98c7-8ebcc04c2ab6"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.354073 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0138ad61-fb17-46c7-bdd3-c65f15e2e186-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "0138ad61-fb17-46c7-bdd3-c65f15e2e186" (UID: "0138ad61-fb17-46c7-bdd3-c65f15e2e186"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.356138 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f80333f7-9036-49d9-8c68-03e4ef8f9ee8-kube-api-access-x92tf" (OuterVolumeSpecName: "kube-api-access-x92tf") pod "f80333f7-9036-49d9-8c68-03e4ef8f9ee8" (UID: "f80333f7-9036-49d9-8c68-03e4ef8f9ee8"). InnerVolumeSpecName "kube-api-access-x92tf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.361027 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.365219 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7b7c5df494-2hz6m" event={"ID":"a1a4fd4e-29a2-464f-aca7-3f856ed15221","Type":"ContainerDied","Data":"4838faa0b0365355d7340151c0f804d22098b22990de0db69cf5432869a05908"} Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.365424 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-7b7c5df494-2hz6m" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.365803 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84b57c00-a800-4b82-98c7-8ebcc04c2ab6-kube-api-access-jg9mz" (OuterVolumeSpecName: "kube-api-access-jg9mz") pod "84b57c00-a800-4b82-98c7-8ebcc04c2ab6" (UID: "84b57c00-a800-4b82-98c7-8ebcc04c2ab6"). InnerVolumeSpecName "kube-api-access-jg9mz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.366417 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d75f7765-1a6b-4bb5-819a-6891694a29c8-kube-api-access-9vgnl" (OuterVolumeSpecName: "kube-api-access-9vgnl") pod "d75f7765-1a6b-4bb5-819a-6891694a29c8" (UID: "d75f7765-1a6b-4bb5-819a-6891694a29c8"). InnerVolumeSpecName "kube-api-access-9vgnl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: E0128 16:11:03.368517 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="16ecbb402df17bba24033b41f310dbf0a19457f1f7fd2b1097a18cc03319751f" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.370211 4811 generic.go:334] "Generic (PLEG): container finished" podID="9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b" containerID="b0391cee65f30aa023e4b8a6992aae9b85400f523a386a5570a75825a412741e" exitCode=0 Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.370543 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b","Type":"ContainerDied","Data":"b0391cee65f30aa023e4b8a6992aae9b85400f523a386a5570a75825a412741e"} Jan 28 16:11:03 crc kubenswrapper[4811]: E0128 16:11:03.371636 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="16ecbb402df17bba24033b41f310dbf0a19457f1f7fd2b1097a18cc03319751f" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.374745 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-k598l" event={"ID":"a2ab1b57-166e-4409-8b71-0bb922787623","Type":"ContainerDied","Data":"093d125098a6804dc699d75f4d7b3713eeb1b57a33cde60d8b85f2bc1403177a"} Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.374789 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-k598l" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.374891 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/622ebcc7-b645-4db6-86c0-3546523fb7c7-kube-api-access-n7bfb" (OuterVolumeSpecName: "kube-api-access-n7bfb") pod "622ebcc7-b645-4db6-86c0-3546523fb7c7" (UID: "622ebcc7-b645-4db6-86c0-3546523fb7c7"). InnerVolumeSpecName "kube-api-access-n7bfb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.375020 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1a4fd4e-29a2-464f-aca7-3f856ed15221-kube-api-access-45rmw" (OuterVolumeSpecName: "kube-api-access-45rmw") pod "a1a4fd4e-29a2-464f-aca7-3f856ed15221" (UID: "a1a4fd4e-29a2-464f-aca7-3f856ed15221"). InnerVolumeSpecName "kube-api-access-45rmw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.375755 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15d6fdf2-a4f1-4f30-b621-31877f96868f-kube-api-access-wwqkn" (OuterVolumeSpecName: "kube-api-access-wwqkn") pod "15d6fdf2-a4f1-4f30-b621-31877f96868f" (UID: "15d6fdf2-a4f1-4f30-b621-31877f96868f"). InnerVolumeSpecName "kube-api-access-wwqkn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.390257 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.401746 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1761893b-d911-4596-b0b3-ce2d25d0384f-kube-api-access-2tpdv" (OuterVolumeSpecName: "kube-api-access-2tpdv") pod "1761893b-d911-4596-b0b3-ce2d25d0384f" (UID: "1761893b-d911-4596-b0b3-ce2d25d0384f"). InnerVolumeSpecName "kube-api-access-2tpdv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.421825 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84b57c00-a800-4b82-98c7-8ebcc04c2ab6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "84b57c00-a800-4b82-98c7-8ebcc04c2ab6" (UID: "84b57c00-a800-4b82-98c7-8ebcc04c2ab6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.422297 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1a4fd4e-29a2-464f-aca7-3f856ed15221-logs\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.422339 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2tpdv\" (UniqueName: \"kubernetes.io/projected/1761893b-d911-4596-b0b3-ce2d25d0384f-kube-api-access-2tpdv\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.422363 4811 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/84b57c00-a800-4b82-98c7-8ebcc04c2ab6-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.422376 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/84b57c00-a800-4b82-98c7-8ebcc04c2ab6-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.422387 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1761893b-d911-4596-b0b3-ce2d25d0384f-logs\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.422403 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-45rmw\" (UniqueName: \"kubernetes.io/projected/a1a4fd4e-29a2-464f-aca7-3f856ed15221-kube-api-access-45rmw\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.422413 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wwqkn\" (UniqueName: \"kubernetes.io/projected/15d6fdf2-a4f1-4f30-b621-31877f96868f-kube-api-access-wwqkn\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.422421 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0138ad61-fb17-46c7-bdd3-c65f15e2e186-logs\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.422448 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9vgnl\" (UniqueName: \"kubernetes.io/projected/d75f7765-1a6b-4bb5-819a-6891694a29c8-kube-api-access-9vgnl\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: 
I0128 16:11:03.422458 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n7bfb\" (UniqueName: \"kubernetes.io/projected/622ebcc7-b645-4db6-86c0-3546523fb7c7-kube-api-access-n7bfb\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.422468 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x9tmj\" (UniqueName: \"kubernetes.io/projected/0138ad61-fb17-46c7-bdd3-c65f15e2e186-kube-api-access-x9tmj\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.422477 4811 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0138ad61-fb17-46c7-bdd3-c65f15e2e186-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.422485 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84b57c00-a800-4b82-98c7-8ebcc04c2ab6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.422497 4811 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/622ebcc7-b645-4db6-86c0-3546523fb7c7-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.422509 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x92tf\" (UniqueName: \"kubernetes.io/projected/f80333f7-9036-49d9-8c68-03e4ef8f9ee8-kube-api-access-x92tf\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.422518 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15d6fdf2-a4f1-4f30-b621-31877f96868f-logs\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.422527 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d75f7765-1a6b-4bb5-819a-6891694a29c8-logs\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.422536 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/622ebcc7-b645-4db6-86c0-3546523fb7c7-logs\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.422544 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jg9mz\" (UniqueName: \"kubernetes.io/projected/84b57c00-a800-4b82-98c7-8ebcc04c2ab6-kube-api-access-jg9mz\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.422553 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1a4fd4e-29a2-464f-aca7-3f856ed15221-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.422563 4811 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1761893b-d911-4596-b0b3-ce2d25d0384f-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.437069 4811 scope.go:117] "RemoveContainer" containerID="f0a91b4e447ff1a52135fb776145bb39ff121699ece20c0eab9dbf3f1f9735a1" Jan 28 16:11:03 crc kubenswrapper[4811]: E0128 16:11:03.437309 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec 
PID: container is stopping, stdout: , stderr: , exit code -1" containerID="16ecbb402df17bba24033b41f310dbf0a19457f1f7fd2b1097a18cc03319751f" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 28 16:11:03 crc kubenswrapper[4811]: E0128 16:11:03.437500 4811 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="94afc9c1-3ddc-4ad1-9df2-03c593f1d536" containerName="nova-cell1-conductor-conductor" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.454166 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f35795d5-ffac-4851-914a-00dc84496f91","Type":"ContainerDied","Data":"384d30ad658a64b0ef3eaa69ac0df8c33b198c66d440a43063da0bd02a00c82f"} Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.454305 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.460176 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f80333f7-9036-49d9-8c68-03e4ef8f9ee8","Type":"ContainerDied","Data":"bf3f0f91a9b6080c89dd07d5273046218b41be1d4dc81fb77ede832dc3bc4508"} Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.460298 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.462908 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_6dd44d16-5b30-493c-9dd2-1ba856a4393a/ovn-northd/0.log" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.463909 4811 generic.go:334] "Generic (PLEG): container finished" podID="6dd44d16-5b30-493c-9dd2-1ba856a4393a" containerID="4bf48c2e9b3287834317b579380166a3b7e88ed93df7ceeb2163cfc00138fa94" exitCode=139 Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.464013 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"6dd44d16-5b30-493c-9dd2-1ba856a4393a","Type":"ContainerDied","Data":"4bf48c2e9b3287834317b579380166a3b7e88ed93df7ceeb2163cfc00138fa94"} Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.464681 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d75f7765-1a6b-4bb5-819a-6891694a29c8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d75f7765-1a6b-4bb5-819a-6891694a29c8" (UID: "d75f7765-1a6b-4bb5-819a-6891694a29c8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.465965 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-917c-account-create-update-slsg4" event={"ID":"13f72e20-02af-43d9-8400-8180b175795e","Type":"ContainerDied","Data":"fb7d5e61f53c019ac1a1c55760774d6a07b10f7c68beb7d1afb0423c7928d68c"} Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.466082 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-917c-account-create-update-slsg4" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.468724 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f80333f7-9036-49d9-8c68-03e4ef8f9ee8-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "f80333f7-9036-49d9-8c68-03e4ef8f9ee8" (UID: "f80333f7-9036-49d9-8c68-03e4ef8f9ee8"). InnerVolumeSpecName "kube-state-metrics-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.470783 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-89d7-account-create-update-7tjkx"] Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.471486 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-5bd9c6c974-98zpp" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.475754 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5bd9c6c974-98zpp" event={"ID":"1761893b-d911-4596-b0b3-ce2d25d0384f","Type":"ContainerDied","Data":"84123163ec5e9d54bd21c7668c22aa5ccf1b0c888c2e5be9bbc54afe5777706b"} Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.483832 4811 scope.go:117] "RemoveContainer" containerID="1e0db5f5c865912285b18890df2afbfd3dcd0f3d6c9ece1eee5e5a4b85b96528" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.484217 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.484238 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d75f7765-1a6b-4bb5-819a-6891694a29c8","Type":"ContainerDied","Data":"824fcca7539f34f8ff4e74fcad43febe26cfb9a9224814e46dbcc0d1481c8897"} Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.486830 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-2952-account-create-update-df9kg" event={"ID":"7b876d22-ae70-479d-82b1-113a9d760a0f","Type":"ContainerDied","Data":"e291178e43d515b5144ff7ee4fa996a985a42844a923284bc56358789d2e5755"} Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.486979 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-2952-account-create-update-df9kg" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.487275 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-89d7-account-create-update-7tjkx"] Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.494967 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-k598l"] Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.496374 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"84b57c00-a800-4b82-98c7-8ebcc04c2ab6","Type":"ContainerDied","Data":"18bbb3034bb69ed9e6b1b0f7994101be22ad34122776ec8404e6e32b129e023a"} Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.496488 4811 util.go:48] "No ready sandbox for pod can be found. 
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.503840 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-69cfb4f585-8qngp" event={"ID":"0138ad61-fb17-46c7-bdd3-c65f15e2e186","Type":"ContainerDied","Data":"2621fb304b9cbb45829569083d08665b417abddf1cca392888d36f88cabdf03f"}
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.503969 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-69cfb4f585-8qngp"
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.507399 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-k598l"]
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.519639 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.521995 4811 scope.go:117] "RemoveContainer" containerID="f3194b493d5ddf32d8f9d2a436bb598bd51e3c3d501ca80d66d47ad0bff2fffb"
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.525707 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.535274 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d75f7765-1a6b-4bb5-819a-6891694a29c8-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.535299 4811 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f80333f7-9036-49d9-8c68-03e4ef8f9ee8-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\""
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.543758 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-917c-account-create-update-slsg4"]
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.548753 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-917c-account-create-update-slsg4"]
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.553342 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/622ebcc7-b645-4db6-86c0-3546523fb7c7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "622ebcc7-b645-4db6-86c0-3546523fb7c7" (UID: "622ebcc7-b645-4db6-86c0-3546523fb7c7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.598956 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-2952-account-create-update-df9kg"]
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.599377 4811 scope.go:117] "RemoveContainer" containerID="656706d31855991db104dc63417a4a2dad789a13059f74bc9caee3f4a9998a35"
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.619579 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_6dd44d16-5b30-493c-9dd2-1ba856a4393a/ovn-northd/0.log"
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.620038 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.623174 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-2952-account-create-update-df9kg"]
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.629759 4811 scope.go:117] "RemoveContainer" containerID="d388772dd1846d95d234d7f0fb94958aae8f34f960a0d56e2efdfefc4e673f8d"
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.633184 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.637973 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/622ebcc7-b645-4db6-86c0-3546523fb7c7-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.647280 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.665061 4811 scope.go:117] "RemoveContainer" containerID="039d7a3660d21b25f1bc174e77b3130cc47d2cdf0049929c92da48d3adb0d4b0"
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.667642 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.685659 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1761893b-d911-4596-b0b3-ce2d25d0384f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1761893b-d911-4596-b0b3-ce2d25d0384f" (UID: "1761893b-d911-4596-b0b3-ce2d25d0384f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.702952 4811 scope.go:117] "RemoveContainer" containerID="6cba40ddbfcb2754d34d28f3d64877131d7be93e51d4f11947d0c71c0b815b3f"
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.709966 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d75f7765-1a6b-4bb5-819a-6891694a29c8-config-data" (OuterVolumeSpecName: "config-data") pod "d75f7765-1a6b-4bb5-819a-6891694a29c8" (UID: "d75f7765-1a6b-4bb5-819a-6891694a29c8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.710606 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15d6fdf2-a4f1-4f30-b621-31877f96868f-config-data" (OuterVolumeSpecName: "config-data") pod "15d6fdf2-a4f1-4f30-b621-31877f96868f" (UID: "15d6fdf2-a4f1-4f30-b621-31877f96868f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.739382 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6dd44d16-5b30-493c-9dd2-1ba856a4393a-ovn-rundir\") pod \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\" (UID: \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\") "
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.739469 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-operator-scripts\") pod \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") "
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.739552 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-config-data-generated\") pod \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") "
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.739637 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6dd44d16-5b30-493c-9dd2-1ba856a4393a-scripts\") pod \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\" (UID: \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\") "
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.739661 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dd44d16-5b30-493c-9dd2-1ba856a4393a-metrics-certs-tls-certs\") pod \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\" (UID: \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\") "
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.739691 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rb8t8\" (UniqueName: \"kubernetes.io/projected/6dd44d16-5b30-493c-9dd2-1ba856a4393a-kube-api-access-rb8t8\") pod \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\" (UID: \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\") "
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.740971 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dd44d16-5b30-493c-9dd2-1ba856a4393a-combined-ca-bundle\") pod \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\" (UID: \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\") "
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.741635 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-galera-tls-certs\") pod \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") "
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.741781 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6dd44d16-5b30-493c-9dd2-1ba856a4393a-config\") pod \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\" (UID: \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\") "
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.741824 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") "
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.742618 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-kolla-config\") pod \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") "
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.742779 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-config-data-default\") pod \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") "
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.742960 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dd44d16-5b30-493c-9dd2-1ba856a4393a-ovn-northd-tls-certs\") pod \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\" (UID: \"6dd44d16-5b30-493c-9dd2-1ba856a4393a\") "
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.743102 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kh5jt\" (UniqueName: \"kubernetes.io/projected/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-kube-api-access-kh5jt\") pod \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") "
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.745971 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-combined-ca-bundle\") pod \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\" (UID: \"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b\") "
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.743802 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b" (UID: "9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.744174 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6dd44d16-5b30-493c-9dd2-1ba856a4393a-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "6dd44d16-5b30-493c-9dd2-1ba856a4393a" (UID: "6dd44d16-5b30-493c-9dd2-1ba856a4393a"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.744789 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b" (UID: "9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.745458 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6dd44d16-5b30-493c-9dd2-1ba856a4393a-scripts" (OuterVolumeSpecName: "scripts") pod "6dd44d16-5b30-493c-9dd2-1ba856a4393a" (UID: "6dd44d16-5b30-493c-9dd2-1ba856a4393a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.745399 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f80333f7-9036-49d9-8c68-03e4ef8f9ee8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f80333f7-9036-49d9-8c68-03e4ef8f9ee8" (UID: "f80333f7-9036-49d9-8c68-03e4ef8f9ee8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.745941 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b" (UID: "9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.750041 4811 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6dd44d16-5b30-493c-9dd2-1ba856a4393a-ovn-rundir\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.750069 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.750084 4811 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.750099 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1761893b-d911-4596-b0b3-ce2d25d0384f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.750111 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6dd44d16-5b30-493c-9dd2-1ba856a4393a-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.750123 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d75f7765-1a6b-4bb5-819a-6891694a29c8-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.750135 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f80333f7-9036-49d9-8c68-03e4ef8f9ee8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.750146 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15d6fdf2-a4f1-4f30-b621-31877f96868f-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.750194 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0138ad61-fb17-46c7-bdd3-c65f15e2e186-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0138ad61-fb17-46c7-bdd3-c65f15e2e186" (UID: "0138ad61-fb17-46c7-bdd3-c65f15e2e186"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.746423 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b" (UID: "9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.747167 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6dd44d16-5b30-493c-9dd2-1ba856a4393a-config" (OuterVolumeSpecName: "config") pod "6dd44d16-5b30-493c-9dd2-1ba856a4393a" (UID: "6dd44d16-5b30-493c-9dd2-1ba856a4393a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.753118 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-kube-api-access-kh5jt" (OuterVolumeSpecName: "kube-api-access-kh5jt") pod "9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b" (UID: "9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b"). InnerVolumeSpecName "kube-api-access-kh5jt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.755069 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6dd44d16-5b30-493c-9dd2-1ba856a4393a-kube-api-access-rb8t8" (OuterVolumeSpecName: "kube-api-access-rb8t8") pod "6dd44d16-5b30-493c-9dd2-1ba856a4393a" (UID: "6dd44d16-5b30-493c-9dd2-1ba856a4393a"). InnerVolumeSpecName "kube-api-access-rb8t8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.757400 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15d6fdf2-a4f1-4f30-b621-31877f96868f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "15d6fdf2-a4f1-4f30-b621-31877f96868f" (UID: "15d6fdf2-a4f1-4f30-b621-31877f96868f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.795149 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1761893b-d911-4596-b0b3-ce2d25d0384f-config-data" (OuterVolumeSpecName: "config-data") pod "1761893b-d911-4596-b0b3-ce2d25d0384f" (UID: "1761893b-d911-4596-b0b3-ce2d25d0384f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.795787 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "mysql-db") pod "9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b" (UID: "9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.797676 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f80333f7-9036-49d9-8c68-03e4ef8f9ee8-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "f80333f7-9036-49d9-8c68-03e4ef8f9ee8" (UID: "f80333f7-9036-49d9-8c68-03e4ef8f9ee8"). InnerVolumeSpecName "kube-state-metrics-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.801018 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15d6fdf2-a4f1-4f30-b621-31877f96868f-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "15d6fdf2-a4f1-4f30-b621-31877f96868f" (UID: "15d6fdf2-a4f1-4f30-b621-31877f96868f"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.808767 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/622ebcc7-b645-4db6-86c0-3546523fb7c7-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "622ebcc7-b645-4db6-86c0-3546523fb7c7" (UID: "622ebcc7-b645-4db6-86c0-3546523fb7c7"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.821704 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84b57c00-a800-4b82-98c7-8ebcc04c2ab6-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "84b57c00-a800-4b82-98c7-8ebcc04c2ab6" (UID: "84b57c00-a800-4b82-98c7-8ebcc04c2ab6"). InnerVolumeSpecName "memcached-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.827566 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/622ebcc7-b645-4db6-86c0-3546523fb7c7-config-data" (OuterVolumeSpecName: "config-data") pod "622ebcc7-b645-4db6-86c0-3546523fb7c7" (UID: "622ebcc7-b645-4db6-86c0-3546523fb7c7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.849172 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1a4fd4e-29a2-464f-aca7-3f856ed15221-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a1a4fd4e-29a2-464f-aca7-3f856ed15221" (UID: "a1a4fd4e-29a2-464f-aca7-3f856ed15221"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.851565 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1761893b-d911-4596-b0b3-ce2d25d0384f-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: E0128 16:11:03.851606 4811 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 28 16:11:03 crc kubenswrapper[4811]: E0128 16:11:03.851789 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/3b7599aa-7114-46c9-bf38-b6d735b75326-config-data podName:3b7599aa-7114-46c9-bf38-b6d735b75326 nodeName:}" failed. No retries permitted until 2026-01-28 16:11:11.85177297 +0000 UTC m=+1564.606136553 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/3b7599aa-7114-46c9-bf38-b6d735b75326-config-data") pod "rabbitmq-cell1-server-0" (UID: "3b7599aa-7114-46c9-bf38-b6d735b75326") : configmap "rabbitmq-cell1-config-data" not found Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.852212 4811 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f80333f7-9036-49d9-8c68-03e4ef8f9ee8-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.852238 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rb8t8\" (UniqueName: \"kubernetes.io/projected/6dd44d16-5b30-493c-9dd2-1ba856a4393a-kube-api-access-rb8t8\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.852248 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0138ad61-fb17-46c7-bdd3-c65f15e2e186-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.852278 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1a4fd4e-29a2-464f-aca7-3f856ed15221-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.852289 4811 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/15d6fdf2-a4f1-4f30-b621-31877f96868f-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.852297 4811 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/622ebcc7-b645-4db6-86c0-3546523fb7c7-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.852306 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6dd44d16-5b30-493c-9dd2-1ba856a4393a-config\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.852333 4811 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.852363 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/622ebcc7-b645-4db6-86c0-3546523fb7c7-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.852373 4811 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/84b57c00-a800-4b82-98c7-8ebcc04c2ab6-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.852392 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15d6fdf2-a4f1-4f30-b621-31877f96868f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.852408 4811 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:03 crc 
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.852475 4811 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-config-data-default\") on node \"crc\" DevicePath \"\""
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.852487 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kh5jt\" (UniqueName: \"kubernetes.io/projected/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-kube-api-access-kh5jt\") on node \"crc\" DevicePath \"\""
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.863686 4811 scope.go:117] "RemoveContainer" containerID="f494267049de0afa9aadacc83552d047aaa6b559a44a9bc8ab504e3eb508fb84"
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.879696 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0138ad61-fb17-46c7-bdd3-c65f15e2e186-config-data" (OuterVolumeSpecName: "config-data") pod "0138ad61-fb17-46c7-bdd3-c65f15e2e186" (UID: "0138ad61-fb17-46c7-bdd3-c65f15e2e186"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.888025 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1a4fd4e-29a2-464f-aca7-3f856ed15221-config-data" (OuterVolumeSpecName: "config-data") pod "a1a4fd4e-29a2-464f-aca7-3f856ed15221" (UID: "a1a4fd4e-29a2-464f-aca7-3f856ed15221"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.892643 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dd44d16-5b30-493c-9dd2-1ba856a4393a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6dd44d16-5b30-493c-9dd2-1ba856a4393a" (UID: "6dd44d16-5b30-493c-9dd2-1ba856a4393a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.896960 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d75f7765-1a6b-4bb5-819a-6891694a29c8-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "d75f7765-1a6b-4bb5-819a-6891694a29c8" (UID: "d75f7765-1a6b-4bb5-819a-6891694a29c8"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.920990 4811 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc"
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.923248 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/622ebcc7-b645-4db6-86c0-3546523fb7c7-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "622ebcc7-b645-4db6-86c0-3546523fb7c7" (UID: "622ebcc7-b645-4db6-86c0-3546523fb7c7"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.934673 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b" (UID: "9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.958233 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1a4fd4e-29a2-464f-aca7-3f856ed15221-config-data\") on node \"crc\" DevicePath \"\""
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.958280 4811 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d75f7765-1a6b-4bb5-819a-6891694a29c8-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.958293 4811 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/622ebcc7-b645-4db6-86c0-3546523fb7c7-public-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.958305 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dd44d16-5b30-493c-9dd2-1ba856a4393a-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.958345 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0138ad61-fb17-46c7-bdd3-c65f15e2e186-config-data\") on node \"crc\" DevicePath \"\""
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.958356 4811 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\""
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.958370 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.964123 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="23095127-8b86-445a-8c32-1e6bc14bf05e" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.102:5671: connect: connection refused"
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.964503 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dd44d16-5b30-493c-9dd2-1ba856a4393a-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "6dd44d16-5b30-493c-9dd2-1ba856a4393a" (UID: "6dd44d16-5b30-493c-9dd2-1ba856a4393a"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.968242 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d75f7765-1a6b-4bb5-819a-6891694a29c8-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "d75f7765-1a6b-4bb5-819a-6891694a29c8" (UID: "d75f7765-1a6b-4bb5-819a-6891694a29c8"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.973071 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1a4fd4e-29a2-464f-aca7-3f856ed15221-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "a1a4fd4e-29a2-464f-aca7-3f856ed15221" (UID: "a1a4fd4e-29a2-464f-aca7-3f856ed15221"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.976261 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b" (UID: "9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:11:03 crc kubenswrapper[4811]: I0128 16:11:03.995246 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1a4fd4e-29a2-464f-aca7-3f856ed15221-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "a1a4fd4e-29a2-464f-aca7-3f856ed15221" (UID: "a1a4fd4e-29a2-464f-aca7-3f856ed15221"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.008409 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dd44d16-5b30-493c-9dd2-1ba856a4393a-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "6dd44d16-5b30-493c-9dd2-1ba856a4393a" (UID: "6dd44d16-5b30-493c-9dd2-1ba856a4393a"). InnerVolumeSpecName "ovn-northd-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.060529 4811 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dd44d16-5b30-493c-9dd2-1ba856a4393a-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.060571 4811 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1a4fd4e-29a2-464f-aca7-3f856ed15221-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.060583 4811 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d75f7765-1a6b-4bb5-819a-6891694a29c8-public-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.060595 4811 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dd44d16-5b30-493c-9dd2-1ba856a4393a-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.060610 4811 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1a4fd4e-29a2-464f-aca7-3f856ed15221-public-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.060621 4811 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b-galera-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.092530 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="4fca8964-49f3-477e-8b8c-b64a5200e386" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.206:3000/\": dial tcp 10.217.0.206:3000: connect: connection refused"
Jan 28 16:11:04 crc kubenswrapper[4811]: E0128 16:11:04.132792 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8e97f5379d59be0d7d51607bfdc713017cc666c439edf2b55720cc1ee7a94ec6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Jan 28 16:11:04 crc kubenswrapper[4811]: E0128 16:11:04.137931 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8e97f5379d59be0d7d51607bfdc713017cc666c439edf2b55720cc1ee7a94ec6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Jan 28 16:11:04 crc kubenswrapper[4811]: E0128 16:11:04.139395 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8e97f5379d59be0d7d51607bfdc713017cc666c439edf2b55720cc1ee7a94ec6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Jan 28 16:11:04 crc kubenswrapper[4811]: E0128 16:11:04.139437 4811 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="f76f6c0a-7254-4c76-86c3-e1004cf38303" containerName="nova-cell0-conductor-conductor"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.179703 4811 scope.go:117] "RemoveContainer" containerID="a764cfb01d9db54ab17147acfefb019173650bcda8aafd295afa3a359d0f21a0"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.214054 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-69cfb4f585-8qngp"]
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.222506 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-69cfb4f585-8qngp"]
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.228582 4811 scope.go:117] "RemoveContainer" containerID="a156c42e817f2655b9c4afbf1946c7359670560b8886dbd5e1f1475d498cce7d"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.229132 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-78bd467b68-h25ls"]
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.235508 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-78bd467b68-h25ls"]
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.242159 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-5bd9c6c974-98zpp"]
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.251209 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-keystone-listener-5bd9c6c974-98zpp"]
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.256773 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"]
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.261236 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/memcached-0"]
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.265385 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.267839 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="3b7599aa-7114-46c9-bf38-b6d735b75326" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.103:5671: connect: connection refused"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.271876 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.290281 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.293740 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.313839 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.317044 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"]
Jan 28 16:11:04 crc kubenswrapper[4811]: E0128 16:11:04.333627 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d9d98a9ef617e0914e95c49e3f4209449a918bf828d1812a874f55e9669c0fbe" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.339781 4811 scope.go:117] "RemoveContainer" containerID="84f6ea378d0b165999f2606c252834dba8614670922d49e48387042f20f1cf01"
Jan 28 16:11:04 crc kubenswrapper[4811]: E0128 16:11:04.339894 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d9d98a9ef617e0914e95c49e3f4209449a918bf828d1812a874f55e9669c0fbe" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Jan 28 16:11:04 crc kubenswrapper[4811]: E0128 16:11:04.346896 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d9d98a9ef617e0914e95c49e3f4209449a918bf828d1812a874f55e9669c0fbe" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Jan 28 16:11:04 crc kubenswrapper[4811]: E0128 16:11:04.346957 4811 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="f5158a65-ce89-4a2b-9a19-0e4a6070562a" containerName="nova-scheduler-scheduler"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.353631 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0138ad61-fb17-46c7-bdd3-c65f15e2e186" path="/var/lib/kubelet/pods/0138ad61-fb17-46c7-bdd3-c65f15e2e186/volumes"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.354267 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13f72e20-02af-43d9-8400-8180b175795e" path="/var/lib/kubelet/pods/13f72e20-02af-43d9-8400-8180b175795e/volumes"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.354661 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15d6fdf2-a4f1-4f30-b621-31877f96868f" path="/var/lib/kubelet/pods/15d6fdf2-a4f1-4f30-b621-31877f96868f/volumes"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.355831 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1761893b-d911-4596-b0b3-ce2d25d0384f" path="/var/lib/kubelet/pods/1761893b-d911-4596-b0b3-ce2d25d0384f/volumes"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.356445 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39ddd574-84b9-4065-9d72-5183fe430d4d" path="/var/lib/kubelet/pods/39ddd574-84b9-4065-9d72-5183fe430d4d/volumes"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.357187 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4815adcf-373b-4460-9f8c-d6dfaf471196" path="/var/lib/kubelet/pods/4815adcf-373b-4460-9f8c-d6dfaf471196/volumes"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.360288 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54d8044d-232b-4d32-a2ed-fa2520b6513f" path="/var/lib/kubelet/pods/54d8044d-232b-4d32-a2ed-fa2520b6513f/volumes"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.361080 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="622ebcc7-b645-4db6-86c0-3546523fb7c7" path="/var/lib/kubelet/pods/622ebcc7-b645-4db6-86c0-3546523fb7c7/volumes"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.361641 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b876d22-ae70-479d-82b1-113a9d760a0f" path="/var/lib/kubelet/pods/7b876d22-ae70-479d-82b1-113a9d760a0f/volumes"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.364180 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84b57c00-a800-4b82-98c7-8ebcc04c2ab6" path="/var/lib/kubelet/pods/84b57c00-a800-4b82-98c7-8ebcc04c2ab6/volumes"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.364844 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2ab1b57-166e-4409-8b71-0bb922787623" path="/var/lib/kubelet/pods/a2ab1b57-166e-4409-8b71-0bb922787623/volumes"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.365206 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6a4d1b5-4521-455f-9ed1-4a2de1b678ee" path="/var/lib/kubelet/pods/d6a4d1b5-4521-455f-9ed1-4a2de1b678ee/volumes"
Jan 28 16:11:04 crc kubenswrapper[4811]: E0128 16:11:04.365793 4811 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found
Jan 28 16:11:04 crc kubenswrapper[4811]: E0128 16:11:04.365858 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/23095127-8b86-445a-8c32-1e6bc14bf05e-config-data podName:23095127-8b86-445a-8c32-1e6bc14bf05e nodeName:}" failed. No retries permitted until 2026-01-28 16:11:12.365840725 +0000 UTC m=+1565.120204318 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/23095127-8b86-445a-8c32-1e6bc14bf05e-config-data") pod "rabbitmq-server-0" (UID: "23095127-8b86-445a-8c32-1e6bc14bf05e") : configmap "rabbitmq-config-data" not found
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.369594 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d75f7765-1a6b-4bb5-819a-6891694a29c8" path="/var/lib/kubelet/pods/d75f7765-1a6b-4bb5-819a-6891694a29c8/volumes"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.370628 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f35795d5-ffac-4851-914a-00dc84496f91" path="/var/lib/kubelet/pods/f35795d5-ffac-4851-914a-00dc84496f91/volumes"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.372698 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f80333f7-9036-49d9-8c68-03e4ef8f9ee8" path="/var/lib/kubelet/pods/f80333f7-9036-49d9-8c68-03e4ef8f9ee8/volumes"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.373740 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-7b7c5df494-2hz6m"]
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.373765 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-7b7c5df494-2hz6m"]
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.375887 4811 scope.go:117] "RemoveContainer" containerID="01de404d5b6e89fb138dbe6253054f8965d2469aab77f7679acb0d66fb85419b"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.412342 4811 scope.go:117] "RemoveContainer" containerID="536a91cb8574f375dfd0ebc16db3f46162b78ee4bd2c334c6a012f5ff6e1ed7a"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.438747 4811 scope.go:117] "RemoveContainer" containerID="29723b95d76a6ab96074381d9690ad4cd770a48e64bafab8c23717a36eaddbf2"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.461785 4811 scope.go:117] "RemoveContainer" containerID="7f7face6d95fe804267fa4f30c1e01cfdaa41613853274a35697b679666f13c6"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.491710 4811 scope.go:117] "RemoveContainer" containerID="7bb42b27e1f14ad69f131fe0e78ac3ea542276272ff3e8a4de4b7fc31e901928"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.531699 4811 scope.go:117] "RemoveContainer" containerID="9c930ce4914f5a68852c3724ae30e8283ace4f330ad36c534f8e4f690608fe20"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.542026 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_6dd44d16-5b30-493c-9dd2-1ba856a4393a/ovn-northd/0.log"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.551953 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"6dd44d16-5b30-493c-9dd2-1ba856a4393a","Type":"ContainerDied","Data":"016870e708b72d125c622e49d11ef1c30dd8bb7e0682bc45de42857aa38e3b58"}
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.552163 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.559903 4811 generic.go:334] "Generic (PLEG): container finished" podID="3b7599aa-7114-46c9-bf38-b6d735b75326" containerID="e7577902b3c5f2e0c6da1d3f007db93314f77eb274a3406fc13d0cf0a0a1d27e" exitCode=0
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.559987 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"3b7599aa-7114-46c9-bf38-b6d735b75326","Type":"ContainerDied","Data":"e7577902b3c5f2e0c6da1d3f007db93314f77eb274a3406fc13d0cf0a0a1d27e"}
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.570688 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b","Type":"ContainerDied","Data":"395f6a1cf5287731097f623dc9836e9c87bf16746900eaef633ccbd3245152cd"}
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.570752 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.587086 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"]
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.604597 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-northd-0"]
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.609550 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-86846dcc5c-hkg4d" podUID="73a0ad8a-2cb9-466e-b3e7-251823ea4528" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.169:9696/\": dial tcp 10.217.0.169:9696: connect: connection refused"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.613584 4811 scope.go:117] "RemoveContainer" containerID="fcc510fe7aab0a06fcaa0e25c25b295dadfdee8c254acdff99aa414590067770"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.613713 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.616419 4811 generic.go:334] "Generic (PLEG): container finished" podID="e10c1c14-4c1a-445e-9c98-1f0b6b334802" containerID="1408d806929ba9f320ef606ac871fcb79473917f47486e47e26772ffb153aa37" exitCode=0
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.617146 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7b7bd9558c-kbczp" event={"ID":"e10c1c14-4c1a-445e-9c98-1f0b6b334802","Type":"ContainerDied","Data":"1408d806929ba9f320ef606ac871fcb79473917f47486e47e26772ffb153aa37"}
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.623712 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.658580 4811 scope.go:117] "RemoveContainer" containerID="89af73945fbafecef0415a87ddc471cbf18d61957ff14ef2a2681bac6cbcc415"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.678292 4811 scope.go:117] "RemoveContainer" containerID="4bf48c2e9b3287834317b579380166a3b7e88ed93df7ceeb2163cfc00138fa94"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.702928 4811 scope.go:117] "RemoveContainer" containerID="1112ae05c1cf02d134ea38159d36c4ca6bf5c3e27f26f8a7397f9fd2b51e726b"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.730381 4811 scope.go:117] "RemoveContainer" containerID="b0391cee65f30aa023e4b8a6992aae9b85400f523a386a5570a75825a412741e"
Jan 28 16:11:04 crc kubenswrapper[4811]: I0128 16:11:04.788620 4811 scope.go:117] "RemoveContainer" containerID="518785f09823d3db909d33a7b324f729682e76e6299d31a97670582c7114b971"
Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.026538 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.037567 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7b7bd9558c-kbczp"
Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.181104 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-64ts8\" (UniqueName: \"kubernetes.io/projected/3b7599aa-7114-46c9-bf38-b6d735b75326-kube-api-access-64ts8\") pod \"3b7599aa-7114-46c9-bf38-b6d735b75326\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") "
Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.181171 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3b7599aa-7114-46c9-bf38-b6d735b75326-rabbitmq-confd\") pod \"3b7599aa-7114-46c9-bf38-b6d735b75326\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") "
Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.181267 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-fernet-keys\") pod \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\" (UID: \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\") "
Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.181321 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-credential-keys\") pod \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\" (UID: \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\") "
Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.181354 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"3b7599aa-7114-46c9-bf38-b6d735b75326\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") "
Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.181405 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3b7599aa-7114-46c9-bf38-b6d735b75326-config-data\") pod \"3b7599aa-7114-46c9-bf38-b6d735b75326\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") "
Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.181460 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3b7599aa-7114-46c9-bf38-b6d735b75326-rabbitmq-tls\") pod \"3b7599aa-7114-46c9-bf38-b6d735b75326\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") "
Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.181500 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3b7599aa-7114-46c9-bf38-b6d735b75326-plugins-conf\") pod \"3b7599aa-7114-46c9-bf38-b6d735b75326\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") "
Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.181541 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3b7599aa-7114-46c9-bf38-b6d735b75326-server-conf\") pod \"3b7599aa-7114-46c9-bf38-b6d735b75326\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") "
Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.181579 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3b7599aa-7114-46c9-bf38-b6d735b75326-rabbitmq-erlang-cookie\") pod \"3b7599aa-7114-46c9-bf38-b6d735b75326\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") "
Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.181666 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3b7599aa-7114-46c9-bf38-b6d735b75326-erlang-cookie-secret\") pod \"3b7599aa-7114-46c9-bf38-b6d735b75326\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") "
Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.181716 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3b7599aa-7114-46c9-bf38-b6d735b75326-rabbitmq-plugins\") pod \"3b7599aa-7114-46c9-bf38-b6d735b75326\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") "
Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.181763 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-config-data\") pod \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\" (UID: \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\") "
Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.181811 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3b7599aa-7114-46c9-bf38-b6d735b75326-pod-info\") pod \"3b7599aa-7114-46c9-bf38-b6d735b75326\" (UID: \"3b7599aa-7114-46c9-bf38-b6d735b75326\") "
Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.181835 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-internal-tls-certs\") pod \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\" (UID: \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\") "
Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.181922 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vwsxl\" (UniqueName: \"kubernetes.io/projected/e10c1c14-4c1a-445e-9c98-1f0b6b334802-kube-api-access-vwsxl\") pod \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\" (UID: \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\") "
Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.181941 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-combined-ca-bundle\") pod \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\" (UID: \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\") "
Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.181982 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-public-tls-certs\") pod \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\" (UID: \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\") "
Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.182033 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\"
(UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-scripts\") pod \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\" (UID: \"e10c1c14-4c1a-445e-9c98-1f0b6b334802\") " Jan 28 16:11:05 crc kubenswrapper[4811]: E0128 16:11:05.182531 4811 configmap.go:193] Couldn't get configMap openstack/ovncontroller-scripts: configmap "ovncontroller-scripts" not found Jan 28 16:11:05 crc kubenswrapper[4811]: E0128 16:11:05.182588 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-scripts podName:1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e nodeName:}" failed. No retries permitted until 2026-01-28 16:11:13.182572161 +0000 UTC m=+1565.936935734 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/configmap/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-scripts") pod "ovn-controller-fxxlf" (UID: "1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e") : configmap "ovncontroller-scripts" not found Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.183039 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3b7599aa-7114-46c9-bf38-b6d735b75326-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "3b7599aa-7114-46c9-bf38-b6d735b75326" (UID: "3b7599aa-7114-46c9-bf38-b6d735b75326"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.183622 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b7599aa-7114-46c9-bf38-b6d735b75326-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "3b7599aa-7114-46c9-bf38-b6d735b75326" (UID: "3b7599aa-7114-46c9-bf38-b6d735b75326"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.190242 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3b7599aa-7114-46c9-bf38-b6d735b75326-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "3b7599aa-7114-46c9-bf38-b6d735b75326" (UID: "3b7599aa-7114-46c9-bf38-b6d735b75326"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.193320 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b7599aa-7114-46c9-bf38-b6d735b75326-kube-api-access-64ts8" (OuterVolumeSpecName: "kube-api-access-64ts8") pod "3b7599aa-7114-46c9-bf38-b6d735b75326" (UID: "3b7599aa-7114-46c9-bf38-b6d735b75326"). InnerVolumeSpecName "kube-api-access-64ts8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.193794 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b7599aa-7114-46c9-bf38-b6d735b75326-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "3b7599aa-7114-46c9-bf38-b6d735b75326" (UID: "3b7599aa-7114-46c9-bf38-b6d735b75326"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.193956 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "persistence") pod "3b7599aa-7114-46c9-bf38-b6d735b75326" (UID: "3b7599aa-7114-46c9-bf38-b6d735b75326"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.194214 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-scripts" (OuterVolumeSpecName: "scripts") pod "e10c1c14-4c1a-445e-9c98-1f0b6b334802" (UID: "e10c1c14-4c1a-445e-9c98-1f0b6b334802"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.194506 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "e10c1c14-4c1a-445e-9c98-1f0b6b334802" (UID: "e10c1c14-4c1a-445e-9c98-1f0b6b334802"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.194420 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e10c1c14-4c1a-445e-9c98-1f0b6b334802-kube-api-access-vwsxl" (OuterVolumeSpecName: "kube-api-access-vwsxl") pod "e10c1c14-4c1a-445e-9c98-1f0b6b334802" (UID: "e10c1c14-4c1a-445e-9c98-1f0b6b334802"). InnerVolumeSpecName "kube-api-access-vwsxl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.195329 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b7599aa-7114-46c9-bf38-b6d735b75326-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "3b7599aa-7114-46c9-bf38-b6d735b75326" (UID: "3b7599aa-7114-46c9-bf38-b6d735b75326"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.195797 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/3b7599aa-7114-46c9-bf38-b6d735b75326-pod-info" (OuterVolumeSpecName: "pod-info") pod "3b7599aa-7114-46c9-bf38-b6d735b75326" (UID: "3b7599aa-7114-46c9-bf38-b6d735b75326"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.196310 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "e10c1c14-4c1a-445e-9c98-1f0b6b334802" (UID: "e10c1c14-4c1a-445e-9c98-1f0b6b334802"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.220343 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b7599aa-7114-46c9-bf38-b6d735b75326-config-data" (OuterVolumeSpecName: "config-data") pod "3b7599aa-7114-46c9-bf38-b6d735b75326" (UID: "3b7599aa-7114-46c9-bf38-b6d735b75326"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.241666 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e10c1c14-4c1a-445e-9c98-1f0b6b334802" (UID: "e10c1c14-4c1a-445e-9c98-1f0b6b334802"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.249664 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b7599aa-7114-46c9-bf38-b6d735b75326-server-conf" (OuterVolumeSpecName: "server-conf") pod "3b7599aa-7114-46c9-bf38-b6d735b75326" (UID: "3b7599aa-7114-46c9-bf38-b6d735b75326"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.254705 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-config-data" (OuterVolumeSpecName: "config-data") pod "e10c1c14-4c1a-445e-9c98-1f0b6b334802" (UID: "e10c1c14-4c1a-445e-9c98-1f0b6b334802"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.266307 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "e10c1c14-4c1a-445e-9c98-1f0b6b334802" (UID: "e10c1c14-4c1a-445e-9c98-1f0b6b334802"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.283861 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-64ts8\" (UniqueName: \"kubernetes.io/projected/3b7599aa-7114-46c9-bf38-b6d735b75326-kube-api-access-64ts8\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.283896 4811 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.283911 4811 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.283943 4811 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.283958 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3b7599aa-7114-46c9-bf38-b6d735b75326-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.283969 4811 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3b7599aa-7114-46c9-bf38-b6d735b75326-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.283980 4811 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: 
\"kubernetes.io/configmap/3b7599aa-7114-46c9-bf38-b6d735b75326-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.283991 4811 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3b7599aa-7114-46c9-bf38-b6d735b75326-server-conf\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.284002 4811 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3b7599aa-7114-46c9-bf38-b6d735b75326-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.284013 4811 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3b7599aa-7114-46c9-bf38-b6d735b75326-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.284024 4811 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3b7599aa-7114-46c9-bf38-b6d735b75326-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.284034 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.284045 4811 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3b7599aa-7114-46c9-bf38-b6d735b75326-pod-info\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.284057 4811 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.284068 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vwsxl\" (UniqueName: \"kubernetes.io/projected/e10c1c14-4c1a-445e-9c98-1f0b6b334802-kube-api-access-vwsxl\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.284079 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.284089 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.298352 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "e10c1c14-4c1a-445e-9c98-1f0b6b334802" (UID: "e10c1c14-4c1a-445e-9c98-1f0b6b334802"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.309025 4811 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.327876 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b7599aa-7114-46c9-bf38-b6d735b75326-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "3b7599aa-7114-46c9-bf38-b6d735b75326" (UID: "3b7599aa-7114-46c9-bf38-b6d735b75326"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.386339 4811 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e10c1c14-4c1a-445e-9c98-1f0b6b334802-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.386379 4811 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3b7599aa-7114-46c9-bf38-b6d735b75326-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.386388 4811 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:05 crc kubenswrapper[4811]: E0128 16:11:05.438903 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6cc40c82aa6779ede3cdbda3f05c0f2657e35a4da3ee3cc5d95911b9941c4dcd" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Jan 28 16:11:05 crc kubenswrapper[4811]: E0128 16:11:05.440135 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6cc40c82aa6779ede3cdbda3f05c0f2657e35a4da3ee3cc5d95911b9941c4dcd" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Jan 28 16:11:05 crc kubenswrapper[4811]: E0128 16:11:05.441682 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6cc40c82aa6779ede3cdbda3f05c0f2657e35a4da3ee3cc5d95911b9941c4dcd" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Jan 28 16:11:05 crc kubenswrapper[4811]: E0128 16:11:05.441714 4811 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/openstack-galera-0" podUID="156027c6-3cec-4317-8267-eb234c90af40" containerName="galera" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.628483 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7b7bd9558c-kbczp" event={"ID":"e10c1c14-4c1a-445e-9c98-1f0b6b334802","Type":"ContainerDied","Data":"e90dd6d3975db175f7c87a09a881db70b66a4faad043f22b529970f28a479de6"} Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.628848 4811 scope.go:117] "RemoveContainer" 
containerID="1408d806929ba9f320ef606ac871fcb79473917f47486e47e26772ffb153aa37" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.628526 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7b7bd9558c-kbczp" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.640292 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"3b7599aa-7114-46c9-bf38-b6d735b75326","Type":"ContainerDied","Data":"45b948098b16be556ad661750cce4e4c59d4eb55bf4a2b34bf0ebe3d475671d9"} Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.640392 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.653725 4811 generic.go:334] "Generic (PLEG): container finished" podID="94afc9c1-3ddc-4ad1-9df2-03c593f1d536" containerID="16ecbb402df17bba24033b41f310dbf0a19457f1f7fd2b1097a18cc03319751f" exitCode=0 Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.653787 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"94afc9c1-3ddc-4ad1-9df2-03c593f1d536","Type":"ContainerDied","Data":"16ecbb402df17bba24033b41f310dbf0a19457f1f7fd2b1097a18cc03319751f"} Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.664745 4811 generic.go:334] "Generic (PLEG): container finished" podID="23095127-8b86-445a-8c32-1e6bc14bf05e" containerID="783bfe55389f6600eee189683b41f4d30b0ec2595be48aebc9462d419fec02fc" exitCode=0 Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.664906 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"23095127-8b86-445a-8c32-1e6bc14bf05e","Type":"ContainerDied","Data":"783bfe55389f6600eee189683b41f4d30b0ec2595be48aebc9462d419fec02fc"} Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.675111 4811 scope.go:117] "RemoveContainer" containerID="e7577902b3c5f2e0c6da1d3f007db93314f77eb274a3406fc13d0cf0a0a1d27e" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.706508 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.731356 4811 scope.go:117] "RemoveContainer" containerID="750be0d18f938176efd1aca389b486dcb19e5d7b9966a3f4fb7d1aa4cce062d6" Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.749004 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.777691 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-7b7bd9558c-kbczp"] Jan 28 16:11:05 crc kubenswrapper[4811]: I0128 16:11:05.791691 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-7b7bd9558c-kbczp"] Jan 28 16:11:06 crc kubenswrapper[4811]: E0128 16:11:05.895351 4811 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Jan 28 16:11:06 crc kubenswrapper[4811]: command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: 2026-01-28T16:10:59Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Jan 28 16:11:06 crc kubenswrapper[4811]: /etc/init.d/functions: line 589: 456 Alarm clock "$@" Jan 28 16:11:06 crc kubenswrapper[4811]: > execCommand=["/usr/share/ovn/scripts/ovn-ctl","stop_controller"] containerName="ovn-controller" pod="openstack/ovn-controller-fxxlf" message=< Jan 28 16:11:06 crc kubenswrapper[4811]: Exiting 
ovn-controller (1) [FAILED] Jan 28 16:11:06 crc kubenswrapper[4811]: Killing ovn-controller (1) [ OK ] Jan 28 16:11:06 crc kubenswrapper[4811]: 2026-01-28T16:10:59Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Jan 28 16:11:06 crc kubenswrapper[4811]: /etc/init.d/functions: line 589: 456 Alarm clock "$@" Jan 28 16:11:06 crc kubenswrapper[4811]: > Jan 28 16:11:06 crc kubenswrapper[4811]: E0128 16:11:05.895393 4811 kuberuntime_container.go:691] "PreStop hook failed" err=< Jan 28 16:11:06 crc kubenswrapper[4811]: command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: 2026-01-28T16:10:59Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Jan 28 16:11:06 crc kubenswrapper[4811]: /etc/init.d/functions: line 589: 456 Alarm clock "$@" Jan 28 16:11:06 crc kubenswrapper[4811]: > pod="openstack/ovn-controller-fxxlf" podUID="1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e" containerName="ovn-controller" containerID="cri-o://ba856df2f3f7c7a7f9e7775dc89b697ebd4dd48e8a805217e5ee26242f8b0270" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:05.895488 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-fxxlf" podUID="1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e" containerName="ovn-controller" containerID="cri-o://ba856df2f3f7c7a7f9e7775dc89b697ebd4dd48e8a805217e5ee26242f8b0270" gracePeriod=22 Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:05.982709 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.107078 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/23095127-8b86-445a-8c32-1e6bc14bf05e-pod-info\") pod \"23095127-8b86-445a-8c32-1e6bc14bf05e\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.107155 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/23095127-8b86-445a-8c32-1e6bc14bf05e-erlang-cookie-secret\") pod \"23095127-8b86-445a-8c32-1e6bc14bf05e\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.107221 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/23095127-8b86-445a-8c32-1e6bc14bf05e-rabbitmq-erlang-cookie\") pod \"23095127-8b86-445a-8c32-1e6bc14bf05e\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.107258 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/23095127-8b86-445a-8c32-1e6bc14bf05e-rabbitmq-plugins\") pod \"23095127-8b86-445a-8c32-1e6bc14bf05e\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.107278 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r7p8s\" (UniqueName: \"kubernetes.io/projected/23095127-8b86-445a-8c32-1e6bc14bf05e-kube-api-access-r7p8s\") pod \"23095127-8b86-445a-8c32-1e6bc14bf05e\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.107297 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: 
\"kubernetes.io/configmap/23095127-8b86-445a-8c32-1e6bc14bf05e-plugins-conf\") pod \"23095127-8b86-445a-8c32-1e6bc14bf05e\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.107356 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/23095127-8b86-445a-8c32-1e6bc14bf05e-rabbitmq-confd\") pod \"23095127-8b86-445a-8c32-1e6bc14bf05e\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.107389 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/23095127-8b86-445a-8c32-1e6bc14bf05e-config-data\") pod \"23095127-8b86-445a-8c32-1e6bc14bf05e\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.107459 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/23095127-8b86-445a-8c32-1e6bc14bf05e-rabbitmq-tls\") pod \"23095127-8b86-445a-8c32-1e6bc14bf05e\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.107508 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"23095127-8b86-445a-8c32-1e6bc14bf05e\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.107525 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/23095127-8b86-445a-8c32-1e6bc14bf05e-server-conf\") pod \"23095127-8b86-445a-8c32-1e6bc14bf05e\" (UID: \"23095127-8b86-445a-8c32-1e6bc14bf05e\") " Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.108042 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23095127-8b86-445a-8c32-1e6bc14bf05e-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "23095127-8b86-445a-8c32-1e6bc14bf05e" (UID: "23095127-8b86-445a-8c32-1e6bc14bf05e"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.108361 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23095127-8b86-445a-8c32-1e6bc14bf05e-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "23095127-8b86-445a-8c32-1e6bc14bf05e" (UID: "23095127-8b86-445a-8c32-1e6bc14bf05e"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.107775 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23095127-8b86-445a-8c32-1e6bc14bf05e-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "23095127-8b86-445a-8c32-1e6bc14bf05e" (UID: "23095127-8b86-445a-8c32-1e6bc14bf05e"). InnerVolumeSpecName "rabbitmq-plugins". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.108820 4811 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/23095127-8b86-445a-8c32-1e6bc14bf05e-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.108842 4811 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/23095127-8b86-445a-8c32-1e6bc14bf05e-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.108853 4811 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/23095127-8b86-445a-8c32-1e6bc14bf05e-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.113593 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/23095127-8b86-445a-8c32-1e6bc14bf05e-pod-info" (OuterVolumeSpecName: "pod-info") pod "23095127-8b86-445a-8c32-1e6bc14bf05e" (UID: "23095127-8b86-445a-8c32-1e6bc14bf05e"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.113684 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23095127-8b86-445a-8c32-1e6bc14bf05e-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "23095127-8b86-445a-8c32-1e6bc14bf05e" (UID: "23095127-8b86-445a-8c32-1e6bc14bf05e"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.113683 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23095127-8b86-445a-8c32-1e6bc14bf05e-kube-api-access-r7p8s" (OuterVolumeSpecName: "kube-api-access-r7p8s") pod "23095127-8b86-445a-8c32-1e6bc14bf05e" (UID: "23095127-8b86-445a-8c32-1e6bc14bf05e"). InnerVolumeSpecName "kube-api-access-r7p8s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.113916 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "persistence") pod "23095127-8b86-445a-8c32-1e6bc14bf05e" (UID: "23095127-8b86-445a-8c32-1e6bc14bf05e"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.114987 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23095127-8b86-445a-8c32-1e6bc14bf05e-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "23095127-8b86-445a-8c32-1e6bc14bf05e" (UID: "23095127-8b86-445a-8c32-1e6bc14bf05e"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.131310 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23095127-8b86-445a-8c32-1e6bc14bf05e-config-data" (OuterVolumeSpecName: "config-data") pod "23095127-8b86-445a-8c32-1e6bc14bf05e" (UID: "23095127-8b86-445a-8c32-1e6bc14bf05e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.157388 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23095127-8b86-445a-8c32-1e6bc14bf05e-server-conf" (OuterVolumeSpecName: "server-conf") pod "23095127-8b86-445a-8c32-1e6bc14bf05e" (UID: "23095127-8b86-445a-8c32-1e6bc14bf05e"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.194634 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23095127-8b86-445a-8c32-1e6bc14bf05e-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "23095127-8b86-445a-8c32-1e6bc14bf05e" (UID: "23095127-8b86-445a-8c32-1e6bc14bf05e"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.210771 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r7p8s\" (UniqueName: \"kubernetes.io/projected/23095127-8b86-445a-8c32-1e6bc14bf05e-kube-api-access-r7p8s\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.210805 4811 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/23095127-8b86-445a-8c32-1e6bc14bf05e-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.210817 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/23095127-8b86-445a-8c32-1e6bc14bf05e-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.210827 4811 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/23095127-8b86-445a-8c32-1e6bc14bf05e-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.210859 4811 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.210877 4811 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/23095127-8b86-445a-8c32-1e6bc14bf05e-server-conf\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.210888 4811 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/23095127-8b86-445a-8c32-1e6bc14bf05e-pod-info\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.210900 4811 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/23095127-8b86-445a-8c32-1e6bc14bf05e-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.236194 4811 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.312253 4811 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:06 crc 
kubenswrapper[4811]: I0128 16:11:06.312564 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.363503 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b7599aa-7114-46c9-bf38-b6d735b75326" path="/var/lib/kubelet/pods/3b7599aa-7114-46c9-bf38-b6d735b75326/volumes" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.364501 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6dd44d16-5b30-493c-9dd2-1ba856a4393a" path="/var/lib/kubelet/pods/6dd44d16-5b30-493c-9dd2-1ba856a4393a/volumes" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.365920 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b" path="/var/lib/kubelet/pods/9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b/volumes" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.367013 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1a4fd4e-29a2-464f-aca7-3f856ed15221" path="/var/lib/kubelet/pods/a1a4fd4e-29a2-464f-aca7-3f856ed15221/volumes" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.367757 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e10c1c14-4c1a-445e-9c98-1f0b6b334802" path="/var/lib/kubelet/pods/e10c1c14-4c1a-445e-9c98-1f0b6b334802/volumes" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.413291 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94afc9c1-3ddc-4ad1-9df2-03c593f1d536-combined-ca-bundle\") pod \"94afc9c1-3ddc-4ad1-9df2-03c593f1d536\" (UID: \"94afc9c1-3ddc-4ad1-9df2-03c593f1d536\") " Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.413587 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dzqqd\" (UniqueName: \"kubernetes.io/projected/94afc9c1-3ddc-4ad1-9df2-03c593f1d536-kube-api-access-dzqqd\") pod \"94afc9c1-3ddc-4ad1-9df2-03c593f1d536\" (UID: \"94afc9c1-3ddc-4ad1-9df2-03c593f1d536\") " Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.413803 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94afc9c1-3ddc-4ad1-9df2-03c593f1d536-config-data\") pod \"94afc9c1-3ddc-4ad1-9df2-03c593f1d536\" (UID: \"94afc9c1-3ddc-4ad1-9df2-03c593f1d536\") " Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.417339 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94afc9c1-3ddc-4ad1-9df2-03c593f1d536-kube-api-access-dzqqd" (OuterVolumeSpecName: "kube-api-access-dzqqd") pod "94afc9c1-3ddc-4ad1-9df2-03c593f1d536" (UID: "94afc9c1-3ddc-4ad1-9df2-03c593f1d536"). InnerVolumeSpecName "kube-api-access-dzqqd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.433087 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94afc9c1-3ddc-4ad1-9df2-03c593f1d536-config-data" (OuterVolumeSpecName: "config-data") pod "94afc9c1-3ddc-4ad1-9df2-03c593f1d536" (UID: "94afc9c1-3ddc-4ad1-9df2-03c593f1d536"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.433624 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94afc9c1-3ddc-4ad1-9df2-03c593f1d536-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "94afc9c1-3ddc-4ad1-9df2-03c593f1d536" (UID: "94afc9c1-3ddc-4ad1-9df2-03c593f1d536"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.515858 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dzqqd\" (UniqueName: \"kubernetes.io/projected/94afc9c1-3ddc-4ad1-9df2-03c593f1d536-kube-api-access-dzqqd\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.515891 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94afc9c1-3ddc-4ad1-9df2-03c593f1d536-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.515901 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94afc9c1-3ddc-4ad1-9df2-03c593f1d536-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.659883 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.668688 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.722078 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gpvfr\" (UniqueName: \"kubernetes.io/projected/f76f6c0a-7254-4c76-86c3-e1004cf38303-kube-api-access-gpvfr\") pod \"f76f6c0a-7254-4c76-86c3-e1004cf38303\" (UID: \"f76f6c0a-7254-4c76-86c3-e1004cf38303\") " Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.722158 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f76f6c0a-7254-4c76-86c3-e1004cf38303-combined-ca-bundle\") pod \"f76f6c0a-7254-4c76-86c3-e1004cf38303\" (UID: \"f76f6c0a-7254-4c76-86c3-e1004cf38303\") " Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.722359 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f76f6c0a-7254-4c76-86c3-e1004cf38303-config-data\") pod \"f76f6c0a-7254-4c76-86c3-e1004cf38303\" (UID: \"f76f6c0a-7254-4c76-86c3-e1004cf38303\") " Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.728498 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f76f6c0a-7254-4c76-86c3-e1004cf38303-kube-api-access-gpvfr" (OuterVolumeSpecName: "kube-api-access-gpvfr") pod "f76f6c0a-7254-4c76-86c3-e1004cf38303" (UID: "f76f6c0a-7254-4c76-86c3-e1004cf38303"). InnerVolumeSpecName "kube-api-access-gpvfr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.750464 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"94afc9c1-3ddc-4ad1-9df2-03c593f1d536","Type":"ContainerDied","Data":"7c0b1ce8170f351d8458ca6d6586f555434faafaa0a62af39ce6252f9fd301ea"} Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.750515 4811 scope.go:117] "RemoveContainer" containerID="16ecbb402df17bba24033b41f310dbf0a19457f1f7fd2b1097a18cc03319751f" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.750601 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.762177 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f76f6c0a-7254-4c76-86c3-e1004cf38303-config-data" (OuterVolumeSpecName: "config-data") pod "f76f6c0a-7254-4c76-86c3-e1004cf38303" (UID: "f76f6c0a-7254-4c76-86c3-e1004cf38303"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.762355 4811 generic.go:334] "Generic (PLEG): container finished" podID="156027c6-3cec-4317-8267-eb234c90af40" containerID="6cc40c82aa6779ede3cdbda3f05c0f2657e35a4da3ee3cc5d95911b9941c4dcd" exitCode=0 Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.763783 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"156027c6-3cec-4317-8267-eb234c90af40","Type":"ContainerDied","Data":"6cc40c82aa6779ede3cdbda3f05c0f2657e35a4da3ee3cc5d95911b9941c4dcd"} Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.766617 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f76f6c0a-7254-4c76-86c3-e1004cf38303-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f76f6c0a-7254-4c76-86c3-e1004cf38303" (UID: "f76f6c0a-7254-4c76-86c3-e1004cf38303"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.773406 4811 generic.go:334] "Generic (PLEG): container finished" podID="f76f6c0a-7254-4c76-86c3-e1004cf38303" containerID="8e97f5379d59be0d7d51607bfdc713017cc666c439edf2b55720cc1ee7a94ec6" exitCode=0 Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.773470 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.773527 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"f76f6c0a-7254-4c76-86c3-e1004cf38303","Type":"ContainerDied","Data":"8e97f5379d59be0d7d51607bfdc713017cc666c439edf2b55720cc1ee7a94ec6"} Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.773562 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"f76f6c0a-7254-4c76-86c3-e1004cf38303","Type":"ContainerDied","Data":"9161aabcf2a1884c4fdb16c359c2594471405799187bda220493499b15fcb2c3"} Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.777893 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"23095127-8b86-445a-8c32-1e6bc14bf05e","Type":"ContainerDied","Data":"5d1af227a546798f00c079ebf2fcb81520a200698bcd1d0959a021cd15eea35d"} Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.778071 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.783875 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-fxxlf_1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e/ovn-controller/0.log" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.783913 4811 generic.go:334] "Generic (PLEG): container finished" podID="1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e" containerID="ba856df2f3f7c7a7f9e7775dc89b697ebd4dd48e8a805217e5ee26242f8b0270" exitCode=139 Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.784042 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-fxxlf" event={"ID":"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e","Type":"ContainerDied","Data":"ba856df2f3f7c7a7f9e7775dc89b697ebd4dd48e8a805217e5ee26242f8b0270"} Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.787665 4811 generic.go:334] "Generic (PLEG): container finished" podID="f5158a65-ce89-4a2b-9a19-0e4a6070562a" containerID="d9d98a9ef617e0914e95c49e3f4209449a918bf828d1812a874f55e9669c0fbe" exitCode=0 Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.787717 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f5158a65-ce89-4a2b-9a19-0e4a6070562a","Type":"ContainerDied","Data":"d9d98a9ef617e0914e95c49e3f4209449a918bf828d1812a874f55e9669c0fbe"} Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.787740 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f5158a65-ce89-4a2b-9a19-0e4a6070562a","Type":"ContainerDied","Data":"742362a3f5bc1738537bdc9aded1dc0550718c399c5f461f7a2ff428fe2ff892"} Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.787787 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.811879 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.825416 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.826056 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5158a65-ce89-4a2b-9a19-0e4a6070562a-config-data\") pod \"f5158a65-ce89-4a2b-9a19-0e4a6070562a\" (UID: \"f5158a65-ce89-4a2b-9a19-0e4a6070562a\") " Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.826094 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5158a65-ce89-4a2b-9a19-0e4a6070562a-combined-ca-bundle\") pod \"f5158a65-ce89-4a2b-9a19-0e4a6070562a\" (UID: \"f5158a65-ce89-4a2b-9a19-0e4a6070562a\") " Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.826130 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fgdmz\" (UniqueName: \"kubernetes.io/projected/f5158a65-ce89-4a2b-9a19-0e4a6070562a-kube-api-access-fgdmz\") pod \"f5158a65-ce89-4a2b-9a19-0e4a6070562a\" (UID: \"f5158a65-ce89-4a2b-9a19-0e4a6070562a\") " Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.826512 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f76f6c0a-7254-4c76-86c3-e1004cf38303-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.826532 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gpvfr\" (UniqueName: \"kubernetes.io/projected/f76f6c0a-7254-4c76-86c3-e1004cf38303-kube-api-access-gpvfr\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.826544 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f76f6c0a-7254-4c76-86c3-e1004cf38303-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.827203 4811 scope.go:117] "RemoveContainer" containerID="8e97f5379d59be0d7d51607bfdc713017cc666c439edf2b55720cc1ee7a94ec6" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.837714 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5158a65-ce89-4a2b-9a19-0e4a6070562a-kube-api-access-fgdmz" (OuterVolumeSpecName: "kube-api-access-fgdmz") pod "f5158a65-ce89-4a2b-9a19-0e4a6070562a" (UID: "f5158a65-ce89-4a2b-9a19-0e4a6070562a"). InnerVolumeSpecName "kube-api-access-fgdmz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.839113 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.844768 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.864176 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5158a65-ce89-4a2b-9a19-0e4a6070562a-config-data" (OuterVolumeSpecName: "config-data") pod "f5158a65-ce89-4a2b-9a19-0e4a6070562a" (UID: "f5158a65-ce89-4a2b-9a19-0e4a6070562a"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.870868 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.876967 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.878121 4811 scope.go:117] "RemoveContainer" containerID="8e97f5379d59be0d7d51607bfdc713017cc666c439edf2b55720cc1ee7a94ec6" Jan 28 16:11:06 crc kubenswrapper[4811]: E0128 16:11:06.878667 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e97f5379d59be0d7d51607bfdc713017cc666c439edf2b55720cc1ee7a94ec6\": container with ID starting with 8e97f5379d59be0d7d51607bfdc713017cc666c439edf2b55720cc1ee7a94ec6 not found: ID does not exist" containerID="8e97f5379d59be0d7d51607bfdc713017cc666c439edf2b55720cc1ee7a94ec6" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.878780 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e97f5379d59be0d7d51607bfdc713017cc666c439edf2b55720cc1ee7a94ec6"} err="failed to get container status \"8e97f5379d59be0d7d51607bfdc713017cc666c439edf2b55720cc1ee7a94ec6\": rpc error: code = NotFound desc = could not find container \"8e97f5379d59be0d7d51607bfdc713017cc666c439edf2b55720cc1ee7a94ec6\": container with ID starting with 8e97f5379d59be0d7d51607bfdc713017cc666c439edf2b55720cc1ee7a94ec6 not found: ID does not exist" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.878887 4811 scope.go:117] "RemoveContainer" containerID="783bfe55389f6600eee189683b41f4d30b0ec2595be48aebc9462d419fec02fc" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.878831 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5158a65-ce89-4a2b-9a19-0e4a6070562a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f5158a65-ce89-4a2b-9a19-0e4a6070562a" (UID: "f5158a65-ce89-4a2b-9a19-0e4a6070562a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.900939 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-fxxlf_1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e/ovn-controller/0.log" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.901217 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-fxxlf" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.917011 4811 scope.go:117] "RemoveContainer" containerID="28aa321bf0e3ef329793682970b67cddf5dcdb81c24e0b66b553354710937d33" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.935688 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5158a65-ce89-4a2b-9a19-0e4a6070562a-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.935882 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5158a65-ce89-4a2b-9a19-0e4a6070562a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:06 crc kubenswrapper[4811]: I0128 16:11:06.935941 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fgdmz\" (UniqueName: \"kubernetes.io/projected/f5158a65-ce89-4a2b-9a19-0e4a6070562a-kube-api-access-fgdmz\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.024218 4811 scope.go:117] "RemoveContainer" containerID="d9d98a9ef617e0914e95c49e3f4209449a918bf828d1812a874f55e9669c0fbe" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.037156 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-var-log-ovn\") pod \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\" (UID: \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\") " Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.037488 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cwpj2\" (UniqueName: \"kubernetes.io/projected/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-kube-api-access-cwpj2\") pod \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\" (UID: \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\") " Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.037619 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-combined-ca-bundle\") pod \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\" (UID: \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\") " Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.037888 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-var-run\") pod \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\" (UID: \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\") " Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.038101 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-ovn-controller-tls-certs\") pod \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\" (UID: \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\") " Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.038212 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-scripts\") pod \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\" (UID: \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\") " Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.038322 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: 
\"kubernetes.io/host-path/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-var-run-ovn\") pod \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\" (UID: \"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e\") " Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.038870 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e" (UID: "1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.038966 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e" (UID: "1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.040557 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-var-run" (OuterVolumeSpecName: "var-run") pod "1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e" (UID: "1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.041792 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-scripts" (OuterVolumeSpecName: "scripts") pod "1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e" (UID: "1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.042165 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-kube-api-access-cwpj2" (OuterVolumeSpecName: "kube-api-access-cwpj2") pod "1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e" (UID: "1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e"). InnerVolumeSpecName "kube-api-access-cwpj2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.099769 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e" (UID: "1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.112666 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-ovn-controller-tls-certs" (OuterVolumeSpecName: "ovn-controller-tls-certs") pod "1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e" (UID: "1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e"). InnerVolumeSpecName "ovn-controller-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.139696 4811 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-var-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.139729 4811 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-var-log-ovn\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.139740 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cwpj2\" (UniqueName: \"kubernetes.io/projected/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-kube-api-access-cwpj2\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.139752 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.139763 4811 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-var-run\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.139770 4811 reconciler_common.go:293] "Volume detached for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-ovn-controller-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.139778 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:07 crc kubenswrapper[4811]: E0128 16:11:07.174675 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0ff340a32e4bc187020c5b051ade227cf56575c72a89db98472087024a25e2bf" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 28 16:11:07 crc kubenswrapper[4811]: E0128 16:11:07.174709 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21 is running failed: container process not found" containerID="d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 28 16:11:07 crc kubenswrapper[4811]: E0128 16:11:07.178465 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21 is running failed: container process not found" containerID="d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 28 16:11:07 crc kubenswrapper[4811]: E0128 16:11:07.178816 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 
d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21 is running failed: container process not found" containerID="d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 28 16:11:07 crc kubenswrapper[4811]: E0128 16:11:07.178861 4811 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-wzxwb" podUID="8bfbbf41-033c-479e-b625-396378f8afa2" containerName="ovsdb-server" Jan 28 16:11:07 crc kubenswrapper[4811]: E0128 16:11:07.179105 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0ff340a32e4bc187020c5b051ade227cf56575c72a89db98472087024a25e2bf" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 28 16:11:07 crc kubenswrapper[4811]: E0128 16:11:07.180284 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0ff340a32e4bc187020c5b051ade227cf56575c72a89db98472087024a25e2bf" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 28 16:11:07 crc kubenswrapper[4811]: E0128 16:11:07.180343 4811 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-wzxwb" podUID="8bfbbf41-033c-479e-b625-396378f8afa2" containerName="ovs-vswitchd" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.210848 4811 scope.go:117] "RemoveContainer" containerID="d9d98a9ef617e0914e95c49e3f4209449a918bf828d1812a874f55e9669c0fbe" Jan 28 16:11:07 crc kubenswrapper[4811]: E0128 16:11:07.211651 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9d98a9ef617e0914e95c49e3f4209449a918bf828d1812a874f55e9669c0fbe\": container with ID starting with d9d98a9ef617e0914e95c49e3f4209449a918bf828d1812a874f55e9669c0fbe not found: ID does not exist" containerID="d9d98a9ef617e0914e95c49e3f4209449a918bf828d1812a874f55e9669c0fbe" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.211703 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9d98a9ef617e0914e95c49e3f4209449a918bf828d1812a874f55e9669c0fbe"} err="failed to get container status \"d9d98a9ef617e0914e95c49e3f4209449a918bf828d1812a874f55e9669c0fbe\": rpc error: code = NotFound desc = could not find container \"d9d98a9ef617e0914e95c49e3f4209449a918bf828d1812a874f55e9669c0fbe\": container with ID starting with d9d98a9ef617e0914e95c49e3f4209449a918bf828d1812a874f55e9669c0fbe not found: ID does not exist" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.245162 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.261710 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.289470 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.342322 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/156027c6-3cec-4317-8267-eb234c90af40-config-data-default\") pod \"156027c6-3cec-4317-8267-eb234c90af40\" (UID: \"156027c6-3cec-4317-8267-eb234c90af40\") " Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.342372 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p6mkz\" (UniqueName: \"kubernetes.io/projected/156027c6-3cec-4317-8267-eb234c90af40-kube-api-access-p6mkz\") pod \"156027c6-3cec-4317-8267-eb234c90af40\" (UID: \"156027c6-3cec-4317-8267-eb234c90af40\") " Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.342408 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"156027c6-3cec-4317-8267-eb234c90af40\" (UID: \"156027c6-3cec-4317-8267-eb234c90af40\") " Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.342473 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/156027c6-3cec-4317-8267-eb234c90af40-config-data-generated\") pod \"156027c6-3cec-4317-8267-eb234c90af40\" (UID: \"156027c6-3cec-4317-8267-eb234c90af40\") " Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.342505 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/156027c6-3cec-4317-8267-eb234c90af40-kolla-config\") pod \"156027c6-3cec-4317-8267-eb234c90af40\" (UID: \"156027c6-3cec-4317-8267-eb234c90af40\") " Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.342589 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/156027c6-3cec-4317-8267-eb234c90af40-galera-tls-certs\") pod \"156027c6-3cec-4317-8267-eb234c90af40\" (UID: \"156027c6-3cec-4317-8267-eb234c90af40\") " Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.342606 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/156027c6-3cec-4317-8267-eb234c90af40-operator-scripts\") pod \"156027c6-3cec-4317-8267-eb234c90af40\" (UID: \"156027c6-3cec-4317-8267-eb234c90af40\") " Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.342645 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/156027c6-3cec-4317-8267-eb234c90af40-combined-ca-bundle\") pod \"156027c6-3cec-4317-8267-eb234c90af40\" (UID: \"156027c6-3cec-4317-8267-eb234c90af40\") " Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.343522 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/156027c6-3cec-4317-8267-eb234c90af40-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "156027c6-3cec-4317-8267-eb234c90af40" (UID: 
"156027c6-3cec-4317-8267-eb234c90af40"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.344003 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/156027c6-3cec-4317-8267-eb234c90af40-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "156027c6-3cec-4317-8267-eb234c90af40" (UID: "156027c6-3cec-4317-8267-eb234c90af40"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.344143 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/156027c6-3cec-4317-8267-eb234c90af40-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "156027c6-3cec-4317-8267-eb234c90af40" (UID: "156027c6-3cec-4317-8267-eb234c90af40"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.347496 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/156027c6-3cec-4317-8267-eb234c90af40-kube-api-access-p6mkz" (OuterVolumeSpecName: "kube-api-access-p6mkz") pod "156027c6-3cec-4317-8267-eb234c90af40" (UID: "156027c6-3cec-4317-8267-eb234c90af40"). InnerVolumeSpecName "kube-api-access-p6mkz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.348406 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/156027c6-3cec-4317-8267-eb234c90af40-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "156027c6-3cec-4317-8267-eb234c90af40" (UID: "156027c6-3cec-4317-8267-eb234c90af40"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.361669 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "mysql-db") pod "156027c6-3cec-4317-8267-eb234c90af40" (UID: "156027c6-3cec-4317-8267-eb234c90af40"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.388073 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/156027c6-3cec-4317-8267-eb234c90af40-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "156027c6-3cec-4317-8267-eb234c90af40" (UID: "156027c6-3cec-4317-8267-eb234c90af40"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.398117 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/156027c6-3cec-4317-8267-eb234c90af40-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "156027c6-3cec-4317-8267-eb234c90af40" (UID: "156027c6-3cec-4317-8267-eb234c90af40"). InnerVolumeSpecName "galera-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.444417 4811 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.444680 4811 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/156027c6-3cec-4317-8267-eb234c90af40-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.444807 4811 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/156027c6-3cec-4317-8267-eb234c90af40-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.444881 4811 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/156027c6-3cec-4317-8267-eb234c90af40-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.444894 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/156027c6-3cec-4317-8267-eb234c90af40-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.444908 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/156027c6-3cec-4317-8267-eb234c90af40-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.444930 4811 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/156027c6-3cec-4317-8267-eb234c90af40-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.444942 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p6mkz\" (UniqueName: \"kubernetes.io/projected/156027c6-3cec-4317-8267-eb234c90af40-kube-api-access-p6mkz\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.461166 4811 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.466149 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.548462 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j6sj4\" (UniqueName: \"kubernetes.io/projected/4fca8964-49f3-477e-8b8c-b64a5200e386-kube-api-access-j6sj4\") pod \"4fca8964-49f3-477e-8b8c-b64a5200e386\" (UID: \"4fca8964-49f3-477e-8b8c-b64a5200e386\") " Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.548553 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4fca8964-49f3-477e-8b8c-b64a5200e386-sg-core-conf-yaml\") pod \"4fca8964-49f3-477e-8b8c-b64a5200e386\" (UID: \"4fca8964-49f3-477e-8b8c-b64a5200e386\") " Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.548705 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4fca8964-49f3-477e-8b8c-b64a5200e386-scripts\") pod \"4fca8964-49f3-477e-8b8c-b64a5200e386\" (UID: \"4fca8964-49f3-477e-8b8c-b64a5200e386\") " Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.548789 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fca8964-49f3-477e-8b8c-b64a5200e386-config-data\") pod \"4fca8964-49f3-477e-8b8c-b64a5200e386\" (UID: \"4fca8964-49f3-477e-8b8c-b64a5200e386\") " Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.548844 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4fca8964-49f3-477e-8b8c-b64a5200e386-log-httpd\") pod \"4fca8964-49f3-477e-8b8c-b64a5200e386\" (UID: \"4fca8964-49f3-477e-8b8c-b64a5200e386\") " Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.548870 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fca8964-49f3-477e-8b8c-b64a5200e386-combined-ca-bundle\") pod \"4fca8964-49f3-477e-8b8c-b64a5200e386\" (UID: \"4fca8964-49f3-477e-8b8c-b64a5200e386\") " Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.550065 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4fca8964-49f3-477e-8b8c-b64a5200e386-run-httpd\") pod \"4fca8964-49f3-477e-8b8c-b64a5200e386\" (UID: \"4fca8964-49f3-477e-8b8c-b64a5200e386\") " Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.550126 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fca8964-49f3-477e-8b8c-b64a5200e386-ceilometer-tls-certs\") pod \"4fca8964-49f3-477e-8b8c-b64a5200e386\" (UID: \"4fca8964-49f3-477e-8b8c-b64a5200e386\") " Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.550302 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fca8964-49f3-477e-8b8c-b64a5200e386-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "4fca8964-49f3-477e-8b8c-b64a5200e386" (UID: "4fca8964-49f3-477e-8b8c-b64a5200e386"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.550569 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fca8964-49f3-477e-8b8c-b64a5200e386-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "4fca8964-49f3-477e-8b8c-b64a5200e386" (UID: "4fca8964-49f3-477e-8b8c-b64a5200e386"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.551042 4811 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4fca8964-49f3-477e-8b8c-b64a5200e386-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.551068 4811 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4fca8964-49f3-477e-8b8c-b64a5200e386-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.551085 4811 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.554089 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fca8964-49f3-477e-8b8c-b64a5200e386-kube-api-access-j6sj4" (OuterVolumeSpecName: "kube-api-access-j6sj4") pod "4fca8964-49f3-477e-8b8c-b64a5200e386" (UID: "4fca8964-49f3-477e-8b8c-b64a5200e386"). InnerVolumeSpecName "kube-api-access-j6sj4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.570189 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fca8964-49f3-477e-8b8c-b64a5200e386-scripts" (OuterVolumeSpecName: "scripts") pod "4fca8964-49f3-477e-8b8c-b64a5200e386" (UID: "4fca8964-49f3-477e-8b8c-b64a5200e386"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.573387 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fca8964-49f3-477e-8b8c-b64a5200e386-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "4fca8964-49f3-477e-8b8c-b64a5200e386" (UID: "4fca8964-49f3-477e-8b8c-b64a5200e386"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.590383 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fca8964-49f3-477e-8b8c-b64a5200e386-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "4fca8964-49f3-477e-8b8c-b64a5200e386" (UID: "4fca8964-49f3-477e-8b8c-b64a5200e386"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.609666 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fca8964-49f3-477e-8b8c-b64a5200e386-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4fca8964-49f3-477e-8b8c-b64a5200e386" (UID: "4fca8964-49f3-477e-8b8c-b64a5200e386"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.624485 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fca8964-49f3-477e-8b8c-b64a5200e386-config-data" (OuterVolumeSpecName: "config-data") pod "4fca8964-49f3-477e-8b8c-b64a5200e386" (UID: "4fca8964-49f3-477e-8b8c-b64a5200e386"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.652006 4811 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4fca8964-49f3-477e-8b8c-b64a5200e386-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.652055 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4fca8964-49f3-477e-8b8c-b64a5200e386-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.652065 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fca8964-49f3-477e-8b8c-b64a5200e386-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.652075 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fca8964-49f3-477e-8b8c-b64a5200e386-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.652086 4811 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fca8964-49f3-477e-8b8c-b64a5200e386-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.652096 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j6sj4\" (UniqueName: \"kubernetes.io/projected/4fca8964-49f3-477e-8b8c-b64a5200e386-kube-api-access-j6sj4\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.828147 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-fxxlf_1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e/ovn-controller/0.log" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.828225 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-fxxlf" event={"ID":"1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e","Type":"ContainerDied","Data":"86e197af024f90b7f8e4f0b756032a9a09c5fd06241f6ee983b0164d265ca464"} Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.828264 4811 scope.go:117] "RemoveContainer" containerID="ba856df2f3f7c7a7f9e7775dc89b697ebd4dd48e8a805217e5ee26242f8b0270" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.828409 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-fxxlf" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.839094 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.839103 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"156027c6-3cec-4317-8267-eb234c90af40","Type":"ContainerDied","Data":"bfd28952dd023fd20d841a82907630237832481c7f70abf5c7a71ef6816f119b"} Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.843585 4811 generic.go:334] "Generic (PLEG): container finished" podID="4fca8964-49f3-477e-8b8c-b64a5200e386" containerID="261bf598712ddaaf57a16c8cec80e31e139366aba7c731d282911fc0bdab9088" exitCode=0 Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.843617 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4fca8964-49f3-477e-8b8c-b64a5200e386","Type":"ContainerDied","Data":"261bf598712ddaaf57a16c8cec80e31e139366aba7c731d282911fc0bdab9088"} Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.843634 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4fca8964-49f3-477e-8b8c-b64a5200e386","Type":"ContainerDied","Data":"90d8526cdbd4e8a0599f410846a1cd7d6f98d1195bbe610cbe6f3713a0ba9b79"} Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.843709 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.875205 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-fxxlf"] Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.886939 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-fxxlf"] Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.894744 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.903181 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.906897 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.911408 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-galera-0"] Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.958390 4811 scope.go:117] "RemoveContainer" containerID="6cc40c82aa6779ede3cdbda3f05c0f2657e35a4da3ee3cc5d95911b9941c4dcd" Jan 28 16:11:07 crc kubenswrapper[4811]: I0128 16:11:07.979172 4811 scope.go:117] "RemoveContainer" containerID="2737e6ad87982b7277c55b34f07400b82de02f83c09921c823cc3c4906fd86e9" Jan 28 16:11:08 crc kubenswrapper[4811]: I0128 16:11:08.004624 4811 scope.go:117] "RemoveContainer" containerID="2d9c65b2bee43397b8955d9320943cb62ff0d6aec47af0978b1868f4d792a3d1" Jan 28 16:11:08 crc kubenswrapper[4811]: I0128 16:11:08.034808 4811 scope.go:117] "RemoveContainer" containerID="bf5ccb17a1cc4802cfc552544575c5ab0edf7a21d95651488e4c67c1e8d0e963" Jan 28 16:11:08 crc kubenswrapper[4811]: I0128 16:11:08.050672 4811 scope.go:117] "RemoveContainer" containerID="261bf598712ddaaf57a16c8cec80e31e139366aba7c731d282911fc0bdab9088" Jan 28 16:11:08 crc kubenswrapper[4811]: I0128 16:11:08.067080 4811 scope.go:117] "RemoveContainer" containerID="42f811bb5217eb77366d4a1fe6568a51fd0e36cc0d12bb37a95c3d8b5f28e287" Jan 28 16:11:08 crc kubenswrapper[4811]: I0128 16:11:08.084303 4811 scope.go:117] "RemoveContainer" containerID="2d9c65b2bee43397b8955d9320943cb62ff0d6aec47af0978b1868f4d792a3d1" 
Jan 28 16:11:08 crc kubenswrapper[4811]: E0128 16:11:08.084658 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d9c65b2bee43397b8955d9320943cb62ff0d6aec47af0978b1868f4d792a3d1\": container with ID starting with 2d9c65b2bee43397b8955d9320943cb62ff0d6aec47af0978b1868f4d792a3d1 not found: ID does not exist" containerID="2d9c65b2bee43397b8955d9320943cb62ff0d6aec47af0978b1868f4d792a3d1"
Jan 28 16:11:08 crc kubenswrapper[4811]: I0128 16:11:08.084689 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d9c65b2bee43397b8955d9320943cb62ff0d6aec47af0978b1868f4d792a3d1"} err="failed to get container status \"2d9c65b2bee43397b8955d9320943cb62ff0d6aec47af0978b1868f4d792a3d1\": rpc error: code = NotFound desc = could not find container \"2d9c65b2bee43397b8955d9320943cb62ff0d6aec47af0978b1868f4d792a3d1\": container with ID starting with 2d9c65b2bee43397b8955d9320943cb62ff0d6aec47af0978b1868f4d792a3d1 not found: ID does not exist"
Jan 28 16:11:08 crc kubenswrapper[4811]: I0128 16:11:08.084712 4811 scope.go:117] "RemoveContainer" containerID="bf5ccb17a1cc4802cfc552544575c5ab0edf7a21d95651488e4c67c1e8d0e963"
Jan 28 16:11:08 crc kubenswrapper[4811]: E0128 16:11:08.085004 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf5ccb17a1cc4802cfc552544575c5ab0edf7a21d95651488e4c67c1e8d0e963\": container with ID starting with bf5ccb17a1cc4802cfc552544575c5ab0edf7a21d95651488e4c67c1e8d0e963 not found: ID does not exist" containerID="bf5ccb17a1cc4802cfc552544575c5ab0edf7a21d95651488e4c67c1e8d0e963"
Jan 28 16:11:08 crc kubenswrapper[4811]: I0128 16:11:08.085027 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf5ccb17a1cc4802cfc552544575c5ab0edf7a21d95651488e4c67c1e8d0e963"} err="failed to get container status \"bf5ccb17a1cc4802cfc552544575c5ab0edf7a21d95651488e4c67c1e8d0e963\": rpc error: code = NotFound desc = could not find container \"bf5ccb17a1cc4802cfc552544575c5ab0edf7a21d95651488e4c67c1e8d0e963\": container with ID starting with bf5ccb17a1cc4802cfc552544575c5ab0edf7a21d95651488e4c67c1e8d0e963 not found: ID does not exist"
Jan 28 16:11:08 crc kubenswrapper[4811]: I0128 16:11:08.085041 4811 scope.go:117] "RemoveContainer" containerID="261bf598712ddaaf57a16c8cec80e31e139366aba7c731d282911fc0bdab9088"
Jan 28 16:11:08 crc kubenswrapper[4811]: E0128 16:11:08.085297 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"261bf598712ddaaf57a16c8cec80e31e139366aba7c731d282911fc0bdab9088\": container with ID starting with 261bf598712ddaaf57a16c8cec80e31e139366aba7c731d282911fc0bdab9088 not found: ID does not exist" containerID="261bf598712ddaaf57a16c8cec80e31e139366aba7c731d282911fc0bdab9088"
Jan 28 16:11:08 crc kubenswrapper[4811]: I0128 16:11:08.085316 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"261bf598712ddaaf57a16c8cec80e31e139366aba7c731d282911fc0bdab9088"} err="failed to get container status \"261bf598712ddaaf57a16c8cec80e31e139366aba7c731d282911fc0bdab9088\": rpc error: code = NotFound desc = could not find container \"261bf598712ddaaf57a16c8cec80e31e139366aba7c731d282911fc0bdab9088\": container with ID starting with 261bf598712ddaaf57a16c8cec80e31e139366aba7c731d282911fc0bdab9088 not found: ID does not exist"
Jan 28 16:11:08 crc kubenswrapper[4811]: I0128 16:11:08.085328 4811 scope.go:117] "RemoveContainer" containerID="42f811bb5217eb77366d4a1fe6568a51fd0e36cc0d12bb37a95c3d8b5f28e287"
Jan 28 16:11:08 crc kubenswrapper[4811]: E0128 16:11:08.085624 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"42f811bb5217eb77366d4a1fe6568a51fd0e36cc0d12bb37a95c3d8b5f28e287\": container with ID starting with 42f811bb5217eb77366d4a1fe6568a51fd0e36cc0d12bb37a95c3d8b5f28e287 not found: ID does not exist" containerID="42f811bb5217eb77366d4a1fe6568a51fd0e36cc0d12bb37a95c3d8b5f28e287"
Jan 28 16:11:08 crc kubenswrapper[4811]: I0128 16:11:08.085647 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42f811bb5217eb77366d4a1fe6568a51fd0e36cc0d12bb37a95c3d8b5f28e287"} err="failed to get container status \"42f811bb5217eb77366d4a1fe6568a51fd0e36cc0d12bb37a95c3d8b5f28e287\": rpc error: code = NotFound desc = could not find container \"42f811bb5217eb77366d4a1fe6568a51fd0e36cc0d12bb37a95c3d8b5f28e287\": container with ID starting with 42f811bb5217eb77366d4a1fe6568a51fd0e36cc0d12bb37a95c3d8b5f28e287 not found: ID does not exist"
Jan 28 16:11:08 crc kubenswrapper[4811]: I0128 16:11:08.350832 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="156027c6-3cec-4317-8267-eb234c90af40" path="/var/lib/kubelet/pods/156027c6-3cec-4317-8267-eb234c90af40/volumes"
Jan 28 16:11:08 crc kubenswrapper[4811]: I0128 16:11:08.351479 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e" path="/var/lib/kubelet/pods/1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e/volumes"
Jan 28 16:11:08 crc kubenswrapper[4811]: I0128 16:11:08.352241 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23095127-8b86-445a-8c32-1e6bc14bf05e" path="/var/lib/kubelet/pods/23095127-8b86-445a-8c32-1e6bc14bf05e/volumes"
Jan 28 16:11:08 crc kubenswrapper[4811]: I0128 16:11:08.353394 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4fca8964-49f3-477e-8b8c-b64a5200e386" path="/var/lib/kubelet/pods/4fca8964-49f3-477e-8b8c-b64a5200e386/volumes"
Jan 28 16:11:08 crc kubenswrapper[4811]: I0128 16:11:08.354089 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94afc9c1-3ddc-4ad1-9df2-03c593f1d536" path="/var/lib/kubelet/pods/94afc9c1-3ddc-4ad1-9df2-03c593f1d536/volumes"
Jan 28 16:11:08 crc kubenswrapper[4811]: I0128 16:11:08.355063 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5158a65-ce89-4a2b-9a19-0e4a6070562a" path="/var/lib/kubelet/pods/f5158a65-ce89-4a2b-9a19-0e4a6070562a/volumes"
Jan 28 16:11:08 crc kubenswrapper[4811]: I0128 16:11:08.355627 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f76f6c0a-7254-4c76-86c3-e1004cf38303" path="/var/lib/kubelet/pods/f76f6c0a-7254-4c76-86c3-e1004cf38303/volumes"
Jan 28 16:11:12 crc kubenswrapper[4811]: E0128 16:11:12.173734 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21 is running failed: container process not found" containerID="d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Jan 28 16:11:12 crc kubenswrapper[4811]: E0128 16:11:12.174757 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21 is running failed: container process not found" containerID="d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Jan 28 16:11:12 crc kubenswrapper[4811]: E0128 16:11:12.175160 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21 is running failed: container process not found" containerID="d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Jan 28 16:11:12 crc kubenswrapper[4811]: E0128 16:11:12.175263 4811 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-wzxwb" podUID="8bfbbf41-033c-479e-b625-396378f8afa2" containerName="ovsdb-server"
Jan 28 16:11:12 crc kubenswrapper[4811]: E0128 16:11:12.175846 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0ff340a32e4bc187020c5b051ade227cf56575c72a89db98472087024a25e2bf" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Jan 28 16:11:12 crc kubenswrapper[4811]: E0128 16:11:12.179581 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0ff340a32e4bc187020c5b051ade227cf56575c72a89db98472087024a25e2bf" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Jan 28 16:11:12 crc kubenswrapper[4811]: E0128 16:11:12.181942 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0ff340a32e4bc187020c5b051ade227cf56575c72a89db98472087024a25e2bf" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Jan 28 16:11:12 crc kubenswrapper[4811]: E0128 16:11:12.181984 4811 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-wzxwb" podUID="8bfbbf41-033c-479e-b625-396378f8afa2" containerName="ovs-vswitchd"
Jan 28 16:11:12 crc kubenswrapper[4811]: I0128 16:11:12.339754 4811 scope.go:117] "RemoveContainer" containerID="8a7a828294dde0bd8dbd211cdc8b81d51144a40905535ed75ebac45ca7ba49c2"
Jan 28 16:11:12 crc kubenswrapper[4811]: E0128 16:11:12.340065 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 16:11:17 crc kubenswrapper[4811]: E0128 16:11:17.173789 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21 is running failed: container process not found" containerID="d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Jan 28 16:11:17 crc kubenswrapper[4811]: E0128 16:11:17.174793 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21 is running failed: container process not found" containerID="d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Jan 28 16:11:17 crc kubenswrapper[4811]: E0128 16:11:17.174894 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0ff340a32e4bc187020c5b051ade227cf56575c72a89db98472087024a25e2bf" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Jan 28 16:11:17 crc kubenswrapper[4811]: E0128 16:11:17.175922 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21 is running failed: container process not found" containerID="d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Jan 28 16:11:17 crc kubenswrapper[4811]: E0128 16:11:17.175987 4811 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-wzxwb" podUID="8bfbbf41-033c-479e-b625-396378f8afa2" containerName="ovsdb-server"
Jan 28 16:11:17 crc kubenswrapper[4811]: E0128 16:11:17.176150 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0ff340a32e4bc187020c5b051ade227cf56575c72a89db98472087024a25e2bf" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Jan 28 16:11:17 crc kubenswrapper[4811]: E0128 16:11:17.177339 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0ff340a32e4bc187020c5b051ade227cf56575c72a89db98472087024a25e2bf" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Jan 28 16:11:17 crc kubenswrapper[4811]: E0128 16:11:17.177476 4811 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-wzxwb" podUID="8bfbbf41-033c-479e-b625-396378f8afa2" containerName="ovs-vswitchd"
Jan 28 16:11:20 crc kubenswrapper[4811]: I0128 16:11:20.755700 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-86846dcc5c-hkg4d"
Jan 28 16:11:20 crc kubenswrapper[4811]: I0128 16:11:20.866252 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-combined-ca-bundle\") pod \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\" (UID: \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\") "
Jan 28 16:11:20 crc kubenswrapper[4811]: I0128 16:11:20.866368 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-public-tls-certs\") pod \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\" (UID: \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\") "
Jan 28 16:11:20 crc kubenswrapper[4811]: I0128 16:11:20.866406 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-httpd-config\") pod \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\" (UID: \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\") "
Jan 28 16:11:20 crc kubenswrapper[4811]: I0128 16:11:20.866460 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rzlwr\" (UniqueName: \"kubernetes.io/projected/73a0ad8a-2cb9-466e-b3e7-251823ea4528-kube-api-access-rzlwr\") pod \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\" (UID: \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\") "
Jan 28 16:11:20 crc kubenswrapper[4811]: I0128 16:11:20.866507 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-ovndb-tls-certs\") pod \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\" (UID: \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\") "
Jan 28 16:11:20 crc kubenswrapper[4811]: I0128 16:11:20.866531 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-internal-tls-certs\") pod \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\" (UID: \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\") "
Jan 28 16:11:20 crc kubenswrapper[4811]: I0128 16:11:20.866557 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-config\") pod \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\" (UID: \"73a0ad8a-2cb9-466e-b3e7-251823ea4528\") "
Jan 28 16:11:20 crc kubenswrapper[4811]: I0128 16:11:20.874056 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "73a0ad8a-2cb9-466e-b3e7-251823ea4528" (UID: "73a0ad8a-2cb9-466e-b3e7-251823ea4528"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:11:20 crc kubenswrapper[4811]: I0128 16:11:20.874091 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73a0ad8a-2cb9-466e-b3e7-251823ea4528-kube-api-access-rzlwr" (OuterVolumeSpecName: "kube-api-access-rzlwr") pod "73a0ad8a-2cb9-466e-b3e7-251823ea4528" (UID: "73a0ad8a-2cb9-466e-b3e7-251823ea4528"). InnerVolumeSpecName "kube-api-access-rzlwr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 16:11:20 crc kubenswrapper[4811]: I0128 16:11:20.905313 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "73a0ad8a-2cb9-466e-b3e7-251823ea4528" (UID: "73a0ad8a-2cb9-466e-b3e7-251823ea4528"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:11:20 crc kubenswrapper[4811]: I0128 16:11:20.907979 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "73a0ad8a-2cb9-466e-b3e7-251823ea4528" (UID: "73a0ad8a-2cb9-466e-b3e7-251823ea4528"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:11:20 crc kubenswrapper[4811]: I0128 16:11:20.909717 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-config" (OuterVolumeSpecName: "config") pod "73a0ad8a-2cb9-466e-b3e7-251823ea4528" (UID: "73a0ad8a-2cb9-466e-b3e7-251823ea4528"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:11:20 crc kubenswrapper[4811]: I0128 16:11:20.916608 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "73a0ad8a-2cb9-466e-b3e7-251823ea4528" (UID: "73a0ad8a-2cb9-466e-b3e7-251823ea4528"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:11:20 crc kubenswrapper[4811]: I0128 16:11:20.929955 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "73a0ad8a-2cb9-466e-b3e7-251823ea4528" (UID: "73a0ad8a-2cb9-466e-b3e7-251823ea4528"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:11:20 crc kubenswrapper[4811]: I0128 16:11:20.979027 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 16:11:20 crc kubenswrapper[4811]: I0128 16:11:20.979273 4811 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-public-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 28 16:11:20 crc kubenswrapper[4811]: I0128 16:11:20.979406 4811 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-httpd-config\") on node \"crc\" DevicePath \"\""
Jan 28 16:11:20 crc kubenswrapper[4811]: I0128 16:11:20.979481 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rzlwr\" (UniqueName: \"kubernetes.io/projected/73a0ad8a-2cb9-466e-b3e7-251823ea4528-kube-api-access-rzlwr\") on node \"crc\" DevicePath \"\""
Jan 28 16:11:20 crc kubenswrapper[4811]: I0128 16:11:20.979537 4811 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-ovndb-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 28 16:11:20 crc kubenswrapper[4811]: I0128 16:11:20.979588 4811 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 28 16:11:20 crc kubenswrapper[4811]: I0128 16:11:20.979639 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/73a0ad8a-2cb9-466e-b3e7-251823ea4528-config\") on node \"crc\" DevicePath \"\""
Jan 28 16:11:20 crc kubenswrapper[4811]: I0128 16:11:20.979580 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-86846dcc5c-hkg4d" event={"ID":"73a0ad8a-2cb9-466e-b3e7-251823ea4528","Type":"ContainerDied","Data":"51a598763a4f9f6e93146ac5d33c8ce2be97bf7c452813033779bf52b0e6819b"}
Jan 28 16:11:20 crc kubenswrapper[4811]: I0128 16:11:20.979542 4811 generic.go:334] "Generic (PLEG): container finished" podID="73a0ad8a-2cb9-466e-b3e7-251823ea4528" containerID="51a598763a4f9f6e93146ac5d33c8ce2be97bf7c452813033779bf52b0e6819b" exitCode=0
Jan 28 16:11:20 crc kubenswrapper[4811]: I0128 16:11:20.979782 4811 scope.go:117] "RemoveContainer" containerID="d30f3af4b0041c5e2a1bda1036b2e0f19543d876ef7630815ac0f86fd2173605"
Jan 28 16:11:20 crc kubenswrapper[4811]: I0128 16:11:20.979692 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-86846dcc5c-hkg4d"
Jan 28 16:11:20 crc kubenswrapper[4811]: I0128 16:11:20.979924 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-86846dcc5c-hkg4d" event={"ID":"73a0ad8a-2cb9-466e-b3e7-251823ea4528","Type":"ContainerDied","Data":"94aa501eaff1bffc80939872bbbba936d439b2e914bad0e1f78b161b900664ae"}
Jan 28 16:11:21 crc kubenswrapper[4811]: I0128 16:11:21.002121 4811 scope.go:117] "RemoveContainer" containerID="51a598763a4f9f6e93146ac5d33c8ce2be97bf7c452813033779bf52b0e6819b"
Jan 28 16:11:21 crc kubenswrapper[4811]: I0128 16:11:21.023919 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-86846dcc5c-hkg4d"]
Jan 28 16:11:21 crc kubenswrapper[4811]: I0128 16:11:21.024786 4811 scope.go:117] "RemoveContainer" containerID="d30f3af4b0041c5e2a1bda1036b2e0f19543d876ef7630815ac0f86fd2173605"
Jan 28 16:11:21 crc kubenswrapper[4811]: E0128 16:11:21.025250 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d30f3af4b0041c5e2a1bda1036b2e0f19543d876ef7630815ac0f86fd2173605\": container with ID starting with d30f3af4b0041c5e2a1bda1036b2e0f19543d876ef7630815ac0f86fd2173605 not found: ID does not exist" containerID="d30f3af4b0041c5e2a1bda1036b2e0f19543d876ef7630815ac0f86fd2173605"
Jan 28 16:11:21 crc kubenswrapper[4811]: I0128 16:11:21.025330 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d30f3af4b0041c5e2a1bda1036b2e0f19543d876ef7630815ac0f86fd2173605"} err="failed to get container status \"d30f3af4b0041c5e2a1bda1036b2e0f19543d876ef7630815ac0f86fd2173605\": rpc error: code = NotFound desc = could not find container \"d30f3af4b0041c5e2a1bda1036b2e0f19543d876ef7630815ac0f86fd2173605\": container with ID starting with d30f3af4b0041c5e2a1bda1036b2e0f19543d876ef7630815ac0f86fd2173605 not found: ID does not exist"
Jan 28 16:11:21 crc kubenswrapper[4811]: I0128 16:11:21.025363 4811 scope.go:117] "RemoveContainer" containerID="51a598763a4f9f6e93146ac5d33c8ce2be97bf7c452813033779bf52b0e6819b"
Jan 28 16:11:21 crc kubenswrapper[4811]: E0128 16:11:21.025700 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51a598763a4f9f6e93146ac5d33c8ce2be97bf7c452813033779bf52b0e6819b\": container with ID starting with 51a598763a4f9f6e93146ac5d33c8ce2be97bf7c452813033779bf52b0e6819b not found: ID does not exist" containerID="51a598763a4f9f6e93146ac5d33c8ce2be97bf7c452813033779bf52b0e6819b"
Jan 28 16:11:21 crc kubenswrapper[4811]: I0128 16:11:21.025725 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51a598763a4f9f6e93146ac5d33c8ce2be97bf7c452813033779bf52b0e6819b"} err="failed to get container status \"51a598763a4f9f6e93146ac5d33c8ce2be97bf7c452813033779bf52b0e6819b\": rpc error: code = NotFound desc = could not find container \"51a598763a4f9f6e93146ac5d33c8ce2be97bf7c452813033779bf52b0e6819b\": container with ID starting with 51a598763a4f9f6e93146ac5d33c8ce2be97bf7c452813033779bf52b0e6819b not found: ID does not exist"
Jan 28 16:11:21 crc kubenswrapper[4811]: I0128 16:11:21.029481 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-86846dcc5c-hkg4d"]
Jan 28 16:11:22 crc kubenswrapper[4811]: E0128 16:11:22.173411 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking
if PID of d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21 is running failed: container process not found" containerID="d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 28 16:11:22 crc kubenswrapper[4811]: E0128 16:11:22.173955 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21 is running failed: container process not found" containerID="d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 28 16:11:22 crc kubenswrapper[4811]: E0128 16:11:22.174724 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21 is running failed: container process not found" containerID="d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 28 16:11:22 crc kubenswrapper[4811]: E0128 16:11:22.174845 4811 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-wzxwb" podUID="8bfbbf41-033c-479e-b625-396378f8afa2" containerName="ovsdb-server" Jan 28 16:11:22 crc kubenswrapper[4811]: E0128 16:11:22.175296 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0ff340a32e4bc187020c5b051ade227cf56575c72a89db98472087024a25e2bf" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 28 16:11:22 crc kubenswrapper[4811]: E0128 16:11:22.176880 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0ff340a32e4bc187020c5b051ade227cf56575c72a89db98472087024a25e2bf" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 28 16:11:22 crc kubenswrapper[4811]: E0128 16:11:22.178546 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0ff340a32e4bc187020c5b051ade227cf56575c72a89db98472087024a25e2bf" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 28 16:11:22 crc kubenswrapper[4811]: E0128 16:11:22.178582 4811 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-wzxwb" podUID="8bfbbf41-033c-479e-b625-396378f8afa2" containerName="ovs-vswitchd" Jan 28 16:11:22 crc kubenswrapper[4811]: I0128 16:11:22.353851 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73a0ad8a-2cb9-466e-b3e7-251823ea4528" path="/var/lib/kubelet/pods/73a0ad8a-2cb9-466e-b3e7-251823ea4528/volumes" Jan 28 16:11:26 crc kubenswrapper[4811]: I0128 
16:11:26.340029 4811 scope.go:117] "RemoveContainer" containerID="8a7a828294dde0bd8dbd211cdc8b81d51144a40905535ed75ebac45ca7ba49c2" Jan 28 16:11:26 crc kubenswrapper[4811]: E0128 16:11:26.341237 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:11:27 crc kubenswrapper[4811]: E0128 16:11:27.173843 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21 is running failed: container process not found" containerID="d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 28 16:11:27 crc kubenswrapper[4811]: E0128 16:11:27.174403 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21 is running failed: container process not found" containerID="d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 28 16:11:27 crc kubenswrapper[4811]: E0128 16:11:27.175003 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21 is running failed: container process not found" containerID="d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 28 16:11:27 crc kubenswrapper[4811]: E0128 16:11:27.175053 4811 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-wzxwb" podUID="8bfbbf41-033c-479e-b625-396378f8afa2" containerName="ovsdb-server" Jan 28 16:11:27 crc kubenswrapper[4811]: E0128 16:11:27.175621 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0ff340a32e4bc187020c5b051ade227cf56575c72a89db98472087024a25e2bf" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 28 16:11:27 crc kubenswrapper[4811]: E0128 16:11:27.177519 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0ff340a32e4bc187020c5b051ade227cf56575c72a89db98472087024a25e2bf" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 28 16:11:27 crc kubenswrapper[4811]: E0128 16:11:27.181237 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , 
exit code -1" containerID="0ff340a32e4bc187020c5b051ade227cf56575c72a89db98472087024a25e2bf" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 28 16:11:27 crc kubenswrapper[4811]: E0128 16:11:27.181295 4811 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-wzxwb" podUID="8bfbbf41-033c-479e-b625-396378f8afa2" containerName="ovs-vswitchd" Jan 28 16:11:28 crc kubenswrapper[4811]: I0128 16:11:28.817572 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Jan 28 16:11:28 crc kubenswrapper[4811]: I0128 16:11:28.907949 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-combined-ca-bundle\") pod \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\" (UID: \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\") " Jan 28 16:11:28 crc kubenswrapper[4811]: I0128 16:11:28.908007 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-etc-swift\") pod \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\" (UID: \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\") " Jan 28 16:11:28 crc kubenswrapper[4811]: I0128 16:11:28.908067 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-cache\") pod \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\" (UID: \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\") " Jan 28 16:11:28 crc kubenswrapper[4811]: I0128 16:11:28.908087 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-lock\") pod \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\" (UID: \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\") " Jan 28 16:11:28 crc kubenswrapper[4811]: I0128 16:11:28.908119 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\" (UID: \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\") " Jan 28 16:11:28 crc kubenswrapper[4811]: I0128 16:11:28.908157 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9nzrl\" (UniqueName: \"kubernetes.io/projected/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-kube-api-access-9nzrl\") pod \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\" (UID: \"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b\") " Jan 28 16:11:28 crc kubenswrapper[4811]: I0128 16:11:28.911470 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-lock" (OuterVolumeSpecName: "lock") pod "84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" (UID: "84ae0602-24d6-43f7-84d5-c9aa7ed04d7b"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:11:28 crc kubenswrapper[4811]: I0128 16:11:28.912673 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-cache" (OuterVolumeSpecName: "cache") pod "84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" (UID: "84ae0602-24d6-43f7-84d5-c9aa7ed04d7b"). InnerVolumeSpecName "cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:11:28 crc kubenswrapper[4811]: I0128 16:11:28.920377 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-kube-api-access-9nzrl" (OuterVolumeSpecName: "kube-api-access-9nzrl") pod "84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" (UID: "84ae0602-24d6-43f7-84d5-c9aa7ed04d7b"). InnerVolumeSpecName "kube-api-access-9nzrl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:28 crc kubenswrapper[4811]: I0128 16:11:28.920581 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" (UID: "84ae0602-24d6-43f7-84d5-c9aa7ed04d7b"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:28 crc kubenswrapper[4811]: I0128 16:11:28.925620 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "swift") pod "84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" (UID: "84ae0602-24d6-43f7-84d5-c9aa7ed04d7b"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.009542 4811 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-cache\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.009575 4811 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-lock\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.009609 4811 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.009623 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9nzrl\" (UniqueName: \"kubernetes.io/projected/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-kube-api-access-9nzrl\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.009635 4811 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.019826 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-wzxwb_8bfbbf41-033c-479e-b625-396378f8afa2/ovs-vswitchd/0.log" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.020644 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-wzxwb" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.025494 4811 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.099083 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-wzxwb_8bfbbf41-033c-479e-b625-396378f8afa2/ovs-vswitchd/0.log" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.099898 4811 generic.go:334] "Generic (PLEG): container finished" podID="8bfbbf41-033c-479e-b625-396378f8afa2" containerID="0ff340a32e4bc187020c5b051ade227cf56575c72a89db98472087024a25e2bf" exitCode=137 Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.099964 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-wzxwb" event={"ID":"8bfbbf41-033c-479e-b625-396378f8afa2","Type":"ContainerDied","Data":"0ff340a32e4bc187020c5b051ade227cf56575c72a89db98472087024a25e2bf"} Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.099990 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-wzxwb" event={"ID":"8bfbbf41-033c-479e-b625-396378f8afa2","Type":"ContainerDied","Data":"6d7f40bd9c1402eada25f27c0958d15150774e9af5936dda2dff719e3daeedb6"} Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.100007 4811 scope.go:117] "RemoveContainer" containerID="0ff340a32e4bc187020c5b051ade227cf56575c72a89db98472087024a25e2bf" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.100009 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-wzxwb" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.102189 4811 generic.go:334] "Generic (PLEG): container finished" podID="ff73f2e3-0c2c-4008-bc61-36e65a0ad776" containerID="86828187d9cd31239028cb631f45f10cb80918e0ec369e86d8c2a82f84f3b061" exitCode=137 Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.102250 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ff73f2e3-0c2c-4008-bc61-36e65a0ad776","Type":"ContainerDied","Data":"86828187d9cd31239028cb631f45f10cb80918e0ec369e86d8c2a82f84f3b061"} Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.109848 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8bfbbf41-033c-479e-b625-396378f8afa2-scripts\") pod \"8bfbbf41-033c-479e-b625-396378f8afa2\" (UID: \"8bfbbf41-033c-479e-b625-396378f8afa2\") " Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.109882 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/8bfbbf41-033c-479e-b625-396378f8afa2-etc-ovs\") pod \"8bfbbf41-033c-479e-b625-396378f8afa2\" (UID: \"8bfbbf41-033c-479e-b625-396378f8afa2\") " Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.109994 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/8bfbbf41-033c-479e-b625-396378f8afa2-var-log\") pod \"8bfbbf41-033c-479e-b625-396378f8afa2\" (UID: \"8bfbbf41-033c-479e-b625-396378f8afa2\") " Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.110020 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib\" (UniqueName: 
\"kubernetes.io/host-path/8bfbbf41-033c-479e-b625-396378f8afa2-var-lib\") pod \"8bfbbf41-033c-479e-b625-396378f8afa2\" (UID: \"8bfbbf41-033c-479e-b625-396378f8afa2\") " Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.110063 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5dpb4\" (UniqueName: \"kubernetes.io/projected/8bfbbf41-033c-479e-b625-396378f8afa2-kube-api-access-5dpb4\") pod \"8bfbbf41-033c-479e-b625-396378f8afa2\" (UID: \"8bfbbf41-033c-479e-b625-396378f8afa2\") " Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.110087 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8bfbbf41-033c-479e-b625-396378f8afa2-var-log" (OuterVolumeSpecName: "var-log") pod "8bfbbf41-033c-479e-b625-396378f8afa2" (UID: "8bfbbf41-033c-479e-b625-396378f8afa2"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.110108 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8bfbbf41-033c-479e-b625-396378f8afa2-var-lib" (OuterVolumeSpecName: "var-lib") pod "8bfbbf41-033c-479e-b625-396378f8afa2" (UID: "8bfbbf41-033c-479e-b625-396378f8afa2"). InnerVolumeSpecName "var-lib". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.110118 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8bfbbf41-033c-479e-b625-396378f8afa2-etc-ovs" (OuterVolumeSpecName: "etc-ovs") pod "8bfbbf41-033c-479e-b625-396378f8afa2" (UID: "8bfbbf41-033c-479e-b625-396378f8afa2"). InnerVolumeSpecName "etc-ovs". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.110157 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8bfbbf41-033c-479e-b625-396378f8afa2-var-run\") pod \"8bfbbf41-033c-479e-b625-396378f8afa2\" (UID: \"8bfbbf41-033c-479e-b625-396378f8afa2\") " Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.110231 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8bfbbf41-033c-479e-b625-396378f8afa2-var-run" (OuterVolumeSpecName: "var-run") pod "8bfbbf41-033c-479e-b625-396378f8afa2" (UID: "8bfbbf41-033c-479e-b625-396378f8afa2"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.110438 4811 generic.go:334] "Generic (PLEG): container finished" podID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerID="4816fdef8ee72288ed6f44ab40270249e2109c09ec8ad6ae0094d790712afe47" exitCode=137 Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.110463 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b","Type":"ContainerDied","Data":"4816fdef8ee72288ed6f44ab40270249e2109c09ec8ad6ae0094d790712afe47"} Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.110485 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"84ae0602-24d6-43f7-84d5-c9aa7ed04d7b","Type":"ContainerDied","Data":"f26372c2e1c249ba75c4cceec86f49d43ffd77081b900992110ba959502c84f1"} Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.110493 4811 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8bfbbf41-033c-479e-b625-396378f8afa2-var-run\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.110505 4811 reconciler_common.go:293] "Volume detached for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/8bfbbf41-033c-479e-b625-396378f8afa2-etc-ovs\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.110530 4811 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/8bfbbf41-033c-479e-b625-396378f8afa2-var-log\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.110540 4811 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.110548 4811 reconciler_common.go:293] "Volume detached for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/8bfbbf41-033c-479e-b625-396378f8afa2-var-lib\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.110583 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.110883 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8bfbbf41-033c-479e-b625-396378f8afa2-scripts" (OuterVolumeSpecName: "scripts") pod "8bfbbf41-033c-479e-b625-396378f8afa2" (UID: "8bfbbf41-033c-479e-b625-396378f8afa2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.112601 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bfbbf41-033c-479e-b625-396378f8afa2-kube-api-access-5dpb4" (OuterVolumeSpecName: "kube-api-access-5dpb4") pod "8bfbbf41-033c-479e-b625-396378f8afa2" (UID: "8bfbbf41-033c-479e-b625-396378f8afa2"). InnerVolumeSpecName "kube-api-access-5dpb4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.133791 4811 scope.go:117] "RemoveContainer" containerID="d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.151486 4811 scope.go:117] "RemoveContainer" containerID="5b49862714893762fa981a0ba02a64086097e365120730fcea92cbe6bae9fa05" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.177345 4811 scope.go:117] "RemoveContainer" containerID="0ff340a32e4bc187020c5b051ade227cf56575c72a89db98472087024a25e2bf" Jan 28 16:11:29 crc kubenswrapper[4811]: E0128 16:11:29.177971 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ff340a32e4bc187020c5b051ade227cf56575c72a89db98472087024a25e2bf\": container with ID starting with 0ff340a32e4bc187020c5b051ade227cf56575c72a89db98472087024a25e2bf not found: ID does not exist" containerID="0ff340a32e4bc187020c5b051ade227cf56575c72a89db98472087024a25e2bf" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.178014 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ff340a32e4bc187020c5b051ade227cf56575c72a89db98472087024a25e2bf"} err="failed to get container status \"0ff340a32e4bc187020c5b051ade227cf56575c72a89db98472087024a25e2bf\": rpc error: code = NotFound desc = could not find container \"0ff340a32e4bc187020c5b051ade227cf56575c72a89db98472087024a25e2bf\": container with ID starting with 0ff340a32e4bc187020c5b051ade227cf56575c72a89db98472087024a25e2bf not found: ID does not exist" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.178046 4811 scope.go:117] "RemoveContainer" containerID="d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21" Jan 28 16:11:29 crc kubenswrapper[4811]: E0128 16:11:29.178424 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21\": container with ID starting with d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21 not found: ID does not exist" containerID="d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.178492 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21"} err="failed to get container status \"d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21\": rpc error: code = NotFound desc = could not find container \"d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21\": container with ID starting with d5039b48ec59ce95104ae4bf9c2c1bedd842e192ad3829d80ec0bc9084c24f21 not found: ID does not exist" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.178524 4811 scope.go:117] "RemoveContainer" containerID="5b49862714893762fa981a0ba02a64086097e365120730fcea92cbe6bae9fa05" Jan 28 16:11:29 crc kubenswrapper[4811]: E0128 16:11:29.178860 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b49862714893762fa981a0ba02a64086097e365120730fcea92cbe6bae9fa05\": container with ID starting with 5b49862714893762fa981a0ba02a64086097e365120730fcea92cbe6bae9fa05 not found: ID does not exist" containerID="5b49862714893762fa981a0ba02a64086097e365120730fcea92cbe6bae9fa05" Jan 28 16:11:29 crc 
kubenswrapper[4811]: I0128 16:11:29.178896 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b49862714893762fa981a0ba02a64086097e365120730fcea92cbe6bae9fa05"} err="failed to get container status \"5b49862714893762fa981a0ba02a64086097e365120730fcea92cbe6bae9fa05\": rpc error: code = NotFound desc = could not find container \"5b49862714893762fa981a0ba02a64086097e365120730fcea92cbe6bae9fa05\": container with ID starting with 5b49862714893762fa981a0ba02a64086097e365120730fcea92cbe6bae9fa05 not found: ID does not exist" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.178922 4811 scope.go:117] "RemoveContainer" containerID="4816fdef8ee72288ed6f44ab40270249e2109c09ec8ad6ae0094d790712afe47" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.190220 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" (UID: "84ae0602-24d6-43f7-84d5-c9aa7ed04d7b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.198659 4811 scope.go:117] "RemoveContainer" containerID="07584449cf1e8492fdd30850781850c18a6beb2ad58ae4c5d57dde647dfa08c3" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.212390 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5dpb4\" (UniqueName: \"kubernetes.io/projected/8bfbbf41-033c-479e-b625-396378f8afa2-kube-api-access-5dpb4\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.212471 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8bfbbf41-033c-479e-b625-396378f8afa2-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.212503 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.229596 4811 scope.go:117] "RemoveContainer" containerID="2ff7e313dd106d84ce405602fc4e33fb5da9f55f4e92c636152ab812ff49a6b8" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.249742 4811 scope.go:117] "RemoveContainer" containerID="a455d95149982d555c46bea1485d4f01300e6c7c316898ea0120eeec661bed44" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.275502 4811 scope.go:117] "RemoveContainer" containerID="df884a847f6dadbb8aca00b6654043b6c0784215f4e1b939b394aa4eb681e365" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.296159 4811 scope.go:117] "RemoveContainer" containerID="b2aab4d8e8aa65a46a59daae5fc92084fdb3171e37ce4f82301d54bc92d83bac" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.335754 4811 scope.go:117] "RemoveContainer" containerID="037d8c9004a401b696bf1926d9cfc0d899e6a6f211a5a2b6dab9e90a49238a12" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.384695 4811 scope.go:117] "RemoveContainer" containerID="de17a3ae96cd44c5e7539204c9ecb4a99908a1d59c0cf7b1b30906afe8cca2ea" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.415418 4811 scope.go:117] "RemoveContainer" containerID="4c641018196714d8caf4ab6b48967eb4bb8ff1d06fab1301e8b3ab46913539dd" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.432383 4811 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/ovn-controller-ovs-wzxwb"] Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.432583 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.438336 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-ovs-wzxwb"] Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.447570 4811 scope.go:117] "RemoveContainer" containerID="edf4c2725cabb62cf88b0f7f7190df88433ba5987d5a8be69c4b5a42e602f8aa" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.461945 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.468253 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-storage-0"] Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.483764 4811 scope.go:117] "RemoveContainer" containerID="bbba77e73ba1d8f7f46e97a42a8636a5c7ea1703f7da10b9daf21ef5e23f03e9" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.522538 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-config-data-custom\") pod \"ff73f2e3-0c2c-4008-bc61-36e65a0ad776\" (UID: \"ff73f2e3-0c2c-4008-bc61-36e65a0ad776\") " Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.522815 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-config-data\") pod \"ff73f2e3-0c2c-4008-bc61-36e65a0ad776\" (UID: \"ff73f2e3-0c2c-4008-bc61-36e65a0ad776\") " Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.522923 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-scripts\") pod \"ff73f2e3-0c2c-4008-bc61-36e65a0ad776\" (UID: \"ff73f2e3-0c2c-4008-bc61-36e65a0ad776\") " Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.523103 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-etc-machine-id\") pod \"ff73f2e3-0c2c-4008-bc61-36e65a0ad776\" (UID: \"ff73f2e3-0c2c-4008-bc61-36e65a0ad776\") " Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.523208 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mxmsm\" (UniqueName: \"kubernetes.io/projected/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-kube-api-access-mxmsm\") pod \"ff73f2e3-0c2c-4008-bc61-36e65a0ad776\" (UID: \"ff73f2e3-0c2c-4008-bc61-36e65a0ad776\") " Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.523299 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-combined-ca-bundle\") pod \"ff73f2e3-0c2c-4008-bc61-36e65a0ad776\" (UID: \"ff73f2e3-0c2c-4008-bc61-36e65a0ad776\") " Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.523163 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "ff73f2e3-0c2c-4008-bc61-36e65a0ad776" (UID: "ff73f2e3-0c2c-4008-bc61-36e65a0ad776"). 
InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.523659 4811 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.526011 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-scripts" (OuterVolumeSpecName: "scripts") pod "ff73f2e3-0c2c-4008-bc61-36e65a0ad776" (UID: "ff73f2e3-0c2c-4008-bc61-36e65a0ad776"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.526472 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ff73f2e3-0c2c-4008-bc61-36e65a0ad776" (UID: "ff73f2e3-0c2c-4008-bc61-36e65a0ad776"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.528294 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-kube-api-access-mxmsm" (OuterVolumeSpecName: "kube-api-access-mxmsm") pod "ff73f2e3-0c2c-4008-bc61-36e65a0ad776" (UID: "ff73f2e3-0c2c-4008-bc61-36e65a0ad776"). InnerVolumeSpecName "kube-api-access-mxmsm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.542534 4811 scope.go:117] "RemoveContainer" containerID="91e208e8d04b20770d870040f31d89e907527f347a650e9696cbc4d272f573aa" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.558636 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ff73f2e3-0c2c-4008-bc61-36e65a0ad776" (UID: "ff73f2e3-0c2c-4008-bc61-36e65a0ad776"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.559108 4811 scope.go:117] "RemoveContainer" containerID="37e7bde4db551b38403da3cfc7c146a5e67d421d13ffa40d87ce60bbfdaef222" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.574176 4811 scope.go:117] "RemoveContainer" containerID="417e8d88bc175d3ce7bec37395bdbe9c0f310c931fbd100484ebab310a34e41a" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.594778 4811 scope.go:117] "RemoveContainer" containerID="fa14b738e3c3edb62ca81416e8099d28a73bafc1416ebecc67cb280bd16a3afb" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.610308 4811 scope.go:117] "RemoveContainer" containerID="4816fdef8ee72288ed6f44ab40270249e2109c09ec8ad6ae0094d790712afe47" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.610662 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-config-data" (OuterVolumeSpecName: "config-data") pod "ff73f2e3-0c2c-4008-bc61-36e65a0ad776" (UID: "ff73f2e3-0c2c-4008-bc61-36e65a0ad776"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:11:29 crc kubenswrapper[4811]: E0128 16:11:29.610908 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4816fdef8ee72288ed6f44ab40270249e2109c09ec8ad6ae0094d790712afe47\": container with ID starting with 4816fdef8ee72288ed6f44ab40270249e2109c09ec8ad6ae0094d790712afe47 not found: ID does not exist" containerID="4816fdef8ee72288ed6f44ab40270249e2109c09ec8ad6ae0094d790712afe47" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.611025 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4816fdef8ee72288ed6f44ab40270249e2109c09ec8ad6ae0094d790712afe47"} err="failed to get container status \"4816fdef8ee72288ed6f44ab40270249e2109c09ec8ad6ae0094d790712afe47\": rpc error: code = NotFound desc = could not find container \"4816fdef8ee72288ed6f44ab40270249e2109c09ec8ad6ae0094d790712afe47\": container with ID starting with 4816fdef8ee72288ed6f44ab40270249e2109c09ec8ad6ae0094d790712afe47 not found: ID does not exist" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.611122 4811 scope.go:117] "RemoveContainer" containerID="07584449cf1e8492fdd30850781850c18a6beb2ad58ae4c5d57dde647dfa08c3" Jan 28 16:11:29 crc kubenswrapper[4811]: E0128 16:11:29.611557 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07584449cf1e8492fdd30850781850c18a6beb2ad58ae4c5d57dde647dfa08c3\": container with ID starting with 07584449cf1e8492fdd30850781850c18a6beb2ad58ae4c5d57dde647dfa08c3 not found: ID does not exist" containerID="07584449cf1e8492fdd30850781850c18a6beb2ad58ae4c5d57dde647dfa08c3" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.611606 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07584449cf1e8492fdd30850781850c18a6beb2ad58ae4c5d57dde647dfa08c3"} err="failed to get container status \"07584449cf1e8492fdd30850781850c18a6beb2ad58ae4c5d57dde647dfa08c3\": rpc error: code = NotFound desc = could not find container \"07584449cf1e8492fdd30850781850c18a6beb2ad58ae4c5d57dde647dfa08c3\": container with ID starting with 07584449cf1e8492fdd30850781850c18a6beb2ad58ae4c5d57dde647dfa08c3 not found: ID does not exist" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.611639 4811 scope.go:117] "RemoveContainer" containerID="2ff7e313dd106d84ce405602fc4e33fb5da9f55f4e92c636152ab812ff49a6b8" Jan 28 16:11:29 crc kubenswrapper[4811]: E0128 16:11:29.611974 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ff7e313dd106d84ce405602fc4e33fb5da9f55f4e92c636152ab812ff49a6b8\": container with ID starting with 2ff7e313dd106d84ce405602fc4e33fb5da9f55f4e92c636152ab812ff49a6b8 not found: ID does not exist" containerID="2ff7e313dd106d84ce405602fc4e33fb5da9f55f4e92c636152ab812ff49a6b8" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.612070 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ff7e313dd106d84ce405602fc4e33fb5da9f55f4e92c636152ab812ff49a6b8"} err="failed to get container status \"2ff7e313dd106d84ce405602fc4e33fb5da9f55f4e92c636152ab812ff49a6b8\": rpc error: code = NotFound desc = could not find container \"2ff7e313dd106d84ce405602fc4e33fb5da9f55f4e92c636152ab812ff49a6b8\": container with ID starting with 2ff7e313dd106d84ce405602fc4e33fb5da9f55f4e92c636152ab812ff49a6b8 
not found: ID does not exist" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.612157 4811 scope.go:117] "RemoveContainer" containerID="a455d95149982d555c46bea1485d4f01300e6c7c316898ea0120eeec661bed44" Jan 28 16:11:29 crc kubenswrapper[4811]: E0128 16:11:29.612616 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a455d95149982d555c46bea1485d4f01300e6c7c316898ea0120eeec661bed44\": container with ID starting with a455d95149982d555c46bea1485d4f01300e6c7c316898ea0120eeec661bed44 not found: ID does not exist" containerID="a455d95149982d555c46bea1485d4f01300e6c7c316898ea0120eeec661bed44" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.612684 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a455d95149982d555c46bea1485d4f01300e6c7c316898ea0120eeec661bed44"} err="failed to get container status \"a455d95149982d555c46bea1485d4f01300e6c7c316898ea0120eeec661bed44\": rpc error: code = NotFound desc = could not find container \"a455d95149982d555c46bea1485d4f01300e6c7c316898ea0120eeec661bed44\": container with ID starting with a455d95149982d555c46bea1485d4f01300e6c7c316898ea0120eeec661bed44 not found: ID does not exist" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.612731 4811 scope.go:117] "RemoveContainer" containerID="df884a847f6dadbb8aca00b6654043b6c0784215f4e1b939b394aa4eb681e365" Jan 28 16:11:29 crc kubenswrapper[4811]: E0128 16:11:29.613052 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df884a847f6dadbb8aca00b6654043b6c0784215f4e1b939b394aa4eb681e365\": container with ID starting with df884a847f6dadbb8aca00b6654043b6c0784215f4e1b939b394aa4eb681e365 not found: ID does not exist" containerID="df884a847f6dadbb8aca00b6654043b6c0784215f4e1b939b394aa4eb681e365" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.613085 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df884a847f6dadbb8aca00b6654043b6c0784215f4e1b939b394aa4eb681e365"} err="failed to get container status \"df884a847f6dadbb8aca00b6654043b6c0784215f4e1b939b394aa4eb681e365\": rpc error: code = NotFound desc = could not find container \"df884a847f6dadbb8aca00b6654043b6c0784215f4e1b939b394aa4eb681e365\": container with ID starting with df884a847f6dadbb8aca00b6654043b6c0784215f4e1b939b394aa4eb681e365 not found: ID does not exist" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.613102 4811 scope.go:117] "RemoveContainer" containerID="b2aab4d8e8aa65a46a59daae5fc92084fdb3171e37ce4f82301d54bc92d83bac" Jan 28 16:11:29 crc kubenswrapper[4811]: E0128 16:11:29.613422 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2aab4d8e8aa65a46a59daae5fc92084fdb3171e37ce4f82301d54bc92d83bac\": container with ID starting with b2aab4d8e8aa65a46a59daae5fc92084fdb3171e37ce4f82301d54bc92d83bac not found: ID does not exist" containerID="b2aab4d8e8aa65a46a59daae5fc92084fdb3171e37ce4f82301d54bc92d83bac" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.613599 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2aab4d8e8aa65a46a59daae5fc92084fdb3171e37ce4f82301d54bc92d83bac"} err="failed to get container status \"b2aab4d8e8aa65a46a59daae5fc92084fdb3171e37ce4f82301d54bc92d83bac\": rpc error: code = NotFound desc = could not find container 
\"b2aab4d8e8aa65a46a59daae5fc92084fdb3171e37ce4f82301d54bc92d83bac\": container with ID starting with b2aab4d8e8aa65a46a59daae5fc92084fdb3171e37ce4f82301d54bc92d83bac not found: ID does not exist" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.613629 4811 scope.go:117] "RemoveContainer" containerID="037d8c9004a401b696bf1926d9cfc0d899e6a6f211a5a2b6dab9e90a49238a12" Jan 28 16:11:29 crc kubenswrapper[4811]: E0128 16:11:29.613923 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"037d8c9004a401b696bf1926d9cfc0d899e6a6f211a5a2b6dab9e90a49238a12\": container with ID starting with 037d8c9004a401b696bf1926d9cfc0d899e6a6f211a5a2b6dab9e90a49238a12 not found: ID does not exist" containerID="037d8c9004a401b696bf1926d9cfc0d899e6a6f211a5a2b6dab9e90a49238a12" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.613954 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"037d8c9004a401b696bf1926d9cfc0d899e6a6f211a5a2b6dab9e90a49238a12"} err="failed to get container status \"037d8c9004a401b696bf1926d9cfc0d899e6a6f211a5a2b6dab9e90a49238a12\": rpc error: code = NotFound desc = could not find container \"037d8c9004a401b696bf1926d9cfc0d899e6a6f211a5a2b6dab9e90a49238a12\": container with ID starting with 037d8c9004a401b696bf1926d9cfc0d899e6a6f211a5a2b6dab9e90a49238a12 not found: ID does not exist" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.613972 4811 scope.go:117] "RemoveContainer" containerID="de17a3ae96cd44c5e7539204c9ecb4a99908a1d59c0cf7b1b30906afe8cca2ea" Jan 28 16:11:29 crc kubenswrapper[4811]: E0128 16:11:29.614241 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de17a3ae96cd44c5e7539204c9ecb4a99908a1d59c0cf7b1b30906afe8cca2ea\": container with ID starting with de17a3ae96cd44c5e7539204c9ecb4a99908a1d59c0cf7b1b30906afe8cca2ea not found: ID does not exist" containerID="de17a3ae96cd44c5e7539204c9ecb4a99908a1d59c0cf7b1b30906afe8cca2ea" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.614279 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de17a3ae96cd44c5e7539204c9ecb4a99908a1d59c0cf7b1b30906afe8cca2ea"} err="failed to get container status \"de17a3ae96cd44c5e7539204c9ecb4a99908a1d59c0cf7b1b30906afe8cca2ea\": rpc error: code = NotFound desc = could not find container \"de17a3ae96cd44c5e7539204c9ecb4a99908a1d59c0cf7b1b30906afe8cca2ea\": container with ID starting with de17a3ae96cd44c5e7539204c9ecb4a99908a1d59c0cf7b1b30906afe8cca2ea not found: ID does not exist" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.614302 4811 scope.go:117] "RemoveContainer" containerID="4c641018196714d8caf4ab6b48967eb4bb8ff1d06fab1301e8b3ab46913539dd" Jan 28 16:11:29 crc kubenswrapper[4811]: E0128 16:11:29.614801 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c641018196714d8caf4ab6b48967eb4bb8ff1d06fab1301e8b3ab46913539dd\": container with ID starting with 4c641018196714d8caf4ab6b48967eb4bb8ff1d06fab1301e8b3ab46913539dd not found: ID does not exist" containerID="4c641018196714d8caf4ab6b48967eb4bb8ff1d06fab1301e8b3ab46913539dd" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.614854 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c641018196714d8caf4ab6b48967eb4bb8ff1d06fab1301e8b3ab46913539dd"} 
err="failed to get container status \"4c641018196714d8caf4ab6b48967eb4bb8ff1d06fab1301e8b3ab46913539dd\": rpc error: code = NotFound desc = could not find container \"4c641018196714d8caf4ab6b48967eb4bb8ff1d06fab1301e8b3ab46913539dd\": container with ID starting with 4c641018196714d8caf4ab6b48967eb4bb8ff1d06fab1301e8b3ab46913539dd not found: ID does not exist" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.614892 4811 scope.go:117] "RemoveContainer" containerID="edf4c2725cabb62cf88b0f7f7190df88433ba5987d5a8be69c4b5a42e602f8aa" Jan 28 16:11:29 crc kubenswrapper[4811]: E0128 16:11:29.615195 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"edf4c2725cabb62cf88b0f7f7190df88433ba5987d5a8be69c4b5a42e602f8aa\": container with ID starting with edf4c2725cabb62cf88b0f7f7190df88433ba5987d5a8be69c4b5a42e602f8aa not found: ID does not exist" containerID="edf4c2725cabb62cf88b0f7f7190df88433ba5987d5a8be69c4b5a42e602f8aa" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.615288 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"edf4c2725cabb62cf88b0f7f7190df88433ba5987d5a8be69c4b5a42e602f8aa"} err="failed to get container status \"edf4c2725cabb62cf88b0f7f7190df88433ba5987d5a8be69c4b5a42e602f8aa\": rpc error: code = NotFound desc = could not find container \"edf4c2725cabb62cf88b0f7f7190df88433ba5987d5a8be69c4b5a42e602f8aa\": container with ID starting with edf4c2725cabb62cf88b0f7f7190df88433ba5987d5a8be69c4b5a42e602f8aa not found: ID does not exist" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.615372 4811 scope.go:117] "RemoveContainer" containerID="bbba77e73ba1d8f7f46e97a42a8636a5c7ea1703f7da10b9daf21ef5e23f03e9" Jan 28 16:11:29 crc kubenswrapper[4811]: E0128 16:11:29.615838 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bbba77e73ba1d8f7f46e97a42a8636a5c7ea1703f7da10b9daf21ef5e23f03e9\": container with ID starting with bbba77e73ba1d8f7f46e97a42a8636a5c7ea1703f7da10b9daf21ef5e23f03e9 not found: ID does not exist" containerID="bbba77e73ba1d8f7f46e97a42a8636a5c7ea1703f7da10b9daf21ef5e23f03e9" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.615869 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bbba77e73ba1d8f7f46e97a42a8636a5c7ea1703f7da10b9daf21ef5e23f03e9"} err="failed to get container status \"bbba77e73ba1d8f7f46e97a42a8636a5c7ea1703f7da10b9daf21ef5e23f03e9\": rpc error: code = NotFound desc = could not find container \"bbba77e73ba1d8f7f46e97a42a8636a5c7ea1703f7da10b9daf21ef5e23f03e9\": container with ID starting with bbba77e73ba1d8f7f46e97a42a8636a5c7ea1703f7da10b9daf21ef5e23f03e9 not found: ID does not exist" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.615893 4811 scope.go:117] "RemoveContainer" containerID="91e208e8d04b20770d870040f31d89e907527f347a650e9696cbc4d272f573aa" Jan 28 16:11:29 crc kubenswrapper[4811]: E0128 16:11:29.616178 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"91e208e8d04b20770d870040f31d89e907527f347a650e9696cbc4d272f573aa\": container with ID starting with 91e208e8d04b20770d870040f31d89e907527f347a650e9696cbc4d272f573aa not found: ID does not exist" containerID="91e208e8d04b20770d870040f31d89e907527f347a650e9696cbc4d272f573aa" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.616269 4811 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91e208e8d04b20770d870040f31d89e907527f347a650e9696cbc4d272f573aa"} err="failed to get container status \"91e208e8d04b20770d870040f31d89e907527f347a650e9696cbc4d272f573aa\": rpc error: code = NotFound desc = could not find container \"91e208e8d04b20770d870040f31d89e907527f347a650e9696cbc4d272f573aa\": container with ID starting with 91e208e8d04b20770d870040f31d89e907527f347a650e9696cbc4d272f573aa not found: ID does not exist" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.616348 4811 scope.go:117] "RemoveContainer" containerID="37e7bde4db551b38403da3cfc7c146a5e67d421d13ffa40d87ce60bbfdaef222" Jan 28 16:11:29 crc kubenswrapper[4811]: E0128 16:11:29.616785 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37e7bde4db551b38403da3cfc7c146a5e67d421d13ffa40d87ce60bbfdaef222\": container with ID starting with 37e7bde4db551b38403da3cfc7c146a5e67d421d13ffa40d87ce60bbfdaef222 not found: ID does not exist" containerID="37e7bde4db551b38403da3cfc7c146a5e67d421d13ffa40d87ce60bbfdaef222" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.616822 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37e7bde4db551b38403da3cfc7c146a5e67d421d13ffa40d87ce60bbfdaef222"} err="failed to get container status \"37e7bde4db551b38403da3cfc7c146a5e67d421d13ffa40d87ce60bbfdaef222\": rpc error: code = NotFound desc = could not find container \"37e7bde4db551b38403da3cfc7c146a5e67d421d13ffa40d87ce60bbfdaef222\": container with ID starting with 37e7bde4db551b38403da3cfc7c146a5e67d421d13ffa40d87ce60bbfdaef222 not found: ID does not exist" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.616843 4811 scope.go:117] "RemoveContainer" containerID="417e8d88bc175d3ce7bec37395bdbe9c0f310c931fbd100484ebab310a34e41a" Jan 28 16:11:29 crc kubenswrapper[4811]: E0128 16:11:29.617252 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"417e8d88bc175d3ce7bec37395bdbe9c0f310c931fbd100484ebab310a34e41a\": container with ID starting with 417e8d88bc175d3ce7bec37395bdbe9c0f310c931fbd100484ebab310a34e41a not found: ID does not exist" containerID="417e8d88bc175d3ce7bec37395bdbe9c0f310c931fbd100484ebab310a34e41a" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.617302 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"417e8d88bc175d3ce7bec37395bdbe9c0f310c931fbd100484ebab310a34e41a"} err="failed to get container status \"417e8d88bc175d3ce7bec37395bdbe9c0f310c931fbd100484ebab310a34e41a\": rpc error: code = NotFound desc = could not find container \"417e8d88bc175d3ce7bec37395bdbe9c0f310c931fbd100484ebab310a34e41a\": container with ID starting with 417e8d88bc175d3ce7bec37395bdbe9c0f310c931fbd100484ebab310a34e41a not found: ID does not exist" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.617332 4811 scope.go:117] "RemoveContainer" containerID="fa14b738e3c3edb62ca81416e8099d28a73bafc1416ebecc67cb280bd16a3afb" Jan 28 16:11:29 crc kubenswrapper[4811]: E0128 16:11:29.617666 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa14b738e3c3edb62ca81416e8099d28a73bafc1416ebecc67cb280bd16a3afb\": container with ID starting with fa14b738e3c3edb62ca81416e8099d28a73bafc1416ebecc67cb280bd16a3afb not found: ID does 
not exist" containerID="fa14b738e3c3edb62ca81416e8099d28a73bafc1416ebecc67cb280bd16a3afb" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.617758 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa14b738e3c3edb62ca81416e8099d28a73bafc1416ebecc67cb280bd16a3afb"} err="failed to get container status \"fa14b738e3c3edb62ca81416e8099d28a73bafc1416ebecc67cb280bd16a3afb\": rpc error: code = NotFound desc = could not find container \"fa14b738e3c3edb62ca81416e8099d28a73bafc1416ebecc67cb280bd16a3afb\": container with ID starting with fa14b738e3c3edb62ca81416e8099d28a73bafc1416ebecc67cb280bd16a3afb not found: ID does not exist" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.625039 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.625082 4811 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.625103 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.625122 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:29 crc kubenswrapper[4811]: I0128 16:11:29.625141 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mxmsm\" (UniqueName: \"kubernetes.io/projected/ff73f2e3-0c2c-4008-bc61-36e65a0ad776-kube-api-access-mxmsm\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:30 crc kubenswrapper[4811]: I0128 16:11:30.132103 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 28 16:11:30 crc kubenswrapper[4811]: I0128 16:11:30.132146 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ff73f2e3-0c2c-4008-bc61-36e65a0ad776","Type":"ContainerDied","Data":"ade24fd04f1a6f0a928556e126cd7ef3c131908931c7e5e2d8a5d2e05e6ccf89"} Jan 28 16:11:30 crc kubenswrapper[4811]: I0128 16:11:30.132854 4811 scope.go:117] "RemoveContainer" containerID="d1bc0298c4d4006930b0858e1c0d43af29a7daf4a41eaa0feae8d160b91e3120" Jan 28 16:11:30 crc kubenswrapper[4811]: I0128 16:11:30.166184 4811 scope.go:117] "RemoveContainer" containerID="86828187d9cd31239028cb631f45f10cb80918e0ec369e86d8c2a82f84f3b061" Jan 28 16:11:30 crc kubenswrapper[4811]: I0128 16:11:30.185352 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 28 16:11:30 crc kubenswrapper[4811]: I0128 16:11:30.195074 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 28 16:11:30 crc kubenswrapper[4811]: I0128 16:11:30.349893 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" path="/var/lib/kubelet/pods/84ae0602-24d6-43f7-84d5-c9aa7ed04d7b/volumes" Jan 28 16:11:30 crc kubenswrapper[4811]: I0128 16:11:30.352678 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8bfbbf41-033c-479e-b625-396378f8afa2" path="/var/lib/kubelet/pods/8bfbbf41-033c-479e-b625-396378f8afa2/volumes" Jan 28 16:11:30 crc kubenswrapper[4811]: I0128 16:11:30.353629 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff73f2e3-0c2c-4008-bc61-36e65a0ad776" path="/var/lib/kubelet/pods/ff73f2e3-0c2c-4008-bc61-36e65a0ad776/volumes" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.481089 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-zrj65"] Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.482544 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94afc9c1-3ddc-4ad1-9df2-03c593f1d536" containerName="nova-cell1-conductor-conductor" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.482568 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="94afc9c1-3ddc-4ad1-9df2-03c593f1d536" containerName="nova-cell1-conductor-conductor" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.482589 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15d6fdf2-a4f1-4f30-b621-31877f96868f" containerName="nova-metadata-metadata" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.482598 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="15d6fdf2-a4f1-4f30-b621-31877f96868f" containerName="nova-metadata-metadata" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.482617 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0138ad61-fb17-46c7-bdd3-c65f15e2e186" containerName="barbican-worker" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.482627 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="0138ad61-fb17-46c7-bdd3-c65f15e2e186" containerName="barbican-worker" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.482638 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73a0ad8a-2cb9-466e-b3e7-251823ea4528" containerName="neutron-httpd" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.482647 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="73a0ad8a-2cb9-466e-b3e7-251823ea4528" 
containerName="neutron-httpd" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.482662 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="156027c6-3cec-4317-8267-eb234c90af40" containerName="galera" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.482671 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="156027c6-3cec-4317-8267-eb234c90af40" containerName="galera" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.482687 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="account-reaper" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.482696 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="account-reaper" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.482714 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1a4fd4e-29a2-464f-aca7-3f856ed15221" containerName="placement-log" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.482723 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1a4fd4e-29a2-464f-aca7-3f856ed15221" containerName="placement-log" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.482737 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f80333f7-9036-49d9-8c68-03e4ef8f9ee8" containerName="kube-state-metrics" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.482745 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f80333f7-9036-49d9-8c68-03e4ef8f9ee8" containerName="kube-state-metrics" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.482762 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39ddd574-84b9-4065-9d72-5183fe430d4d" containerName="cinder-api-log" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.482772 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="39ddd574-84b9-4065-9d72-5183fe430d4d" containerName="cinder-api-log" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.482784 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="object-auditor" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.482792 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="object-auditor" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.482803 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dd44d16-5b30-493c-9dd2-1ba856a4393a" containerName="ovn-northd" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.482811 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dd44d16-5b30-493c-9dd2-1ba856a4393a" containerName="ovn-northd" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.482821 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="object-replicator" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.482829 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="object-replicator" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.482844 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2ab1b57-166e-4409-8b71-0bb922787623" containerName="mariadb-account-create-update" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.482852 4811 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="a2ab1b57-166e-4409-8b71-0bb922787623" containerName="mariadb-account-create-update" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.482868 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e" containerName="ovn-controller" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.482877 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e" containerName="ovn-controller" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.482888 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="622ebcc7-b645-4db6-86c0-3546523fb7c7" containerName="barbican-api" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.482898 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="622ebcc7-b645-4db6-86c0-3546523fb7c7" containerName="barbican-api" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.482916 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d75f7765-1a6b-4bb5-819a-6891694a29c8" containerName="nova-api-api" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.482925 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d75f7765-1a6b-4bb5-819a-6891694a29c8" containerName="nova-api-api" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.482942 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1761893b-d911-4596-b0b3-ce2d25d0384f" containerName="barbican-keystone-listener-log" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.482951 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="1761893b-d911-4596-b0b3-ce2d25d0384f" containerName="barbican-keystone-listener-log" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.482964 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="account-replicator" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.482974 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="account-replicator" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.482984 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff73f2e3-0c2c-4008-bc61-36e65a0ad776" containerName="probe" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.482993 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff73f2e3-0c2c-4008-bc61-36e65a0ad776" containerName="probe" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483008 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="object-expirer" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483016 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="object-expirer" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483030 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="container-updater" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483041 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="container-updater" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483057 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0138ad61-fb17-46c7-bdd3-c65f15e2e186" containerName="barbican-worker-log" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483065 4811 
state_mem.go:107] "Deleted CPUSet assignment" podUID="0138ad61-fb17-46c7-bdd3-c65f15e2e186" containerName="barbican-worker-log" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483080 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23095127-8b86-445a-8c32-1e6bc14bf05e" containerName="setup-container" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483089 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="23095127-8b86-445a-8c32-1e6bc14bf05e" containerName="setup-container" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483100 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="rsync" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483109 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="rsync" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483119 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5158a65-ce89-4a2b-9a19-0e4a6070562a" containerName="nova-scheduler-scheduler" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483128 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5158a65-ce89-4a2b-9a19-0e4a6070562a" containerName="nova-scheduler-scheduler" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483143 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="swift-recon-cron" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483151 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="swift-recon-cron" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483166 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bfbbf41-033c-479e-b625-396378f8afa2" containerName="ovsdb-server-init" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483188 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bfbbf41-033c-479e-b625-396378f8afa2" containerName="ovsdb-server-init" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483200 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bfbbf41-033c-479e-b625-396378f8afa2" containerName="ovsdb-server" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483209 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bfbbf41-033c-479e-b625-396378f8afa2" containerName="ovsdb-server" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483222 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b7599aa-7114-46c9-bf38-b6d735b75326" containerName="rabbitmq" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483231 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b7599aa-7114-46c9-bf38-b6d735b75326" containerName="rabbitmq" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483242 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84b57c00-a800-4b82-98c7-8ebcc04c2ab6" containerName="memcached" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483251 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="84b57c00-a800-4b82-98c7-8ebcc04c2ab6" containerName="memcached" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483266 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23095127-8b86-445a-8c32-1e6bc14bf05e" containerName="rabbitmq" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483275 4811 
state_mem.go:107] "Deleted CPUSet assignment" podUID="23095127-8b86-445a-8c32-1e6bc14bf05e" containerName="rabbitmq" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483287 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fca8964-49f3-477e-8b8c-b64a5200e386" containerName="proxy-httpd" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483296 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fca8964-49f3-477e-8b8c-b64a5200e386" containerName="proxy-httpd" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483311 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54d8044d-232b-4d32-a2ed-fa2520b6513f" containerName="glance-log" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483320 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="54d8044d-232b-4d32-a2ed-fa2520b6513f" containerName="glance-log" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483334 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="container-replicator" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483344 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="container-replicator" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483357 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1761893b-d911-4596-b0b3-ce2d25d0384f" containerName="barbican-keystone-listener" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483366 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="1761893b-d911-4596-b0b3-ce2d25d0384f" containerName="barbican-keystone-listener" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483378 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="container-server" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483388 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="container-server" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483401 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d75f7765-1a6b-4bb5-819a-6891694a29c8" containerName="nova-api-log" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483410 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d75f7765-1a6b-4bb5-819a-6891694a29c8" containerName="nova-api-log" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483424 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="622ebcc7-b645-4db6-86c0-3546523fb7c7" containerName="barbican-api-log" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483451 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="622ebcc7-b645-4db6-86c0-3546523fb7c7" containerName="barbican-api-log" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483463 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73a0ad8a-2cb9-466e-b3e7-251823ea4528" containerName="neutron-api" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483472 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="73a0ad8a-2cb9-466e-b3e7-251823ea4528" containerName="neutron-api" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483482 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff73f2e3-0c2c-4008-bc61-36e65a0ad776" containerName="cinder-scheduler" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 
16:11:36.483491 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff73f2e3-0c2c-4008-bc61-36e65a0ad776" containerName="cinder-scheduler" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483500 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f35795d5-ffac-4851-914a-00dc84496f91" containerName="glance-httpd" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483509 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f35795d5-ffac-4851-914a-00dc84496f91" containerName="glance-httpd" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483521 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="account-auditor" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483530 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="account-auditor" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483542 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bfbbf41-033c-479e-b625-396378f8afa2" containerName="ovs-vswitchd" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483550 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bfbbf41-033c-479e-b625-396378f8afa2" containerName="ovs-vswitchd" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483560 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="account-server" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483579 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="account-server" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483597 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f35795d5-ffac-4851-914a-00dc84496f91" containerName="glance-log" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483605 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f35795d5-ffac-4851-914a-00dc84496f91" containerName="glance-log" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483620 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fca8964-49f3-477e-8b8c-b64a5200e386" containerName="ceilometer-notification-agent" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483629 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fca8964-49f3-477e-8b8c-b64a5200e386" containerName="ceilometer-notification-agent" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483643 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39ddd574-84b9-4065-9d72-5183fe430d4d" containerName="cinder-api" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483651 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="39ddd574-84b9-4065-9d72-5183fe430d4d" containerName="cinder-api" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483664 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="object-server" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483672 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="object-server" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483685 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fca8964-49f3-477e-8b8c-b64a5200e386" containerName="sg-core" Jan 28 16:11:36 crc kubenswrapper[4811]: 
I0128 16:11:36.483697 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fca8964-49f3-477e-8b8c-b64a5200e386" containerName="sg-core" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483714 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1a4fd4e-29a2-464f-aca7-3f856ed15221" containerName="placement-api" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483723 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1a4fd4e-29a2-464f-aca7-3f856ed15221" containerName="placement-api" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483739 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b" containerName="mysql-bootstrap" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483749 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b" containerName="mysql-bootstrap" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483759 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b" containerName="galera" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483769 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b" containerName="galera" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483785 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fca8964-49f3-477e-8b8c-b64a5200e386" containerName="ceilometer-central-agent" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483794 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fca8964-49f3-477e-8b8c-b64a5200e386" containerName="ceilometer-central-agent" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483807 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="container-auditor" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483815 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="container-auditor" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483828 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e10c1c14-4c1a-445e-9c98-1f0b6b334802" containerName="keystone-api" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483838 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="e10c1c14-4c1a-445e-9c98-1f0b6b334802" containerName="keystone-api" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483869 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54d8044d-232b-4d32-a2ed-fa2520b6513f" containerName="glance-httpd" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483879 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="54d8044d-232b-4d32-a2ed-fa2520b6513f" containerName="glance-httpd" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483892 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="object-updater" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483902 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="object-updater" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483916 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="156027c6-3cec-4317-8267-eb234c90af40" containerName="mysql-bootstrap" Jan 28 16:11:36 crc kubenswrapper[4811]: 
I0128 16:11:36.483926 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="156027c6-3cec-4317-8267-eb234c90af40" containerName="mysql-bootstrap" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483940 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f76f6c0a-7254-4c76-86c3-e1004cf38303" containerName="nova-cell0-conductor-conductor" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483949 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f76f6c0a-7254-4c76-86c3-e1004cf38303" containerName="nova-cell0-conductor-conductor" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483962 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15d6fdf2-a4f1-4f30-b621-31877f96868f" containerName="nova-metadata-log" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483970 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="15d6fdf2-a4f1-4f30-b621-31877f96868f" containerName="nova-metadata-log" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.483985 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b7599aa-7114-46c9-bf38-b6d735b75326" containerName="setup-container" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.483994 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b7599aa-7114-46c9-bf38-b6d735b75326" containerName="setup-container" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.484007 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dd44d16-5b30-493c-9dd2-1ba856a4393a" containerName="openstack-network-exporter" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484017 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dd44d16-5b30-493c-9dd2-1ba856a4393a" containerName="openstack-network-exporter" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484209 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="container-auditor" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484221 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="f35795d5-ffac-4851-914a-00dc84496f91" containerName="glance-httpd" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484238 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="6dd44d16-5b30-493c-9dd2-1ba856a4393a" containerName="ovn-northd" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484248 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="account-server" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484270 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="6dd44d16-5b30-493c-9dd2-1ba856a4393a" containerName="openstack-network-exporter" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484286 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="f35795d5-ffac-4851-914a-00dc84496f91" containerName="glance-log" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484296 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="0138ad61-fb17-46c7-bdd3-c65f15e2e186" containerName="barbican-worker-log" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484311 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="container-replicator" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484327 4811 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="15d6fdf2-a4f1-4f30-b621-31877f96868f" containerName="nova-metadata-log" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484341 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d75f7765-1a6b-4bb5-819a-6891694a29c8" containerName="nova-api-api" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484355 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="39ddd574-84b9-4065-9d72-5183fe430d4d" containerName="cinder-api" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484371 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="156027c6-3cec-4317-8267-eb234c90af40" containerName="galera" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484384 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="54d8044d-232b-4d32-a2ed-fa2520b6513f" containerName="glance-log" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484401 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="f80333f7-9036-49d9-8c68-03e4ef8f9ee8" containerName="kube-state-metrics" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484419 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5158a65-ce89-4a2b-9a19-0e4a6070562a" containerName="nova-scheduler-scheduler" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484454 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="f76f6c0a-7254-4c76-86c3-e1004cf38303" containerName="nova-cell0-conductor-conductor" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484465 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2ab1b57-166e-4409-8b71-0bb922787623" containerName="mariadb-account-create-update" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484478 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="object-updater" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484517 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="e10c1c14-4c1a-445e-9c98-1f0b6b334802" containerName="keystone-api" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484533 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="swift-recon-cron" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484544 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2ab1b57-166e-4409-8b71-0bb922787623" containerName="mariadb-account-create-update" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484558 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fca8964-49f3-477e-8b8c-b64a5200e386" containerName="ceilometer-central-agent" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484567 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="73a0ad8a-2cb9-466e-b3e7-251823ea4528" containerName="neutron-api" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484579 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1a4fd4e-29a2-464f-aca7-3f856ed15221" containerName="placement-api" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484590 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ad4ddb3-372a-4f21-9370-5ef2d74c4d9e" containerName="ovn-controller" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484601 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff73f2e3-0c2c-4008-bc61-36e65a0ad776" containerName="probe" 
Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484614 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fca8964-49f3-477e-8b8c-b64a5200e386" containerName="proxy-httpd" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484624 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="object-replicator" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484641 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="0138ad61-fb17-46c7-bdd3-c65f15e2e186" containerName="barbican-worker" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484650 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="73a0ad8a-2cb9-466e-b3e7-251823ea4528" containerName="neutron-httpd" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484659 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="94afc9c1-3ddc-4ad1-9df2-03c593f1d536" containerName="nova-cell1-conductor-conductor" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484670 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="15d6fdf2-a4f1-4f30-b621-31877f96868f" containerName="nova-metadata-metadata" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484683 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="object-server" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484695 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="rsync" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484705 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="622ebcc7-b645-4db6-86c0-3546523fb7c7" containerName="barbican-api" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484718 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="account-reaper" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484731 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b7599aa-7114-46c9-bf38-b6d735b75326" containerName="rabbitmq" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484741 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a4cc6d7-4f66-4d66-8d8c-c305c3e7977b" containerName="galera" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484755 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="23095127-8b86-445a-8c32-1e6bc14bf05e" containerName="rabbitmq" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484765 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="container-server" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484778 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="object-auditor" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484790 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="622ebcc7-b645-4db6-86c0-3546523fb7c7" containerName="barbican-api-log" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484801 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff73f2e3-0c2c-4008-bc61-36e65a0ad776" containerName="cinder-scheduler" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484812 4811 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="84b57c00-a800-4b82-98c7-8ebcc04c2ab6" containerName="memcached" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484822 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bfbbf41-033c-479e-b625-396378f8afa2" containerName="ovsdb-server" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484832 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="account-replicator" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484845 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="1761893b-d911-4596-b0b3-ce2d25d0384f" containerName="barbican-keystone-listener-log" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484858 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="39ddd574-84b9-4065-9d72-5183fe430d4d" containerName="cinder-api-log" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484872 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fca8964-49f3-477e-8b8c-b64a5200e386" containerName="ceilometer-notification-agent" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484937 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="object-expirer" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484954 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="container-updater" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484968 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="54d8044d-232b-4d32-a2ed-fa2520b6513f" containerName="glance-httpd" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484978 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fca8964-49f3-477e-8b8c-b64a5200e386" containerName="sg-core" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.484990 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bfbbf41-033c-479e-b625-396378f8afa2" containerName="ovs-vswitchd" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.485003 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d75f7765-1a6b-4bb5-819a-6891694a29c8" containerName="nova-api-log" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.485016 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="1761893b-d911-4596-b0b3-ce2d25d0384f" containerName="barbican-keystone-listener" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.485029 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1a4fd4e-29a2-464f-aca7-3f856ed15221" containerName="placement-log" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.485043 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="84ae0602-24d6-43f7-84d5-c9aa7ed04d7b" containerName="account-auditor" Jan 28 16:11:36 crc kubenswrapper[4811]: E0128 16:11:36.485255 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2ab1b57-166e-4409-8b71-0bb922787623" containerName="mariadb-account-create-update" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.485267 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2ab1b57-166e-4409-8b71-0bb922787623" containerName="mariadb-account-create-update" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.486703 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zrj65" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.495308 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zrj65"] Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.524922 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pqzp\" (UniqueName: \"kubernetes.io/projected/00ab43ae-fc05-4652-9ccb-6e32a47742bb-kube-api-access-4pqzp\") pod \"redhat-marketplace-zrj65\" (UID: \"00ab43ae-fc05-4652-9ccb-6e32a47742bb\") " pod="openshift-marketplace/redhat-marketplace-zrj65" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.525245 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00ab43ae-fc05-4652-9ccb-6e32a47742bb-catalog-content\") pod \"redhat-marketplace-zrj65\" (UID: \"00ab43ae-fc05-4652-9ccb-6e32a47742bb\") " pod="openshift-marketplace/redhat-marketplace-zrj65" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.525295 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00ab43ae-fc05-4652-9ccb-6e32a47742bb-utilities\") pod \"redhat-marketplace-zrj65\" (UID: \"00ab43ae-fc05-4652-9ccb-6e32a47742bb\") " pod="openshift-marketplace/redhat-marketplace-zrj65" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.626852 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00ab43ae-fc05-4652-9ccb-6e32a47742bb-catalog-content\") pod \"redhat-marketplace-zrj65\" (UID: \"00ab43ae-fc05-4652-9ccb-6e32a47742bb\") " pod="openshift-marketplace/redhat-marketplace-zrj65" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.626895 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00ab43ae-fc05-4652-9ccb-6e32a47742bb-utilities\") pod \"redhat-marketplace-zrj65\" (UID: \"00ab43ae-fc05-4652-9ccb-6e32a47742bb\") " pod="openshift-marketplace/redhat-marketplace-zrj65" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.626941 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4pqzp\" (UniqueName: \"kubernetes.io/projected/00ab43ae-fc05-4652-9ccb-6e32a47742bb-kube-api-access-4pqzp\") pod \"redhat-marketplace-zrj65\" (UID: \"00ab43ae-fc05-4652-9ccb-6e32a47742bb\") " pod="openshift-marketplace/redhat-marketplace-zrj65" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.627320 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00ab43ae-fc05-4652-9ccb-6e32a47742bb-catalog-content\") pod \"redhat-marketplace-zrj65\" (UID: \"00ab43ae-fc05-4652-9ccb-6e32a47742bb\") " pod="openshift-marketplace/redhat-marketplace-zrj65" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.627536 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00ab43ae-fc05-4652-9ccb-6e32a47742bb-utilities\") pod \"redhat-marketplace-zrj65\" (UID: \"00ab43ae-fc05-4652-9ccb-6e32a47742bb\") " pod="openshift-marketplace/redhat-marketplace-zrj65" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.651172 4811 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-4pqzp\" (UniqueName: \"kubernetes.io/projected/00ab43ae-fc05-4652-9ccb-6e32a47742bb-kube-api-access-4pqzp\") pod \"redhat-marketplace-zrj65\" (UID: \"00ab43ae-fc05-4652-9ccb-6e32a47742bb\") " pod="openshift-marketplace/redhat-marketplace-zrj65" Jan 28 16:11:36 crc kubenswrapper[4811]: I0128 16:11:36.818074 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zrj65" Jan 28 16:11:37 crc kubenswrapper[4811]: I0128 16:11:37.471125 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zrj65"] Jan 28 16:11:38 crc kubenswrapper[4811]: I0128 16:11:38.209373 4811 generic.go:334] "Generic (PLEG): container finished" podID="00ab43ae-fc05-4652-9ccb-6e32a47742bb" containerID="1194ebc1837e9b3690e02af5c8a2ff268f19c44e4315d2661a8a8ada2821a79e" exitCode=0 Jan 28 16:11:38 crc kubenswrapper[4811]: I0128 16:11:38.209416 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zrj65" event={"ID":"00ab43ae-fc05-4652-9ccb-6e32a47742bb","Type":"ContainerDied","Data":"1194ebc1837e9b3690e02af5c8a2ff268f19c44e4315d2661a8a8ada2821a79e"} Jan 28 16:11:38 crc kubenswrapper[4811]: I0128 16:11:38.209464 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zrj65" event={"ID":"00ab43ae-fc05-4652-9ccb-6e32a47742bb","Type":"ContainerStarted","Data":"61ac45ae913eb023e6d016333f487466750039b27dbe0d5f979d4ea99ddbc6ce"} Jan 28 16:11:38 crc kubenswrapper[4811]: I0128 16:11:38.343585 4811 scope.go:117] "RemoveContainer" containerID="8a7a828294dde0bd8dbd211cdc8b81d51144a40905535ed75ebac45ca7ba49c2" Jan 28 16:11:38 crc kubenswrapper[4811]: E0128 16:11:38.344082 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:11:40 crc kubenswrapper[4811]: I0128 16:11:40.235884 4811 generic.go:334] "Generic (PLEG): container finished" podID="00ab43ae-fc05-4652-9ccb-6e32a47742bb" containerID="8197cd4300d0092d17eeb915257a74d494c7f7bc2e0a69d1df6e75633a46d7c8" exitCode=0 Jan 28 16:11:40 crc kubenswrapper[4811]: I0128 16:11:40.235983 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zrj65" event={"ID":"00ab43ae-fc05-4652-9ccb-6e32a47742bb","Type":"ContainerDied","Data":"8197cd4300d0092d17eeb915257a74d494c7f7bc2e0a69d1df6e75633a46d7c8"} Jan 28 16:11:41 crc kubenswrapper[4811]: I0128 16:11:41.251985 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zrj65" event={"ID":"00ab43ae-fc05-4652-9ccb-6e32a47742bb","Type":"ContainerStarted","Data":"a657407a2500795771827527cd4f3b8a2be3f6e827137f48e61aaba3b70e24c4"} Jan 28 16:11:41 crc kubenswrapper[4811]: I0128 16:11:41.279190 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-zrj65" podStartSLOduration=2.751221791 podStartE2EDuration="5.279163812s" podCreationTimestamp="2026-01-28 16:11:36 +0000 UTC" firstStartedPulling="2026-01-28 16:11:38.211620522 +0000 UTC m=+1590.965984125" lastFinishedPulling="2026-01-28 
16:11:40.739562563 +0000 UTC m=+1593.493926146" observedRunningTime="2026-01-28 16:11:41.272573021 +0000 UTC m=+1594.026936644" watchObservedRunningTime="2026-01-28 16:11:41.279163812 +0000 UTC m=+1594.033527405" Jan 28 16:11:43 crc kubenswrapper[4811]: I0128 16:11:43.258914 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-fplk9"] Jan 28 16:11:43 crc kubenswrapper[4811]: I0128 16:11:43.260921 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fplk9" Jan 28 16:11:43 crc kubenswrapper[4811]: I0128 16:11:43.288588 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fplk9"] Jan 28 16:11:43 crc kubenswrapper[4811]: I0128 16:11:43.326693 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58741767-826e-4b8a-bca6-6ed6a559d01f-catalog-content\") pod \"certified-operators-fplk9\" (UID: \"58741767-826e-4b8a-bca6-6ed6a559d01f\") " pod="openshift-marketplace/certified-operators-fplk9" Jan 28 16:11:43 crc kubenswrapper[4811]: I0128 16:11:43.326751 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rq59k\" (UniqueName: \"kubernetes.io/projected/58741767-826e-4b8a-bca6-6ed6a559d01f-kube-api-access-rq59k\") pod \"certified-operators-fplk9\" (UID: \"58741767-826e-4b8a-bca6-6ed6a559d01f\") " pod="openshift-marketplace/certified-operators-fplk9" Jan 28 16:11:43 crc kubenswrapper[4811]: I0128 16:11:43.326807 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58741767-826e-4b8a-bca6-6ed6a559d01f-utilities\") pod \"certified-operators-fplk9\" (UID: \"58741767-826e-4b8a-bca6-6ed6a559d01f\") " pod="openshift-marketplace/certified-operators-fplk9" Jan 28 16:11:43 crc kubenswrapper[4811]: I0128 16:11:43.428263 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58741767-826e-4b8a-bca6-6ed6a559d01f-catalog-content\") pod \"certified-operators-fplk9\" (UID: \"58741767-826e-4b8a-bca6-6ed6a559d01f\") " pod="openshift-marketplace/certified-operators-fplk9" Jan 28 16:11:43 crc kubenswrapper[4811]: I0128 16:11:43.428648 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rq59k\" (UniqueName: \"kubernetes.io/projected/58741767-826e-4b8a-bca6-6ed6a559d01f-kube-api-access-rq59k\") pod \"certified-operators-fplk9\" (UID: \"58741767-826e-4b8a-bca6-6ed6a559d01f\") " pod="openshift-marketplace/certified-operators-fplk9" Jan 28 16:11:43 crc kubenswrapper[4811]: I0128 16:11:43.428898 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58741767-826e-4b8a-bca6-6ed6a559d01f-utilities\") pod \"certified-operators-fplk9\" (UID: \"58741767-826e-4b8a-bca6-6ed6a559d01f\") " pod="openshift-marketplace/certified-operators-fplk9" Jan 28 16:11:43 crc kubenswrapper[4811]: I0128 16:11:43.428925 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58741767-826e-4b8a-bca6-6ed6a559d01f-catalog-content\") pod \"certified-operators-fplk9\" (UID: \"58741767-826e-4b8a-bca6-6ed6a559d01f\") " 
pod="openshift-marketplace/certified-operators-fplk9" Jan 28 16:11:43 crc kubenswrapper[4811]: I0128 16:11:43.429384 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58741767-826e-4b8a-bca6-6ed6a559d01f-utilities\") pod \"certified-operators-fplk9\" (UID: \"58741767-826e-4b8a-bca6-6ed6a559d01f\") " pod="openshift-marketplace/certified-operators-fplk9" Jan 28 16:11:43 crc kubenswrapper[4811]: I0128 16:11:43.451224 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rq59k\" (UniqueName: \"kubernetes.io/projected/58741767-826e-4b8a-bca6-6ed6a559d01f-kube-api-access-rq59k\") pod \"certified-operators-fplk9\" (UID: \"58741767-826e-4b8a-bca6-6ed6a559d01f\") " pod="openshift-marketplace/certified-operators-fplk9" Jan 28 16:11:43 crc kubenswrapper[4811]: I0128 16:11:43.585912 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fplk9" Jan 28 16:11:44 crc kubenswrapper[4811]: I0128 16:11:44.053937 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fplk9"] Jan 28 16:11:44 crc kubenswrapper[4811]: I0128 16:11:44.283385 4811 generic.go:334] "Generic (PLEG): container finished" podID="58741767-826e-4b8a-bca6-6ed6a559d01f" containerID="837183cbefa769a7f76472b3e59780ff53a444f4c86ee0300f96f88ca0885ed4" exitCode=0 Jan 28 16:11:44 crc kubenswrapper[4811]: I0128 16:11:44.283454 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fplk9" event={"ID":"58741767-826e-4b8a-bca6-6ed6a559d01f","Type":"ContainerDied","Data":"837183cbefa769a7f76472b3e59780ff53a444f4c86ee0300f96f88ca0885ed4"} Jan 28 16:11:44 crc kubenswrapper[4811]: I0128 16:11:44.283485 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fplk9" event={"ID":"58741767-826e-4b8a-bca6-6ed6a559d01f","Type":"ContainerStarted","Data":"dfa863d1cbdcc5088aa4255b7b6432a84e8fdb0bd22ef2f6771e5cc2003d0964"} Jan 28 16:11:45 crc kubenswrapper[4811]: I0128 16:11:45.291395 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fplk9" event={"ID":"58741767-826e-4b8a-bca6-6ed6a559d01f","Type":"ContainerStarted","Data":"0e8360a6028deeca28873d52779aaf537b2085394730eebd446eddc0ee8d7f9b"} Jan 28 16:11:46 crc kubenswrapper[4811]: I0128 16:11:46.309641 4811 generic.go:334] "Generic (PLEG): container finished" podID="58741767-826e-4b8a-bca6-6ed6a559d01f" containerID="0e8360a6028deeca28873d52779aaf537b2085394730eebd446eddc0ee8d7f9b" exitCode=0 Jan 28 16:11:46 crc kubenswrapper[4811]: I0128 16:11:46.309695 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fplk9" event={"ID":"58741767-826e-4b8a-bca6-6ed6a559d01f","Type":"ContainerDied","Data":"0e8360a6028deeca28873d52779aaf537b2085394730eebd446eddc0ee8d7f9b"} Jan 28 16:11:46 crc kubenswrapper[4811]: I0128 16:11:46.819016 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-zrj65" Jan 28 16:11:46 crc kubenswrapper[4811]: I0128 16:11:46.819488 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-zrj65" Jan 28 16:11:46 crc kubenswrapper[4811]: I0128 16:11:46.861965 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/redhat-marketplace-zrj65" Jan 28 16:11:47 crc kubenswrapper[4811]: I0128 16:11:47.383388 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-zrj65" Jan 28 16:11:48 crc kubenswrapper[4811]: I0128 16:11:48.337652 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fplk9" event={"ID":"58741767-826e-4b8a-bca6-6ed6a559d01f","Type":"ContainerStarted","Data":"30e53500cdd049e3b9047b28b56908db9eef90ed06da13919b9bb4daa6861f77"} Jan 28 16:11:48 crc kubenswrapper[4811]: I0128 16:11:48.366632 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-fplk9" podStartSLOduration=2.073896229 podStartE2EDuration="5.36660571s" podCreationTimestamp="2026-01-28 16:11:43 +0000 UTC" firstStartedPulling="2026-01-28 16:11:44.285566783 +0000 UTC m=+1597.039930366" lastFinishedPulling="2026-01-28 16:11:47.578276254 +0000 UTC m=+1600.332639847" observedRunningTime="2026-01-28 16:11:48.356863222 +0000 UTC m=+1601.111226815" watchObservedRunningTime="2026-01-28 16:11:48.36660571 +0000 UTC m=+1601.120969333" Jan 28 16:11:49 crc kubenswrapper[4811]: I0128 16:11:49.232627 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zrj65"] Jan 28 16:11:50 crc kubenswrapper[4811]: I0128 16:11:50.341024 4811 scope.go:117] "RemoveContainer" containerID="8a7a828294dde0bd8dbd211cdc8b81d51144a40905535ed75ebac45ca7ba49c2" Jan 28 16:11:50 crc kubenswrapper[4811]: E0128 16:11:50.341361 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:11:50 crc kubenswrapper[4811]: I0128 16:11:50.356020 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-zrj65" podUID="00ab43ae-fc05-4652-9ccb-6e32a47742bb" containerName="registry-server" containerID="cri-o://a657407a2500795771827527cd4f3b8a2be3f6e827137f48e61aaba3b70e24c4" gracePeriod=2 Jan 28 16:11:51 crc kubenswrapper[4811]: I0128 16:11:51.269735 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zrj65" Jan 28 16:11:51 crc kubenswrapper[4811]: I0128 16:11:51.366493 4811 generic.go:334] "Generic (PLEG): container finished" podID="00ab43ae-fc05-4652-9ccb-6e32a47742bb" containerID="a657407a2500795771827527cd4f3b8a2be3f6e827137f48e61aaba3b70e24c4" exitCode=0 Jan 28 16:11:51 crc kubenswrapper[4811]: I0128 16:11:51.366531 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zrj65" event={"ID":"00ab43ae-fc05-4652-9ccb-6e32a47742bb","Type":"ContainerDied","Data":"a657407a2500795771827527cd4f3b8a2be3f6e827137f48e61aaba3b70e24c4"} Jan 28 16:11:51 crc kubenswrapper[4811]: I0128 16:11:51.366554 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zrj65" event={"ID":"00ab43ae-fc05-4652-9ccb-6e32a47742bb","Type":"ContainerDied","Data":"61ac45ae913eb023e6d016333f487466750039b27dbe0d5f979d4ea99ddbc6ce"} Jan 28 16:11:51 crc kubenswrapper[4811]: I0128 16:11:51.366569 4811 scope.go:117] "RemoveContainer" containerID="a657407a2500795771827527cd4f3b8a2be3f6e827137f48e61aaba3b70e24c4" Jan 28 16:11:51 crc kubenswrapper[4811]: I0128 16:11:51.366584 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zrj65" Jan 28 16:11:51 crc kubenswrapper[4811]: I0128 16:11:51.398131 4811 scope.go:117] "RemoveContainer" containerID="8197cd4300d0092d17eeb915257a74d494c7f7bc2e0a69d1df6e75633a46d7c8" Jan 28 16:11:51 crc kubenswrapper[4811]: I0128 16:11:51.420646 4811 scope.go:117] "RemoveContainer" containerID="1194ebc1837e9b3690e02af5c8a2ff268f19c44e4315d2661a8a8ada2821a79e" Jan 28 16:11:51 crc kubenswrapper[4811]: I0128 16:11:51.439408 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4pqzp\" (UniqueName: \"kubernetes.io/projected/00ab43ae-fc05-4652-9ccb-6e32a47742bb-kube-api-access-4pqzp\") pod \"00ab43ae-fc05-4652-9ccb-6e32a47742bb\" (UID: \"00ab43ae-fc05-4652-9ccb-6e32a47742bb\") " Jan 28 16:11:51 crc kubenswrapper[4811]: I0128 16:11:51.439501 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00ab43ae-fc05-4652-9ccb-6e32a47742bb-utilities\") pod \"00ab43ae-fc05-4652-9ccb-6e32a47742bb\" (UID: \"00ab43ae-fc05-4652-9ccb-6e32a47742bb\") " Jan 28 16:11:51 crc kubenswrapper[4811]: I0128 16:11:51.439529 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00ab43ae-fc05-4652-9ccb-6e32a47742bb-catalog-content\") pod \"00ab43ae-fc05-4652-9ccb-6e32a47742bb\" (UID: \"00ab43ae-fc05-4652-9ccb-6e32a47742bb\") " Jan 28 16:11:51 crc kubenswrapper[4811]: I0128 16:11:51.440814 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/00ab43ae-fc05-4652-9ccb-6e32a47742bb-utilities" (OuterVolumeSpecName: "utilities") pod "00ab43ae-fc05-4652-9ccb-6e32a47742bb" (UID: "00ab43ae-fc05-4652-9ccb-6e32a47742bb"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:11:51 crc kubenswrapper[4811]: I0128 16:11:51.444722 4811 scope.go:117] "RemoveContainer" containerID="a657407a2500795771827527cd4f3b8a2be3f6e827137f48e61aaba3b70e24c4" Jan 28 16:11:51 crc kubenswrapper[4811]: E0128 16:11:51.445830 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a657407a2500795771827527cd4f3b8a2be3f6e827137f48e61aaba3b70e24c4\": container with ID starting with a657407a2500795771827527cd4f3b8a2be3f6e827137f48e61aaba3b70e24c4 not found: ID does not exist" containerID="a657407a2500795771827527cd4f3b8a2be3f6e827137f48e61aaba3b70e24c4" Jan 28 16:11:51 crc kubenswrapper[4811]: I0128 16:11:51.445881 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a657407a2500795771827527cd4f3b8a2be3f6e827137f48e61aaba3b70e24c4"} err="failed to get container status \"a657407a2500795771827527cd4f3b8a2be3f6e827137f48e61aaba3b70e24c4\": rpc error: code = NotFound desc = could not find container \"a657407a2500795771827527cd4f3b8a2be3f6e827137f48e61aaba3b70e24c4\": container with ID starting with a657407a2500795771827527cd4f3b8a2be3f6e827137f48e61aaba3b70e24c4 not found: ID does not exist" Jan 28 16:11:51 crc kubenswrapper[4811]: I0128 16:11:51.445913 4811 scope.go:117] "RemoveContainer" containerID="8197cd4300d0092d17eeb915257a74d494c7f7bc2e0a69d1df6e75633a46d7c8" Jan 28 16:11:51 crc kubenswrapper[4811]: E0128 16:11:51.446256 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8197cd4300d0092d17eeb915257a74d494c7f7bc2e0a69d1df6e75633a46d7c8\": container with ID starting with 8197cd4300d0092d17eeb915257a74d494c7f7bc2e0a69d1df6e75633a46d7c8 not found: ID does not exist" containerID="8197cd4300d0092d17eeb915257a74d494c7f7bc2e0a69d1df6e75633a46d7c8" Jan 28 16:11:51 crc kubenswrapper[4811]: I0128 16:11:51.446297 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8197cd4300d0092d17eeb915257a74d494c7f7bc2e0a69d1df6e75633a46d7c8"} err="failed to get container status \"8197cd4300d0092d17eeb915257a74d494c7f7bc2e0a69d1df6e75633a46d7c8\": rpc error: code = NotFound desc = could not find container \"8197cd4300d0092d17eeb915257a74d494c7f7bc2e0a69d1df6e75633a46d7c8\": container with ID starting with 8197cd4300d0092d17eeb915257a74d494c7f7bc2e0a69d1df6e75633a46d7c8 not found: ID does not exist" Jan 28 16:11:51 crc kubenswrapper[4811]: I0128 16:11:51.446318 4811 scope.go:117] "RemoveContainer" containerID="1194ebc1837e9b3690e02af5c8a2ff268f19c44e4315d2661a8a8ada2821a79e" Jan 28 16:11:51 crc kubenswrapper[4811]: I0128 16:11:51.446308 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00ab43ae-fc05-4652-9ccb-6e32a47742bb-kube-api-access-4pqzp" (OuterVolumeSpecName: "kube-api-access-4pqzp") pod "00ab43ae-fc05-4652-9ccb-6e32a47742bb" (UID: "00ab43ae-fc05-4652-9ccb-6e32a47742bb"). InnerVolumeSpecName "kube-api-access-4pqzp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:51 crc kubenswrapper[4811]: E0128 16:11:51.446733 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1194ebc1837e9b3690e02af5c8a2ff268f19c44e4315d2661a8a8ada2821a79e\": container with ID starting with 1194ebc1837e9b3690e02af5c8a2ff268f19c44e4315d2661a8a8ada2821a79e not found: ID does not exist" containerID="1194ebc1837e9b3690e02af5c8a2ff268f19c44e4315d2661a8a8ada2821a79e" Jan 28 16:11:51 crc kubenswrapper[4811]: I0128 16:11:51.446758 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1194ebc1837e9b3690e02af5c8a2ff268f19c44e4315d2661a8a8ada2821a79e"} err="failed to get container status \"1194ebc1837e9b3690e02af5c8a2ff268f19c44e4315d2661a8a8ada2821a79e\": rpc error: code = NotFound desc = could not find container \"1194ebc1837e9b3690e02af5c8a2ff268f19c44e4315d2661a8a8ada2821a79e\": container with ID starting with 1194ebc1837e9b3690e02af5c8a2ff268f19c44e4315d2661a8a8ada2821a79e not found: ID does not exist" Jan 28 16:11:51 crc kubenswrapper[4811]: I0128 16:11:51.479889 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/00ab43ae-fc05-4652-9ccb-6e32a47742bb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "00ab43ae-fc05-4652-9ccb-6e32a47742bb" (UID: "00ab43ae-fc05-4652-9ccb-6e32a47742bb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:11:51 crc kubenswrapper[4811]: I0128 16:11:51.541064 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4pqzp\" (UniqueName: \"kubernetes.io/projected/00ab43ae-fc05-4652-9ccb-6e32a47742bb-kube-api-access-4pqzp\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:51 crc kubenswrapper[4811]: I0128 16:11:51.541109 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00ab43ae-fc05-4652-9ccb-6e32a47742bb-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:51 crc kubenswrapper[4811]: I0128 16:11:51.541120 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00ab43ae-fc05-4652-9ccb-6e32a47742bb-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:51 crc kubenswrapper[4811]: I0128 16:11:51.703318 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zrj65"] Jan 28 16:11:51 crc kubenswrapper[4811]: I0128 16:11:51.708770 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-zrj65"] Jan 28 16:11:52 crc kubenswrapper[4811]: I0128 16:11:52.347878 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00ab43ae-fc05-4652-9ccb-6e32a47742bb" path="/var/lib/kubelet/pods/00ab43ae-fc05-4652-9ccb-6e32a47742bb/volumes" Jan 28 16:11:53 crc kubenswrapper[4811]: I0128 16:11:53.586885 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-fplk9" Jan 28 16:11:53 crc kubenswrapper[4811]: I0128 16:11:53.586986 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-fplk9" Jan 28 16:11:53 crc kubenswrapper[4811]: I0128 16:11:53.658010 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-fplk9" Jan 28 16:11:54 crc 
kubenswrapper[4811]: I0128 16:11:54.468408 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-fplk9" Jan 28 16:11:55 crc kubenswrapper[4811]: I0128 16:11:55.434846 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fplk9"] Jan 28 16:11:56 crc kubenswrapper[4811]: I0128 16:11:56.412601 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-fplk9" podUID="58741767-826e-4b8a-bca6-6ed6a559d01f" containerName="registry-server" containerID="cri-o://30e53500cdd049e3b9047b28b56908db9eef90ed06da13919b9bb4daa6861f77" gracePeriod=2 Jan 28 16:11:57 crc kubenswrapper[4811]: I0128 16:11:57.422360 4811 generic.go:334] "Generic (PLEG): container finished" podID="58741767-826e-4b8a-bca6-6ed6a559d01f" containerID="30e53500cdd049e3b9047b28b56908db9eef90ed06da13919b9bb4daa6861f77" exitCode=0 Jan 28 16:11:57 crc kubenswrapper[4811]: I0128 16:11:57.422417 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fplk9" event={"ID":"58741767-826e-4b8a-bca6-6ed6a559d01f","Type":"ContainerDied","Data":"30e53500cdd049e3b9047b28b56908db9eef90ed06da13919b9bb4daa6861f77"} Jan 28 16:11:57 crc kubenswrapper[4811]: I0128 16:11:57.624843 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fplk9" Jan 28 16:11:57 crc kubenswrapper[4811]: I0128 16:11:57.643271 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58741767-826e-4b8a-bca6-6ed6a559d01f-catalog-content\") pod \"58741767-826e-4b8a-bca6-6ed6a559d01f\" (UID: \"58741767-826e-4b8a-bca6-6ed6a559d01f\") " Jan 28 16:11:57 crc kubenswrapper[4811]: I0128 16:11:57.643342 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58741767-826e-4b8a-bca6-6ed6a559d01f-utilities\") pod \"58741767-826e-4b8a-bca6-6ed6a559d01f\" (UID: \"58741767-826e-4b8a-bca6-6ed6a559d01f\") " Jan 28 16:11:57 crc kubenswrapper[4811]: I0128 16:11:57.643406 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rq59k\" (UniqueName: \"kubernetes.io/projected/58741767-826e-4b8a-bca6-6ed6a559d01f-kube-api-access-rq59k\") pod \"58741767-826e-4b8a-bca6-6ed6a559d01f\" (UID: \"58741767-826e-4b8a-bca6-6ed6a559d01f\") " Jan 28 16:11:57 crc kubenswrapper[4811]: I0128 16:11:57.644352 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58741767-826e-4b8a-bca6-6ed6a559d01f-utilities" (OuterVolumeSpecName: "utilities") pod "58741767-826e-4b8a-bca6-6ed6a559d01f" (UID: "58741767-826e-4b8a-bca6-6ed6a559d01f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:11:57 crc kubenswrapper[4811]: I0128 16:11:57.657168 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58741767-826e-4b8a-bca6-6ed6a559d01f-kube-api-access-rq59k" (OuterVolumeSpecName: "kube-api-access-rq59k") pod "58741767-826e-4b8a-bca6-6ed6a559d01f" (UID: "58741767-826e-4b8a-bca6-6ed6a559d01f"). InnerVolumeSpecName "kube-api-access-rq59k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:11:57 crc kubenswrapper[4811]: I0128 16:11:57.695270 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58741767-826e-4b8a-bca6-6ed6a559d01f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "58741767-826e-4b8a-bca6-6ed6a559d01f" (UID: "58741767-826e-4b8a-bca6-6ed6a559d01f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:11:57 crc kubenswrapper[4811]: I0128 16:11:57.745202 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58741767-826e-4b8a-bca6-6ed6a559d01f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:57 crc kubenswrapper[4811]: I0128 16:11:57.745240 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58741767-826e-4b8a-bca6-6ed6a559d01f-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:57 crc kubenswrapper[4811]: I0128 16:11:57.745254 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rq59k\" (UniqueName: \"kubernetes.io/projected/58741767-826e-4b8a-bca6-6ed6a559d01f-kube-api-access-rq59k\") on node \"crc\" DevicePath \"\"" Jan 28 16:11:58 crc kubenswrapper[4811]: I0128 16:11:58.433166 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fplk9" event={"ID":"58741767-826e-4b8a-bca6-6ed6a559d01f","Type":"ContainerDied","Data":"dfa863d1cbdcc5088aa4255b7b6432a84e8fdb0bd22ef2f6771e5cc2003d0964"} Jan 28 16:11:58 crc kubenswrapper[4811]: I0128 16:11:58.433220 4811 scope.go:117] "RemoveContainer" containerID="30e53500cdd049e3b9047b28b56908db9eef90ed06da13919b9bb4daa6861f77" Jan 28 16:11:58 crc kubenswrapper[4811]: I0128 16:11:58.433340 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fplk9" Jan 28 16:11:58 crc kubenswrapper[4811]: I0128 16:11:58.454417 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fplk9"] Jan 28 16:11:58 crc kubenswrapper[4811]: I0128 16:11:58.459301 4811 scope.go:117] "RemoveContainer" containerID="0e8360a6028deeca28873d52779aaf537b2085394730eebd446eddc0ee8d7f9b" Jan 28 16:11:58 crc kubenswrapper[4811]: I0128 16:11:58.463128 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-fplk9"] Jan 28 16:11:58 crc kubenswrapper[4811]: I0128 16:11:58.484552 4811 scope.go:117] "RemoveContainer" containerID="837183cbefa769a7f76472b3e59780ff53a444f4c86ee0300f96f88ca0885ed4" Jan 28 16:12:00 crc kubenswrapper[4811]: I0128 16:12:00.354173 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58741767-826e-4b8a-bca6-6ed6a559d01f" path="/var/lib/kubelet/pods/58741767-826e-4b8a-bca6-6ed6a559d01f/volumes" Jan 28 16:12:02 crc kubenswrapper[4811]: I0128 16:12:02.339851 4811 scope.go:117] "RemoveContainer" containerID="8a7a828294dde0bd8dbd211cdc8b81d51144a40905535ed75ebac45ca7ba49c2" Jan 28 16:12:02 crc kubenswrapper[4811]: E0128 16:12:02.340391 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:12:14 crc kubenswrapper[4811]: I0128 16:12:14.339959 4811 scope.go:117] "RemoveContainer" containerID="8a7a828294dde0bd8dbd211cdc8b81d51144a40905535ed75ebac45ca7ba49c2" Jan 28 16:12:14 crc kubenswrapper[4811]: E0128 16:12:14.340678 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:12:19 crc kubenswrapper[4811]: I0128 16:12:19.275984 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-gp6vx"] Jan 28 16:12:19 crc kubenswrapper[4811]: E0128 16:12:19.276760 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58741767-826e-4b8a-bca6-6ed6a559d01f" containerName="extract-content" Jan 28 16:12:19 crc kubenswrapper[4811]: I0128 16:12:19.276777 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="58741767-826e-4b8a-bca6-6ed6a559d01f" containerName="extract-content" Jan 28 16:12:19 crc kubenswrapper[4811]: E0128 16:12:19.276802 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00ab43ae-fc05-4652-9ccb-6e32a47742bb" containerName="extract-utilities" Jan 28 16:12:19 crc kubenswrapper[4811]: I0128 16:12:19.276811 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="00ab43ae-fc05-4652-9ccb-6e32a47742bb" containerName="extract-utilities" Jan 28 16:12:19 crc kubenswrapper[4811]: E0128 16:12:19.276830 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00ab43ae-fc05-4652-9ccb-6e32a47742bb" containerName="extract-content" Jan 
Jan 28 16:12:19 crc kubenswrapper[4811]: I0128 16:12:19.276839 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="00ab43ae-fc05-4652-9ccb-6e32a47742bb" containerName="extract-content"
Jan 28 16:12:19 crc kubenswrapper[4811]: E0128 16:12:19.276851 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58741767-826e-4b8a-bca6-6ed6a559d01f" containerName="extract-utilities"
Jan 28 16:12:19 crc kubenswrapper[4811]: I0128 16:12:19.276859 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="58741767-826e-4b8a-bca6-6ed6a559d01f" containerName="extract-utilities"
Jan 28 16:12:19 crc kubenswrapper[4811]: E0128 16:12:19.276871 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58741767-826e-4b8a-bca6-6ed6a559d01f" containerName="registry-server"
Jan 28 16:12:19 crc kubenswrapper[4811]: I0128 16:12:19.276878 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="58741767-826e-4b8a-bca6-6ed6a559d01f" containerName="registry-server"
Jan 28 16:12:19 crc kubenswrapper[4811]: E0128 16:12:19.276891 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00ab43ae-fc05-4652-9ccb-6e32a47742bb" containerName="registry-server"
Jan 28 16:12:19 crc kubenswrapper[4811]: I0128 16:12:19.276898 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="00ab43ae-fc05-4652-9ccb-6e32a47742bb" containerName="registry-server"
Jan 28 16:12:19 crc kubenswrapper[4811]: I0128 16:12:19.277048 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="00ab43ae-fc05-4652-9ccb-6e32a47742bb" containerName="registry-server"
Jan 28 16:12:19 crc kubenswrapper[4811]: I0128 16:12:19.277067 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="58741767-826e-4b8a-bca6-6ed6a559d01f" containerName="registry-server"
Jan 28 16:12:19 crc kubenswrapper[4811]: I0128 16:12:19.277976 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gp6vx"
Jan 28 16:12:19 crc kubenswrapper[4811]: I0128 16:12:19.290107 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gp6vx"]
Jan 28 16:12:19 crc kubenswrapper[4811]: I0128 16:12:19.442513 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c02d831-3666-4989-b818-32a5a489fab0-utilities\") pod \"community-operators-gp6vx\" (UID: \"7c02d831-3666-4989-b818-32a5a489fab0\") " pod="openshift-marketplace/community-operators-gp6vx"
Jan 28 16:12:19 crc kubenswrapper[4811]: I0128 16:12:19.442566 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4q4vm\" (UniqueName: \"kubernetes.io/projected/7c02d831-3666-4989-b818-32a5a489fab0-kube-api-access-4q4vm\") pod \"community-operators-gp6vx\" (UID: \"7c02d831-3666-4989-b818-32a5a489fab0\") " pod="openshift-marketplace/community-operators-gp6vx"
Jan 28 16:12:19 crc kubenswrapper[4811]: I0128 16:12:19.442612 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c02d831-3666-4989-b818-32a5a489fab0-catalog-content\") pod \"community-operators-gp6vx\" (UID: \"7c02d831-3666-4989-b818-32a5a489fab0\") " pod="openshift-marketplace/community-operators-gp6vx"
Jan 28 16:12:19 crc kubenswrapper[4811]: I0128 16:12:19.544146 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c02d831-3666-4989-b818-32a5a489fab0-utilities\") pod \"community-operators-gp6vx\" (UID: \"7c02d831-3666-4989-b818-32a5a489fab0\") " pod="openshift-marketplace/community-operators-gp6vx"
Jan 28 16:12:19 crc kubenswrapper[4811]: I0128 16:12:19.544510 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4q4vm\" (UniqueName: \"kubernetes.io/projected/7c02d831-3666-4989-b818-32a5a489fab0-kube-api-access-4q4vm\") pod \"community-operators-gp6vx\" (UID: \"7c02d831-3666-4989-b818-32a5a489fab0\") " pod="openshift-marketplace/community-operators-gp6vx"
Jan 28 16:12:19 crc kubenswrapper[4811]: I0128 16:12:19.544647 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c02d831-3666-4989-b818-32a5a489fab0-catalog-content\") pod \"community-operators-gp6vx\" (UID: \"7c02d831-3666-4989-b818-32a5a489fab0\") " pod="openshift-marketplace/community-operators-gp6vx"
Jan 28 16:12:19 crc kubenswrapper[4811]: I0128 16:12:19.544686 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c02d831-3666-4989-b818-32a5a489fab0-utilities\") pod \"community-operators-gp6vx\" (UID: \"7c02d831-3666-4989-b818-32a5a489fab0\") " pod="openshift-marketplace/community-operators-gp6vx"
Jan 28 16:12:19 crc kubenswrapper[4811]: I0128 16:12:19.544884 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c02d831-3666-4989-b818-32a5a489fab0-catalog-content\") pod \"community-operators-gp6vx\" (UID: \"7c02d831-3666-4989-b818-32a5a489fab0\") " pod="openshift-marketplace/community-operators-gp6vx"
Jan 28 16:12:19 crc kubenswrapper[4811]: I0128 16:12:19.563164 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4q4vm\" (UniqueName: \"kubernetes.io/projected/7c02d831-3666-4989-b818-32a5a489fab0-kube-api-access-4q4vm\") pod \"community-operators-gp6vx\" (UID: \"7c02d831-3666-4989-b818-32a5a489fab0\") " pod="openshift-marketplace/community-operators-gp6vx"
Jan 28 16:12:19 crc kubenswrapper[4811]: I0128 16:12:19.594668 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gp6vx"
Jan 28 16:12:20 crc kubenswrapper[4811]: I0128 16:12:20.093044 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gp6vx"]
Jan 28 16:12:20 crc kubenswrapper[4811]: W0128 16:12:20.104403 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7c02d831_3666_4989_b818_32a5a489fab0.slice/crio-6d05c6b95e634dd0566e5751ca15c2624143e3f2af915578c5d85c827ba66615 WatchSource:0}: Error finding container 6d05c6b95e634dd0566e5751ca15c2624143e3f2af915578c5d85c827ba66615: Status 404 returned error can't find the container with id 6d05c6b95e634dd0566e5751ca15c2624143e3f2af915578c5d85c827ba66615
Jan 28 16:12:20 crc kubenswrapper[4811]: I0128 16:12:20.622786 4811 generic.go:334] "Generic (PLEG): container finished" podID="7c02d831-3666-4989-b818-32a5a489fab0" containerID="425702c7b54c9450abab324900e2c90dbcb1e8b642b23887f276930e4f5bf15d" exitCode=0
Jan 28 16:12:20 crc kubenswrapper[4811]: I0128 16:12:20.622861 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gp6vx" event={"ID":"7c02d831-3666-4989-b818-32a5a489fab0","Type":"ContainerDied","Data":"425702c7b54c9450abab324900e2c90dbcb1e8b642b23887f276930e4f5bf15d"}
Jan 28 16:12:20 crc kubenswrapper[4811]: I0128 16:12:20.623124 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gp6vx" event={"ID":"7c02d831-3666-4989-b818-32a5a489fab0","Type":"ContainerStarted","Data":"6d05c6b95e634dd0566e5751ca15c2624143e3f2af915578c5d85c827ba66615"}
Jan 28 16:12:21 crc kubenswrapper[4811]: I0128 16:12:21.631181 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gp6vx" event={"ID":"7c02d831-3666-4989-b818-32a5a489fab0","Type":"ContainerStarted","Data":"3674aa59374feb554013edf0e0f150ff48978d0683dcbe62b7cbe4e51ed309c7"}
Jan 28 16:12:22 crc kubenswrapper[4811]: I0128 16:12:22.641270 4811 generic.go:334] "Generic (PLEG): container finished" podID="7c02d831-3666-4989-b818-32a5a489fab0" containerID="3674aa59374feb554013edf0e0f150ff48978d0683dcbe62b7cbe4e51ed309c7" exitCode=0
Jan 28 16:12:22 crc kubenswrapper[4811]: I0128 16:12:22.641324 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gp6vx" event={"ID":"7c02d831-3666-4989-b818-32a5a489fab0","Type":"ContainerDied","Data":"3674aa59374feb554013edf0e0f150ff48978d0683dcbe62b7cbe4e51ed309c7"}
Jan 28 16:12:23 crc kubenswrapper[4811]: I0128 16:12:23.659101 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gp6vx" event={"ID":"7c02d831-3666-4989-b818-32a5a489fab0","Type":"ContainerStarted","Data":"44794b8c233721eaa55d503c41ce2dd77930c07cbec2b3f06b7b64a7a3f5f02f"}
Jan 28 16:12:23 crc kubenswrapper[4811]: I0128 16:12:23.687381 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-gp6vx" podStartSLOduration=2.160150011 podStartE2EDuration="4.687357521s" podCreationTimestamp="2026-01-28 16:12:19 +0000 UTC" firstStartedPulling="2026-01-28 16:12:20.624286573 +0000 UTC m=+1633.378650156" lastFinishedPulling="2026-01-28 16:12:23.151494093 +0000 UTC m=+1635.905857666" observedRunningTime="2026-01-28 16:12:23.677828059 +0000 UTC m=+1636.432191642" watchObservedRunningTime="2026-01-28 16:12:23.687357521 +0000 UTC m=+1636.441721104"
Jan 28 16:12:27 crc kubenswrapper[4811]: I0128 16:12:27.148106 4811 scope.go:117] "RemoveContainer" containerID="4a7d9e07ca981c6204e9b634c786607928fb94b0320825c489d7eca986c71543"
Jan 28 16:12:27 crc kubenswrapper[4811]: I0128 16:12:27.173465 4811 scope.go:117] "RemoveContainer" containerID="5a3fa3b8d7bb0be864b879d43a6dbbc29d9bee3fe0f2db101af93bfd5822456a"
Jan 28 16:12:27 crc kubenswrapper[4811]: I0128 16:12:27.213890 4811 scope.go:117] "RemoveContainer" containerID="e8b149fc467c164b5dc15e84b4f1cb9f31f9cc10102d0bf7736810ca7a664fb6"
Jan 28 16:12:27 crc kubenswrapper[4811]: I0128 16:12:27.252860 4811 scope.go:117] "RemoveContainer" containerID="daf9818e6465dcfb5327240a49eeaa4ce4fa977d0f7bce0e7bcf99376e8eb451"
Jan 28 16:12:27 crc kubenswrapper[4811]: I0128 16:12:27.290283 4811 scope.go:117] "RemoveContainer" containerID="461b1f2a631c035ee0040497000f314a0c33ac50a81a9089d9f446efd4a45ed5"
Jan 28 16:12:27 crc kubenswrapper[4811]: I0128 16:12:27.334309 4811 scope.go:117] "RemoveContainer" containerID="e99139d534fdaf6bb0e79ccd58352c7ed2618f532466db2fb7b3493e6ba9b410"
Jan 28 16:12:27 crc kubenswrapper[4811]: I0128 16:12:27.377273 4811 scope.go:117] "RemoveContainer" containerID="e245bc5e0a213d1b27f569389ed440b4a2a0e5de3a49de48ac3935e44c74ce9b"
Jan 28 16:12:27 crc kubenswrapper[4811]: I0128 16:12:27.397562 4811 scope.go:117] "RemoveContainer" containerID="d271cbcb27898c830d254bcb5c8dffe5a3f7640e10dbf52bc69624d036c550ec"
Jan 28 16:12:27 crc kubenswrapper[4811]: I0128 16:12:27.418209 4811 scope.go:117] "RemoveContainer" containerID="80a47d6d78e960264bb33e177da16fbe4abd33aedd8c3332b69b669903b0efc8"
Jan 28 16:12:27 crc kubenswrapper[4811]: I0128 16:12:27.442258 4811 scope.go:117] "RemoveContainer" containerID="33fc76229b44af1d5e99aae4aff4bb9e9588948d91bdcbefef6909f49f3c29dd"
Jan 28 16:12:27 crc kubenswrapper[4811]: I0128 16:12:27.457936 4811 scope.go:117] "RemoveContainer" containerID="883dabd11b048cfe00fac20c4bf6061579e989100466ed30712029e12cebf29d"
Jan 28 16:12:27 crc kubenswrapper[4811]: I0128 16:12:27.474188 4811 scope.go:117] "RemoveContainer" containerID="6bab905966a0769cc19f4f9e9080a14cf74fc575091dd1fb4eefb542e81f131c"
Jan 28 16:12:27 crc kubenswrapper[4811]: I0128 16:12:27.503939 4811 scope.go:117] "RemoveContainer" containerID="faaf959bb7d719d81e75fe135d7ec377756b2770bdced5ddd98fe7f32a8ff3c2"
Jan 28 16:12:27 crc kubenswrapper[4811]: I0128 16:12:27.527802 4811 scope.go:117] "RemoveContainer" containerID="1cacc3d8ee6e5ba0678d2018830f89cb975dc25132dd501c515a3eb1e95fb565"
Jan 28 16:12:27 crc kubenswrapper[4811]: I0128 16:12:27.543694 4811 scope.go:117] "RemoveContainer" containerID="1ceb6f6a94b79fe56385e3ac04ed41338c77b0c74c77cd24cf368cbae3715bd3"
Jan 28 16:12:27 crc kubenswrapper[4811]: I0128 16:12:27.563622 4811 scope.go:117] "RemoveContainer" containerID="f8ff919f7d2a453d6497a7e7fa333f979d299990ca42012a62c1896d1dbd9155"
Jan 28 16:12:27 crc kubenswrapper[4811]: I0128 16:12:27.589574 4811 scope.go:117] "RemoveContainer" containerID="ac51924788f9396826cde65d3bed2e0e948d0867ac9860b62b53b6c4f2d570a3"
Jan 28 16:12:27 crc kubenswrapper[4811]: I0128 16:12:27.612916 4811 scope.go:117] "RemoveContainer" containerID="2c0184b6555e2c8ad21e10f4b931d8a3dd221bc5ed55f0ff10d98fe0449c7daa"
Jan 28 16:12:27 crc kubenswrapper[4811]: I0128 16:12:27.649099 4811 scope.go:117] "RemoveContainer" containerID="945e5bdfc0446fe693f7c19ef23a0eb10103bbb962463cff45c55bc97923580b"
Jan 28 16:12:28 crc kubenswrapper[4811]: I0128 16:12:28.347731 4811 scope.go:117] "RemoveContainer" containerID="8a7a828294dde0bd8dbd211cdc8b81d51144a40905535ed75ebac45ca7ba49c2"
Jan 28 16:12:28 crc kubenswrapper[4811]: E0128 16:12:28.348198 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 16:12:29 crc kubenswrapper[4811]: I0128 16:12:29.595827 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-gp6vx"
Jan 28 16:12:29 crc kubenswrapper[4811]: I0128 16:12:29.596219 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-gp6vx"
Jan 28 16:12:29 crc kubenswrapper[4811]: I0128 16:12:29.645798 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-gp6vx"
Jan 28 16:12:29 crc kubenswrapper[4811]: I0128 16:12:29.776608 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-gp6vx"
Jan 28 16:12:29 crc kubenswrapper[4811]: I0128 16:12:29.875960 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gp6vx"]
Jan 28 16:12:31 crc kubenswrapper[4811]: I0128 16:12:31.750810 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-gp6vx" podUID="7c02d831-3666-4989-b818-32a5a489fab0" containerName="registry-server" containerID="cri-o://44794b8c233721eaa55d503c41ce2dd77930c07cbec2b3f06b7b64a7a3f5f02f" gracePeriod=2
Jan 28 16:12:32 crc kubenswrapper[4811]: I0128 16:12:32.232622 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gp6vx"
Jan 28 16:12:32 crc kubenswrapper[4811]: I0128 16:12:32.420649 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c02d831-3666-4989-b818-32a5a489fab0-utilities\") pod \"7c02d831-3666-4989-b818-32a5a489fab0\" (UID: \"7c02d831-3666-4989-b818-32a5a489fab0\") "
Jan 28 16:12:32 crc kubenswrapper[4811]: I0128 16:12:32.420758 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4q4vm\" (UniqueName: \"kubernetes.io/projected/7c02d831-3666-4989-b818-32a5a489fab0-kube-api-access-4q4vm\") pod \"7c02d831-3666-4989-b818-32a5a489fab0\" (UID: \"7c02d831-3666-4989-b818-32a5a489fab0\") "
Jan 28 16:12:32 crc kubenswrapper[4811]: I0128 16:12:32.420817 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c02d831-3666-4989-b818-32a5a489fab0-catalog-content\") pod \"7c02d831-3666-4989-b818-32a5a489fab0\" (UID: \"7c02d831-3666-4989-b818-32a5a489fab0\") "
Jan 28 16:12:32 crc kubenswrapper[4811]: I0128 16:12:32.422114 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c02d831-3666-4989-b818-32a5a489fab0-utilities" (OuterVolumeSpecName: "utilities") pod "7c02d831-3666-4989-b818-32a5a489fab0" (UID: "7c02d831-3666-4989-b818-32a5a489fab0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 16:12:32 crc kubenswrapper[4811]: I0128 16:12:32.427339 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c02d831-3666-4989-b818-32a5a489fab0-kube-api-access-4q4vm" (OuterVolumeSpecName: "kube-api-access-4q4vm") pod "7c02d831-3666-4989-b818-32a5a489fab0" (UID: "7c02d831-3666-4989-b818-32a5a489fab0"). InnerVolumeSpecName "kube-api-access-4q4vm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 16:12:32 crc kubenswrapper[4811]: I0128 16:12:32.522461 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c02d831-3666-4989-b818-32a5a489fab0-utilities\") on node \"crc\" DevicePath \"\""
Jan 28 16:12:32 crc kubenswrapper[4811]: I0128 16:12:32.522512 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4q4vm\" (UniqueName: \"kubernetes.io/projected/7c02d831-3666-4989-b818-32a5a489fab0-kube-api-access-4q4vm\") on node \"crc\" DevicePath \"\""
Jan 28 16:12:32 crc kubenswrapper[4811]: I0128 16:12:32.764484 4811 generic.go:334] "Generic (PLEG): container finished" podID="7c02d831-3666-4989-b818-32a5a489fab0" containerID="44794b8c233721eaa55d503c41ce2dd77930c07cbec2b3f06b7b64a7a3f5f02f" exitCode=0
Jan 28 16:12:32 crc kubenswrapper[4811]: I0128 16:12:32.764548 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gp6vx" event={"ID":"7c02d831-3666-4989-b818-32a5a489fab0","Type":"ContainerDied","Data":"44794b8c233721eaa55d503c41ce2dd77930c07cbec2b3f06b7b64a7a3f5f02f"}
Jan 28 16:12:32 crc kubenswrapper[4811]: I0128 16:12:32.764590 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gp6vx" event={"ID":"7c02d831-3666-4989-b818-32a5a489fab0","Type":"ContainerDied","Data":"6d05c6b95e634dd0566e5751ca15c2624143e3f2af915578c5d85c827ba66615"}
Jan 28 16:12:32 crc kubenswrapper[4811]: I0128 16:12:32.764602 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gp6vx"
Jan 28 16:12:32 crc kubenswrapper[4811]: I0128 16:12:32.764615 4811 scope.go:117] "RemoveContainer" containerID="44794b8c233721eaa55d503c41ce2dd77930c07cbec2b3f06b7b64a7a3f5f02f"
Jan 28 16:12:32 crc kubenswrapper[4811]: I0128 16:12:32.785266 4811 scope.go:117] "RemoveContainer" containerID="3674aa59374feb554013edf0e0f150ff48978d0683dcbe62b7cbe4e51ed309c7"
Jan 28 16:12:32 crc kubenswrapper[4811]: I0128 16:12:32.804175 4811 scope.go:117] "RemoveContainer" containerID="425702c7b54c9450abab324900e2c90dbcb1e8b642b23887f276930e4f5bf15d"
Jan 28 16:12:32 crc kubenswrapper[4811]: I0128 16:12:32.823748 4811 scope.go:117] "RemoveContainer" containerID="44794b8c233721eaa55d503c41ce2dd77930c07cbec2b3f06b7b64a7a3f5f02f"
Jan 28 16:12:32 crc kubenswrapper[4811]: E0128 16:12:32.824245 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"44794b8c233721eaa55d503c41ce2dd77930c07cbec2b3f06b7b64a7a3f5f02f\": container with ID starting with 44794b8c233721eaa55d503c41ce2dd77930c07cbec2b3f06b7b64a7a3f5f02f not found: ID does not exist" containerID="44794b8c233721eaa55d503c41ce2dd77930c07cbec2b3f06b7b64a7a3f5f02f"
Jan 28 16:12:32 crc kubenswrapper[4811]: I0128 16:12:32.824279 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44794b8c233721eaa55d503c41ce2dd77930c07cbec2b3f06b7b64a7a3f5f02f"} err="failed to get container status \"44794b8c233721eaa55d503c41ce2dd77930c07cbec2b3f06b7b64a7a3f5f02f\": rpc error: code = NotFound desc = could not find container \"44794b8c233721eaa55d503c41ce2dd77930c07cbec2b3f06b7b64a7a3f5f02f\": container with ID starting with 44794b8c233721eaa55d503c41ce2dd77930c07cbec2b3f06b7b64a7a3f5f02f not found: ID does not exist"
Jan 28 16:12:32 crc kubenswrapper[4811]: I0128 16:12:32.824304 4811 scope.go:117] "RemoveContainer" containerID="3674aa59374feb554013edf0e0f150ff48978d0683dcbe62b7cbe4e51ed309c7"
Jan 28 16:12:32 crc kubenswrapper[4811]: E0128 16:12:32.824677 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3674aa59374feb554013edf0e0f150ff48978d0683dcbe62b7cbe4e51ed309c7\": container with ID starting with 3674aa59374feb554013edf0e0f150ff48978d0683dcbe62b7cbe4e51ed309c7 not found: ID does not exist" containerID="3674aa59374feb554013edf0e0f150ff48978d0683dcbe62b7cbe4e51ed309c7"
Jan 28 16:12:32 crc kubenswrapper[4811]: I0128 16:12:32.824732 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3674aa59374feb554013edf0e0f150ff48978d0683dcbe62b7cbe4e51ed309c7"} err="failed to get container status \"3674aa59374feb554013edf0e0f150ff48978d0683dcbe62b7cbe4e51ed309c7\": rpc error: code = NotFound desc = could not find container \"3674aa59374feb554013edf0e0f150ff48978d0683dcbe62b7cbe4e51ed309c7\": container with ID starting with 3674aa59374feb554013edf0e0f150ff48978d0683dcbe62b7cbe4e51ed309c7 not found: ID does not exist"
Jan 28 16:12:32 crc kubenswrapper[4811]: I0128 16:12:32.824764 4811 scope.go:117] "RemoveContainer" containerID="425702c7b54c9450abab324900e2c90dbcb1e8b642b23887f276930e4f5bf15d"
Jan 28 16:12:32 crc kubenswrapper[4811]: E0128 16:12:32.825152 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"425702c7b54c9450abab324900e2c90dbcb1e8b642b23887f276930e4f5bf15d\": container with ID starting with 425702c7b54c9450abab324900e2c90dbcb1e8b642b23887f276930e4f5bf15d not found: ID does not exist" containerID="425702c7b54c9450abab324900e2c90dbcb1e8b642b23887f276930e4f5bf15d"
Jan 28 16:12:32 crc kubenswrapper[4811]: I0128 16:12:32.825220 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"425702c7b54c9450abab324900e2c90dbcb1e8b642b23887f276930e4f5bf15d"} err="failed to get container status \"425702c7b54c9450abab324900e2c90dbcb1e8b642b23887f276930e4f5bf15d\": rpc error: code = NotFound desc = could not find container \"425702c7b54c9450abab324900e2c90dbcb1e8b642b23887f276930e4f5bf15d\": container with ID starting with 425702c7b54c9450abab324900e2c90dbcb1e8b642b23887f276930e4f5bf15d not found: ID does not exist"
Jan 28 16:12:32 crc kubenswrapper[4811]: I0128 16:12:32.896673 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c02d831-3666-4989-b818-32a5a489fab0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7c02d831-3666-4989-b818-32a5a489fab0" (UID: "7c02d831-3666-4989-b818-32a5a489fab0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 16:12:32 crc kubenswrapper[4811]: I0128 16:12:32.928877 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c02d831-3666-4989-b818-32a5a489fab0-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 28 16:12:33 crc kubenswrapper[4811]: I0128 16:12:33.100764 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gp6vx"]
Jan 28 16:12:33 crc kubenswrapper[4811]: I0128 16:12:33.109379 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-gp6vx"]
Jan 28 16:12:34 crc kubenswrapper[4811]: I0128 16:12:34.347348 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c02d831-3666-4989-b818-32a5a489fab0" path="/var/lib/kubelet/pods/7c02d831-3666-4989-b818-32a5a489fab0/volumes"
Jan 28 16:12:39 crc kubenswrapper[4811]: I0128 16:12:39.340074 4811 scope.go:117] "RemoveContainer" containerID="8a7a828294dde0bd8dbd211cdc8b81d51144a40905535ed75ebac45ca7ba49c2"
Jan 28 16:12:39 crc kubenswrapper[4811]: E0128 16:12:39.340590 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 16:12:51 crc kubenswrapper[4811]: I0128 16:12:51.339508 4811 scope.go:117] "RemoveContainer" containerID="8a7a828294dde0bd8dbd211cdc8b81d51144a40905535ed75ebac45ca7ba49c2"
Jan 28 16:12:51 crc kubenswrapper[4811]: E0128 16:12:51.340466 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 16:13:03 crc kubenswrapper[4811]: I0128 16:13:03.339074 4811 scope.go:117] "RemoveContainer" containerID="8a7a828294dde0bd8dbd211cdc8b81d51144a40905535ed75ebac45ca7ba49c2"
Jan 28 16:13:03 crc kubenswrapper[4811]: E0128 16:13:03.339821 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 16:13:15 crc kubenswrapper[4811]: I0128 16:13:15.339498 4811 scope.go:117] "RemoveContainer" containerID="8a7a828294dde0bd8dbd211cdc8b81d51144a40905535ed75ebac45ca7ba49c2"
Jan 28 16:13:15 crc kubenswrapper[4811]: E0128 16:13:15.340225 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:13:27 crc kubenswrapper[4811]: I0128 16:13:27.951904 4811 scope.go:117] "RemoveContainer" containerID="b0ddc832af1456e23eda70e2a290a6a34b84838cf3f77696c6d4ac858b8f50eb" Jan 28 16:13:27 crc kubenswrapper[4811]: I0128 16:13:27.991178 4811 scope.go:117] "RemoveContainer" containerID="f78d6d49ea579cf735dcc8dce1604031cf24c3e9ba1882bbee2bb70613b01d91" Jan 28 16:13:28 crc kubenswrapper[4811]: I0128 16:13:28.037859 4811 scope.go:117] "RemoveContainer" containerID="29ffefe393b5a77bb0a1a8dbf8b34fffc5338c9b9cf20ce0629162a5692975f5" Jan 28 16:13:28 crc kubenswrapper[4811]: I0128 16:13:28.074512 4811 scope.go:117] "RemoveContainer" containerID="ad829404888e86ff13f629c270205ae6a1f39869e59a1b682907fd81c45dd990" Jan 28 16:13:28 crc kubenswrapper[4811]: I0128 16:13:28.096498 4811 scope.go:117] "RemoveContainer" containerID="9163fd29bb4d295b9f0469a6d83cf56f2bde17afe8a70e37c7ef65ad1e87b52d" Jan 28 16:13:28 crc kubenswrapper[4811]: I0128 16:13:28.130798 4811 scope.go:117] "RemoveContainer" containerID="f19e5ad3c28bace5d654226d41313f51baa5ae3b141172e290e57574882f79e5" Jan 28 16:13:28 crc kubenswrapper[4811]: I0128 16:13:28.347767 4811 scope.go:117] "RemoveContainer" containerID="8a7a828294dde0bd8dbd211cdc8b81d51144a40905535ed75ebac45ca7ba49c2" Jan 28 16:13:28 crc kubenswrapper[4811]: E0128 16:13:28.348169 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:13:39 crc kubenswrapper[4811]: I0128 16:13:39.340135 4811 scope.go:117] "RemoveContainer" containerID="8a7a828294dde0bd8dbd211cdc8b81d51144a40905535ed75ebac45ca7ba49c2" Jan 28 16:13:39 crc kubenswrapper[4811]: E0128 16:13:39.341096 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:13:51 crc kubenswrapper[4811]: I0128 16:13:51.340164 4811 scope.go:117] "RemoveContainer" containerID="8a7a828294dde0bd8dbd211cdc8b81d51144a40905535ed75ebac45ca7ba49c2" Jan 28 16:13:51 crc kubenswrapper[4811]: E0128 16:13:51.341109 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:14:05 crc kubenswrapper[4811]: I0128 16:14:05.338868 4811 scope.go:117] "RemoveContainer" containerID="8a7a828294dde0bd8dbd211cdc8b81d51144a40905535ed75ebac45ca7ba49c2" Jan 28 16:14:05 crc kubenswrapper[4811]: E0128 16:14:05.340922 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:14:20 crc kubenswrapper[4811]: I0128 16:14:20.339795 4811 scope.go:117] "RemoveContainer" containerID="8a7a828294dde0bd8dbd211cdc8b81d51144a40905535ed75ebac45ca7ba49c2" Jan 28 16:14:20 crc kubenswrapper[4811]: E0128 16:14:20.340730 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:14:28 crc kubenswrapper[4811]: I0128 16:14:28.247634 4811 scope.go:117] "RemoveContainer" containerID="502008b5e1203cf387522bb1e522bcd6610092f38c7e8f24a175db1f14e4d5a3" Jan 28 16:14:28 crc kubenswrapper[4811]: I0128 16:14:28.267461 4811 scope.go:117] "RemoveContainer" containerID="5f95164c6bc5517fe5021ef5b89f914d6a0e68587b77f5bb6c5c012a938fb36a" Jan 28 16:14:28 crc kubenswrapper[4811]: I0128 16:14:28.290879 4811 scope.go:117] "RemoveContainer" containerID="638bf6a409a2ee8ef91ccc30517ca80008633479d4f2b150788e6d75c6463742" Jan 28 16:14:28 crc kubenswrapper[4811]: I0128 16:14:28.309324 4811 scope.go:117] "RemoveContainer" containerID="a5491b7008847bdd58a8450727ced7dbf63f1b967864e6d2e88817c2d77ec750" Jan 28 16:14:28 crc kubenswrapper[4811]: I0128 16:14:28.328545 4811 scope.go:117] "RemoveContainer" containerID="22ebe26a0dcd5c950f597ff3f4709d84054b91215a6052d84aa696ebda1c436f" Jan 28 16:14:28 crc kubenswrapper[4811]: I0128 16:14:28.349574 4811 scope.go:117] "RemoveContainer" containerID="5704b2f39ed016d4b37800c5475f466e0b8a3d7d89c94a7a9f250a40170a8295" Jan 28 16:14:28 crc kubenswrapper[4811]: I0128 16:14:28.370246 4811 scope.go:117] "RemoveContainer" containerID="5339a634a87a740049c83286d07d362f3bfc8b28b45e9db54790227cd799ad11" Jan 28 16:14:28 crc kubenswrapper[4811]: I0128 16:14:28.392600 4811 scope.go:117] "RemoveContainer" containerID="8394fb63167b883b169be61e77ba09dc1dc48ea0db4cbe8e5d06a4310c849ea9" Jan 28 16:14:28 crc kubenswrapper[4811]: I0128 16:14:28.424144 4811 scope.go:117] "RemoveContainer" containerID="c766c501d7fef70ac33d495992a7f932c795028bb1028a15f2a477629ccc37be" Jan 28 16:14:33 crc kubenswrapper[4811]: I0128 16:14:33.339890 4811 scope.go:117] "RemoveContainer" containerID="8a7a828294dde0bd8dbd211cdc8b81d51144a40905535ed75ebac45ca7ba49c2" Jan 28 16:14:33 crc kubenswrapper[4811]: E0128 16:14:33.340504 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:14:45 crc kubenswrapper[4811]: I0128 16:14:45.339284 4811 scope.go:117] "RemoveContainer" containerID="8a7a828294dde0bd8dbd211cdc8b81d51144a40905535ed75ebac45ca7ba49c2" Jan 28 16:14:45 crc kubenswrapper[4811]: E0128 16:14:45.340150 4811 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:14:59 crc kubenswrapper[4811]: I0128 16:14:59.340207 4811 scope.go:117] "RemoveContainer" containerID="8a7a828294dde0bd8dbd211cdc8b81d51144a40905535ed75ebac45ca7ba49c2" Jan 28 16:14:59 crc kubenswrapper[4811]: E0128 16:14:59.340911 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:15:00 crc kubenswrapper[4811]: I0128 16:15:00.171160 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493615-cdnh6"] Jan 28 16:15:00 crc kubenswrapper[4811]: E0128 16:15:00.171505 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c02d831-3666-4989-b818-32a5a489fab0" containerName="extract-content" Jan 28 16:15:00 crc kubenswrapper[4811]: I0128 16:15:00.171521 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c02d831-3666-4989-b818-32a5a489fab0" containerName="extract-content" Jan 28 16:15:00 crc kubenswrapper[4811]: E0128 16:15:00.171558 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c02d831-3666-4989-b818-32a5a489fab0" containerName="registry-server" Jan 28 16:15:00 crc kubenswrapper[4811]: I0128 16:15:00.171566 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c02d831-3666-4989-b818-32a5a489fab0" containerName="registry-server" Jan 28 16:15:00 crc kubenswrapper[4811]: E0128 16:15:00.171582 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c02d831-3666-4989-b818-32a5a489fab0" containerName="extract-utilities" Jan 28 16:15:00 crc kubenswrapper[4811]: I0128 16:15:00.171590 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c02d831-3666-4989-b818-32a5a489fab0" containerName="extract-utilities" Jan 28 16:15:00 crc kubenswrapper[4811]: I0128 16:15:00.171741 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c02d831-3666-4989-b818-32a5a489fab0" containerName="registry-server" Jan 28 16:15:00 crc kubenswrapper[4811]: I0128 16:15:00.172199 4811 util.go:30] "No sandbox for pod can be found. 
Jan 28 16:15:00 crc kubenswrapper[4811]: I0128 16:15:00.174209 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Jan 28 16:15:00 crc kubenswrapper[4811]: I0128 16:15:00.174508 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Jan 28 16:15:00 crc kubenswrapper[4811]: I0128 16:15:00.183628 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493615-cdnh6"]
Jan 28 16:15:00 crc kubenswrapper[4811]: I0128 16:15:00.217325 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qkdw\" (UniqueName: \"kubernetes.io/projected/27e7b42c-f027-4446-8fcb-1f9cb9ca6691-kube-api-access-2qkdw\") pod \"collect-profiles-29493615-cdnh6\" (UID: \"27e7b42c-f027-4446-8fcb-1f9cb9ca6691\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493615-cdnh6"
Jan 28 16:15:00 crc kubenswrapper[4811]: I0128 16:15:00.217372 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/27e7b42c-f027-4446-8fcb-1f9cb9ca6691-secret-volume\") pod \"collect-profiles-29493615-cdnh6\" (UID: \"27e7b42c-f027-4446-8fcb-1f9cb9ca6691\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493615-cdnh6"
Jan 28 16:15:00 crc kubenswrapper[4811]: I0128 16:15:00.217423 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/27e7b42c-f027-4446-8fcb-1f9cb9ca6691-config-volume\") pod \"collect-profiles-29493615-cdnh6\" (UID: \"27e7b42c-f027-4446-8fcb-1f9cb9ca6691\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493615-cdnh6"
Jan 28 16:15:00 crc kubenswrapper[4811]: I0128 16:15:00.318715 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/27e7b42c-f027-4446-8fcb-1f9cb9ca6691-config-volume\") pod \"collect-profiles-29493615-cdnh6\" (UID: \"27e7b42c-f027-4446-8fcb-1f9cb9ca6691\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493615-cdnh6"
Jan 28 16:15:00 crc kubenswrapper[4811]: I0128 16:15:00.318813 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qkdw\" (UniqueName: \"kubernetes.io/projected/27e7b42c-f027-4446-8fcb-1f9cb9ca6691-kube-api-access-2qkdw\") pod \"collect-profiles-29493615-cdnh6\" (UID: \"27e7b42c-f027-4446-8fcb-1f9cb9ca6691\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493615-cdnh6"
Jan 28 16:15:00 crc kubenswrapper[4811]: I0128 16:15:00.318837 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/27e7b42c-f027-4446-8fcb-1f9cb9ca6691-secret-volume\") pod \"collect-profiles-29493615-cdnh6\" (UID: \"27e7b42c-f027-4446-8fcb-1f9cb9ca6691\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493615-cdnh6"
Jan 28 16:15:00 crc kubenswrapper[4811]: I0128 16:15:00.319639 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/27e7b42c-f027-4446-8fcb-1f9cb9ca6691-config-volume\") pod \"collect-profiles-29493615-cdnh6\" (UID: \"27e7b42c-f027-4446-8fcb-1f9cb9ca6691\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493615-cdnh6"
Jan 28 16:15:00 crc kubenswrapper[4811]: I0128 16:15:00.329084 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/27e7b42c-f027-4446-8fcb-1f9cb9ca6691-secret-volume\") pod \"collect-profiles-29493615-cdnh6\" (UID: \"27e7b42c-f027-4446-8fcb-1f9cb9ca6691\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493615-cdnh6"
Jan 28 16:15:00 crc kubenswrapper[4811]: I0128 16:15:00.335916 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qkdw\" (UniqueName: \"kubernetes.io/projected/27e7b42c-f027-4446-8fcb-1f9cb9ca6691-kube-api-access-2qkdw\") pod \"collect-profiles-29493615-cdnh6\" (UID: \"27e7b42c-f027-4446-8fcb-1f9cb9ca6691\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493615-cdnh6"
Jan 28 16:15:00 crc kubenswrapper[4811]: I0128 16:15:00.492644 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493615-cdnh6"
Jan 28 16:15:00 crc kubenswrapper[4811]: I0128 16:15:00.732575 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493615-cdnh6"]
Jan 28 16:15:00 crc kubenswrapper[4811]: I0128 16:15:00.975406 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493615-cdnh6" event={"ID":"27e7b42c-f027-4446-8fcb-1f9cb9ca6691","Type":"ContainerStarted","Data":"546209ae250d5193c690f287f232f49feb8027d655b821b9d383b4eed9868418"}
Jan 28 16:15:01 crc kubenswrapper[4811]: I0128 16:15:01.986215 4811 generic.go:334] "Generic (PLEG): container finished" podID="27e7b42c-f027-4446-8fcb-1f9cb9ca6691" containerID="4b60a62e58b3b608ad45cb758e4b9e7bae51b82799ee4ad49da0e0f6e7a25fbd" exitCode=0
Jan 28 16:15:01 crc kubenswrapper[4811]: I0128 16:15:01.986336 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493615-cdnh6" event={"ID":"27e7b42c-f027-4446-8fcb-1f9cb9ca6691","Type":"ContainerDied","Data":"4b60a62e58b3b608ad45cb758e4b9e7bae51b82799ee4ad49da0e0f6e7a25fbd"}
Jan 28 16:15:03 crc kubenswrapper[4811]: I0128 16:15:03.268952 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493615-cdnh6"
Jan 28 16:15:03 crc kubenswrapper[4811]: I0128 16:15:03.366837 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2qkdw\" (UniqueName: \"kubernetes.io/projected/27e7b42c-f027-4446-8fcb-1f9cb9ca6691-kube-api-access-2qkdw\") pod \"27e7b42c-f027-4446-8fcb-1f9cb9ca6691\" (UID: \"27e7b42c-f027-4446-8fcb-1f9cb9ca6691\") "
Jan 28 16:15:03 crc kubenswrapper[4811]: I0128 16:15:03.366891 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/27e7b42c-f027-4446-8fcb-1f9cb9ca6691-secret-volume\") pod \"27e7b42c-f027-4446-8fcb-1f9cb9ca6691\" (UID: \"27e7b42c-f027-4446-8fcb-1f9cb9ca6691\") "
Jan 28 16:15:03 crc kubenswrapper[4811]: I0128 16:15:03.366938 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/27e7b42c-f027-4446-8fcb-1f9cb9ca6691-config-volume\") pod \"27e7b42c-f027-4446-8fcb-1f9cb9ca6691\" (UID: \"27e7b42c-f027-4446-8fcb-1f9cb9ca6691\") "
Jan 28 16:15:03 crc kubenswrapper[4811]: I0128 16:15:03.369807 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27e7b42c-f027-4446-8fcb-1f9cb9ca6691-config-volume" (OuterVolumeSpecName: "config-volume") pod "27e7b42c-f027-4446-8fcb-1f9cb9ca6691" (UID: "27e7b42c-f027-4446-8fcb-1f9cb9ca6691"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 16:15:03 crc kubenswrapper[4811]: I0128 16:15:03.373759 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27e7b42c-f027-4446-8fcb-1f9cb9ca6691-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "27e7b42c-f027-4446-8fcb-1f9cb9ca6691" (UID: "27e7b42c-f027-4446-8fcb-1f9cb9ca6691"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 16:15:03 crc kubenswrapper[4811]: I0128 16:15:03.374001 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27e7b42c-f027-4446-8fcb-1f9cb9ca6691-kube-api-access-2qkdw" (OuterVolumeSpecName: "kube-api-access-2qkdw") pod "27e7b42c-f027-4446-8fcb-1f9cb9ca6691" (UID: "27e7b42c-f027-4446-8fcb-1f9cb9ca6691"). InnerVolumeSpecName "kube-api-access-2qkdw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 16:15:03 crc kubenswrapper[4811]: I0128 16:15:03.468836 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2qkdw\" (UniqueName: \"kubernetes.io/projected/27e7b42c-f027-4446-8fcb-1f9cb9ca6691-kube-api-access-2qkdw\") on node \"crc\" DevicePath \"\""
Jan 28 16:15:03 crc kubenswrapper[4811]: I0128 16:15:03.468906 4811 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/27e7b42c-f027-4446-8fcb-1f9cb9ca6691-secret-volume\") on node \"crc\" DevicePath \"\""
Jan 28 16:15:03 crc kubenswrapper[4811]: I0128 16:15:03.468926 4811 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/27e7b42c-f027-4446-8fcb-1f9cb9ca6691-config-volume\") on node \"crc\" DevicePath \"\""
Jan 28 16:15:04 crc kubenswrapper[4811]: I0128 16:15:04.031623 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493615-cdnh6" event={"ID":"27e7b42c-f027-4446-8fcb-1f9cb9ca6691","Type":"ContainerDied","Data":"546209ae250d5193c690f287f232f49feb8027d655b821b9d383b4eed9868418"}
Jan 28 16:15:04 crc kubenswrapper[4811]: I0128 16:15:04.031663 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="546209ae250d5193c690f287f232f49feb8027d655b821b9d383b4eed9868418"
Jan 28 16:15:04 crc kubenswrapper[4811]: I0128 16:15:04.031862 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493615-cdnh6"
Jan 28 16:15:14 crc kubenswrapper[4811]: I0128 16:15:14.340123 4811 scope.go:117] "RemoveContainer" containerID="8a7a828294dde0bd8dbd211cdc8b81d51144a40905535ed75ebac45ca7ba49c2"
Jan 28 16:15:15 crc kubenswrapper[4811]: I0128 16:15:15.107139 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"5964745d385c807e0bcacf2afe7db0e53c1eab505dceba168af601fbb26aa67c"}
Jan 28 16:15:26 crc kubenswrapper[4811]: I0128 16:15:26.609469 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mbbtk"]
Jan 28 16:15:26 crc kubenswrapper[4811]: E0128 16:15:26.610351 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27e7b42c-f027-4446-8fcb-1f9cb9ca6691" containerName="collect-profiles"
Jan 28 16:15:26 crc kubenswrapper[4811]: I0128 16:15:26.610370 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="27e7b42c-f027-4446-8fcb-1f9cb9ca6691" containerName="collect-profiles"
Jan 28 16:15:26 crc kubenswrapper[4811]: I0128 16:15:26.610578 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="27e7b42c-f027-4446-8fcb-1f9cb9ca6691" containerName="collect-profiles"
Jan 28 16:15:26 crc kubenswrapper[4811]: I0128 16:15:26.612008 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mbbtk"
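The reconciler_common and operation_generator entries above trace the volume manager's reconcile pattern over the collect-profiles pod's lifetime: volumes in the desired state get VerifyControllerAttachedVolume/MountVolume operations, and once the pod is gone, UnmountVolume runs until "Volume detached" is reported. A schematic Go sketch of that diff-and-act loop; the types and function names here are illustrative, not the kubelet's real API.

    // Schematic of the reconcile pattern behind the reconciler_common entries:
    // mount what is desired but not yet actual, unmount what is actual but no
    // longer desired.
    package main

    import "fmt"

    type volumeName string

    func reconcile(desired, actual map[volumeName]bool) {
        for v := range desired {
            if !actual[v] {
                fmt.Printf("MountVolume started for volume %q\n", v)
                actual[v] = true // MountVolume.SetUp succeeded
            }
        }
        for v := range actual {
            if !desired[v] {
                fmt.Printf("UnmountVolume started for volume %q\n", v)
                delete(actual, v) // UnmountVolume.TearDown succeeded -> Volume detached
            }
        }
    }

    func main() {
        actual := map[volumeName]bool{}
        // Pod added: three volumes become desired (cf. the collect-profiles entries).
        reconcile(map[volumeName]bool{"config-volume": true, "secret-volume": true, "kube-api-access-2qkdw": true}, actual)
        // Pod deleted: desired state is empty, everything is torn down.
        reconcile(map[volumeName]bool{}, actual)
    }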
Jan 28 16:15:26 crc kubenswrapper[4811]: I0128 16:15:26.628153 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mbbtk"]
Jan 28 16:15:26 crc kubenswrapper[4811]: I0128 16:15:26.700552 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8725a7e-2945-4600-9031-d849e865c7d1-catalog-content\") pod \"redhat-operators-mbbtk\" (UID: \"f8725a7e-2945-4600-9031-d849e865c7d1\") " pod="openshift-marketplace/redhat-operators-mbbtk"
Jan 28 16:15:26 crc kubenswrapper[4811]: I0128 16:15:26.700625 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tk462\" (UniqueName: \"kubernetes.io/projected/f8725a7e-2945-4600-9031-d849e865c7d1-kube-api-access-tk462\") pod \"redhat-operators-mbbtk\" (UID: \"f8725a7e-2945-4600-9031-d849e865c7d1\") " pod="openshift-marketplace/redhat-operators-mbbtk"
Jan 28 16:15:26 crc kubenswrapper[4811]: I0128 16:15:26.700690 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8725a7e-2945-4600-9031-d849e865c7d1-utilities\") pod \"redhat-operators-mbbtk\" (UID: \"f8725a7e-2945-4600-9031-d849e865c7d1\") " pod="openshift-marketplace/redhat-operators-mbbtk"
Jan 28 16:15:26 crc kubenswrapper[4811]: I0128 16:15:26.801316 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8725a7e-2945-4600-9031-d849e865c7d1-catalog-content\") pod \"redhat-operators-mbbtk\" (UID: \"f8725a7e-2945-4600-9031-d849e865c7d1\") " pod="openshift-marketplace/redhat-operators-mbbtk"
Jan 28 16:15:26 crc kubenswrapper[4811]: I0128 16:15:26.801356 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tk462\" (UniqueName: \"kubernetes.io/projected/f8725a7e-2945-4600-9031-d849e865c7d1-kube-api-access-tk462\") pod \"redhat-operators-mbbtk\" (UID: \"f8725a7e-2945-4600-9031-d849e865c7d1\") " pod="openshift-marketplace/redhat-operators-mbbtk"
Jan 28 16:15:26 crc kubenswrapper[4811]: I0128 16:15:26.801400 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8725a7e-2945-4600-9031-d849e865c7d1-utilities\") pod \"redhat-operators-mbbtk\" (UID: \"f8725a7e-2945-4600-9031-d849e865c7d1\") " pod="openshift-marketplace/redhat-operators-mbbtk"
Jan 28 16:15:26 crc kubenswrapper[4811]: I0128 16:15:26.803958 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8725a7e-2945-4600-9031-d849e865c7d1-catalog-content\") pod \"redhat-operators-mbbtk\" (UID: \"f8725a7e-2945-4600-9031-d849e865c7d1\") " pod="openshift-marketplace/redhat-operators-mbbtk"
Jan 28 16:15:26 crc kubenswrapper[4811]: I0128 16:15:26.803976 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8725a7e-2945-4600-9031-d849e865c7d1-utilities\") pod \"redhat-operators-mbbtk\" (UID: \"f8725a7e-2945-4600-9031-d849e865c7d1\") " pod="openshift-marketplace/redhat-operators-mbbtk"
Jan 28 16:15:26 crc kubenswrapper[4811]: I0128 16:15:26.822649 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tk462\" (UniqueName: \"kubernetes.io/projected/f8725a7e-2945-4600-9031-d849e865c7d1-kube-api-access-tk462\") pod \"redhat-operators-mbbtk\" (UID: \"f8725a7e-2945-4600-9031-d849e865c7d1\") " pod="openshift-marketplace/redhat-operators-mbbtk"
Jan 28 16:15:26 crc kubenswrapper[4811]: I0128 16:15:26.939720 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mbbtk"
Jan 28 16:15:27 crc kubenswrapper[4811]: I0128 16:15:27.376438 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mbbtk"]
Jan 28 16:15:27 crc kubenswrapper[4811]: W0128 16:15:27.382513 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf8725a7e_2945_4600_9031_d849e865c7d1.slice/crio-864bbb3b03694ab8ad505baf36f09ebd7d89b02dd5489188cd65fe153d641412 WatchSource:0}: Error finding container 864bbb3b03694ab8ad505baf36f09ebd7d89b02dd5489188cd65fe153d641412: Status 404 returned error can't find the container with id 864bbb3b03694ab8ad505baf36f09ebd7d89b02dd5489188cd65fe153d641412
Jan 28 16:15:28 crc kubenswrapper[4811]: I0128 16:15:28.211021 4811 generic.go:334] "Generic (PLEG): container finished" podID="f8725a7e-2945-4600-9031-d849e865c7d1" containerID="96043dbb7afebececa6b04b80434a8f6386216fb492d93aba5c44b61c917fe86" exitCode=0
Jan 28 16:15:28 crc kubenswrapper[4811]: I0128 16:15:28.211076 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mbbtk" event={"ID":"f8725a7e-2945-4600-9031-d849e865c7d1","Type":"ContainerDied","Data":"96043dbb7afebececa6b04b80434a8f6386216fb492d93aba5c44b61c917fe86"}
Jan 28 16:15:28 crc kubenswrapper[4811]: I0128 16:15:28.211109 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mbbtk" event={"ID":"f8725a7e-2945-4600-9031-d849e865c7d1","Type":"ContainerStarted","Data":"864bbb3b03694ab8ad505baf36f09ebd7d89b02dd5489188cd65fe153d641412"}
Jan 28 16:15:28 crc kubenswrapper[4811]: I0128 16:15:28.213366 4811 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 28 16:15:28 crc kubenswrapper[4811]: I0128 16:15:28.568161 4811 scope.go:117] "RemoveContainer" containerID="0f49c1f408b006371b9d24324b32ed0babd4053c0669c50182a9c6cec0d5e3b3"
Jan 28 16:15:28 crc kubenswrapper[4811]: I0128 16:15:28.619055 4811 scope.go:117] "RemoveContainer" containerID="e82d4770dbd8df1af3c60b19065079eef075e181597a6fe329d2bfc69a97a55f"
Jan 28 16:15:28 crc kubenswrapper[4811]: I0128 16:15:28.660096 4811 scope.go:117] "RemoveContainer" containerID="fa157babbe500c1563c46cb52c30c82654096cb6b82bef1d0c2006eedc63c1d2"
Jan 28 16:15:30 crc kubenswrapper[4811]: I0128 16:15:30.249899 4811 generic.go:334] "Generic (PLEG): container finished" podID="f8725a7e-2945-4600-9031-d849e865c7d1" containerID="4c245360cccf580c09b7f37403696042398616f3cc64140572fd4348f3c4b00e" exitCode=0
Jan 28 16:15:30 crc kubenswrapper[4811]: I0128 16:15:30.250413 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mbbtk" event={"ID":"f8725a7e-2945-4600-9031-d849e865c7d1","Type":"ContainerDied","Data":"4c245360cccf580c09b7f37403696042398616f3cc64140572fd4348f3c4b00e"}
Jan 28 16:15:31 crc kubenswrapper[4811]: I0128 16:15:31.259314 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mbbtk" event={"ID":"f8725a7e-2945-4600-9031-d849e865c7d1","Type":"ContainerStarted","Data":"9e450abef371304f7715d871bde9b22eb2d64608d6a424d3df01557f8f083439"}
Jan 28 16:15:31 crc kubenswrapper[4811]: I0128 16:15:31.286424 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mbbtk" podStartSLOduration=2.726568012 podStartE2EDuration="5.286405251s" podCreationTimestamp="2026-01-28 16:15:26 +0000 UTC" firstStartedPulling="2026-01-28 16:15:28.213070768 +0000 UTC m=+1820.967434341" lastFinishedPulling="2026-01-28 16:15:30.772907987 +0000 UTC m=+1823.527271580" observedRunningTime="2026-01-28 16:15:31.278920237 +0000 UTC m=+1824.033283820" watchObservedRunningTime="2026-01-28 16:15:31.286405251 +0000 UTC m=+1824.040768844"
Jan 28 16:15:36 crc kubenswrapper[4811]: I0128 16:15:36.939939 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mbbtk"
Jan 28 16:15:36 crc kubenswrapper[4811]: I0128 16:15:36.942624 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mbbtk"
Jan 28 16:15:36 crc kubenswrapper[4811]: I0128 16:15:36.994738 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mbbtk"
Jan 28 16:15:37 crc kubenswrapper[4811]: I0128 16:15:37.341447 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mbbtk"
Jan 28 16:15:37 crc kubenswrapper[4811]: I0128 16:15:37.385297 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mbbtk"]
Jan 28 16:15:39 crc kubenswrapper[4811]: I0128 16:15:39.315228 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mbbtk" podUID="f8725a7e-2945-4600-9031-d849e865c7d1" containerName="registry-server" containerID="cri-o://9e450abef371304f7715d871bde9b22eb2d64608d6a424d3df01557f8f083439" gracePeriod=2
Jan 28 16:15:40 crc kubenswrapper[4811]: I0128 16:15:40.327588 4811 generic.go:334] "Generic (PLEG): container finished" podID="f8725a7e-2945-4600-9031-d849e865c7d1" containerID="9e450abef371304f7715d871bde9b22eb2d64608d6a424d3df01557f8f083439" exitCode=0
Jan 28 16:15:40 crc kubenswrapper[4811]: I0128 16:15:40.327734 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mbbtk" event={"ID":"f8725a7e-2945-4600-9031-d849e865c7d1","Type":"ContainerDied","Data":"9e450abef371304f7715d871bde9b22eb2d64608d6a424d3df01557f8f083439"}
Jan 28 16:15:40 crc kubenswrapper[4811]: I0128 16:15:40.719227 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mbbtk"
Jan 28 16:15:40 crc kubenswrapper[4811]: I0128 16:15:40.804811 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8725a7e-2945-4600-9031-d849e865c7d1-catalog-content\") pod \"f8725a7e-2945-4600-9031-d849e865c7d1\" (UID: \"f8725a7e-2945-4600-9031-d849e865c7d1\") "
Jan 28 16:15:40 crc kubenswrapper[4811]: I0128 16:15:40.804855 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk462\" (UniqueName: \"kubernetes.io/projected/f8725a7e-2945-4600-9031-d849e865c7d1-kube-api-access-tk462\") pod \"f8725a7e-2945-4600-9031-d849e865c7d1\" (UID: \"f8725a7e-2945-4600-9031-d849e865c7d1\") "
Jan 28 16:15:40 crc kubenswrapper[4811]: I0128 16:15:40.804892 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8725a7e-2945-4600-9031-d849e865c7d1-utilities\") pod \"f8725a7e-2945-4600-9031-d849e865c7d1\" (UID: \"f8725a7e-2945-4600-9031-d849e865c7d1\") "
Jan 28 16:15:40 crc kubenswrapper[4811]: I0128 16:15:40.806344 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8725a7e-2945-4600-9031-d849e865c7d1-utilities" (OuterVolumeSpecName: "utilities") pod "f8725a7e-2945-4600-9031-d849e865c7d1" (UID: "f8725a7e-2945-4600-9031-d849e865c7d1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 16:15:40 crc kubenswrapper[4811]: I0128 16:15:40.819580 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8725a7e-2945-4600-9031-d849e865c7d1-kube-api-access-tk462" (OuterVolumeSpecName: "kube-api-access-tk462") pod "f8725a7e-2945-4600-9031-d849e865c7d1" (UID: "f8725a7e-2945-4600-9031-d849e865c7d1"). InnerVolumeSpecName "kube-api-access-tk462". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 16:15:40 crc kubenswrapper[4811]: I0128 16:15:40.906986 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk462\" (UniqueName: \"kubernetes.io/projected/f8725a7e-2945-4600-9031-d849e865c7d1-kube-api-access-tk462\") on node \"crc\" DevicePath \"\""
Jan 28 16:15:40 crc kubenswrapper[4811]: I0128 16:15:40.907037 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8725a7e-2945-4600-9031-d849e865c7d1-utilities\") on node \"crc\" DevicePath \"\""
Jan 28 16:15:40 crc kubenswrapper[4811]: I0128 16:15:40.939549 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8725a7e-2945-4600-9031-d849e865c7d1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f8725a7e-2945-4600-9031-d849e865c7d1" (UID: "f8725a7e-2945-4600-9031-d849e865c7d1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 16:15:41 crc kubenswrapper[4811]: I0128 16:15:41.008091 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8725a7e-2945-4600-9031-d849e865c7d1-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 28 16:15:41 crc kubenswrapper[4811]: I0128 16:15:41.337168 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mbbtk" event={"ID":"f8725a7e-2945-4600-9031-d849e865c7d1","Type":"ContainerDied","Data":"864bbb3b03694ab8ad505baf36f09ebd7d89b02dd5489188cd65fe153d641412"}
Jan 28 16:15:41 crc kubenswrapper[4811]: I0128 16:15:41.337220 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mbbtk"
Jan 28 16:15:41 crc kubenswrapper[4811]: I0128 16:15:41.337465 4811 scope.go:117] "RemoveContainer" containerID="9e450abef371304f7715d871bde9b22eb2d64608d6a424d3df01557f8f083439"
Jan 28 16:15:41 crc kubenswrapper[4811]: I0128 16:15:41.365850 4811 scope.go:117] "RemoveContainer" containerID="4c245360cccf580c09b7f37403696042398616f3cc64140572fd4348f3c4b00e"
Jan 28 16:15:41 crc kubenswrapper[4811]: I0128 16:15:41.376826 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mbbtk"]
Jan 28 16:15:41 crc kubenswrapper[4811]: I0128 16:15:41.381693 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mbbtk"]
Jan 28 16:15:41 crc kubenswrapper[4811]: I0128 16:15:41.389835 4811 scope.go:117] "RemoveContainer" containerID="96043dbb7afebececa6b04b80434a8f6386216fb492d93aba5c44b61c917fe86"
Jan 28 16:15:42 crc kubenswrapper[4811]: I0128 16:15:42.349262 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8725a7e-2945-4600-9031-d849e865c7d1" path="/var/lib/kubelet/pods/f8725a7e-2945-4600-9031-d849e865c7d1/volumes"
Jan 28 16:16:28 crc kubenswrapper[4811]: I0128 16:16:28.732121 4811 scope.go:117] "RemoveContainer" containerID="58eb72bdbc49c1db4bd6a8f3ff0f677fe0081e578427777c49080084da3fac18"
Jan 28 16:16:28 crc kubenswrapper[4811]: I0128 16:16:28.763127 4811 scope.go:117] "RemoveContainer" containerID="ea9b41d1b39833e282998b715a5c1ac858988843c729530075218e5953aca943"
Jan 28 16:16:28 crc kubenswrapper[4811]: I0128 16:16:28.796776 4811 scope.go:117] "RemoveContainer" containerID="edeadfe37f452b4de66d49e0830821b1e8e7fe1d122ac5020fcef25c60b8e824"
Jan 28 16:16:28 crc kubenswrapper[4811]: I0128 16:16:28.833594 4811 scope.go:117] "RemoveContainer" containerID="d9f9a0f8d8ef4d173f67b5dae4bd737f60adc547940317d4473c9e7ac4c9d5d2"
Jan 28 16:16:28 crc kubenswrapper[4811]: I0128 16:16:28.847557 4811 scope.go:117] "RemoveContainer" containerID="4118d781d07ee77f52069b5e89d67a8d192eb68dd733377011ee2da5f304380b"
Jan 28 16:17:33 crc kubenswrapper[4811]: I0128 16:17:33.087326 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 16:17:33 crc kubenswrapper[4811]: I0128 16:17:33.087913 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
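The prober entries above and below show an HTTP liveness probe against http://127.0.0.1:8798/health failing with connection refused. Judging from the timestamps alone, the probe fires every 30 seconds and the container is restarted after three consecutive failures; neither the period nor the threshold is stated explicitly in the log, so both are inferences here. A minimal Go sketch of such a check:

    // Minimal sketch of the HTTP liveness check the prober entries describe:
    // a GET against the health endpoint, where a transport error (such as
    // "connect: connection refused") or a non-2xx/3xx status counts as failure.
    package main

    import (
        "fmt"
        "net/http"
        "time"
    )

    func probeOnce(url string) error {
        client := &http.Client{Timeout: time.Second}
        resp, err := client.Get(url)
        if err != nil {
            return err // e.g. dial tcp 127.0.0.1:8798: connect: connection refused
        }
        defer resp.Body.Close()
        if resp.StatusCode < 200 || resp.StatusCode >= 400 {
            return fmt.Errorf("unexpected status %d", resp.StatusCode)
        }
        return nil
    }

    func main() {
        failures := 0
        for range time.Tick(30 * time.Second) { // period inferred from the timestamps
            if err := probeOnce("http://127.0.0.1:8798/health"); err != nil {
                failures++
                fmt.Println("Probe failed:", err)
                if failures >= 3 { // threshold inferred from three failures per restart
                    fmt.Println("failed liveness probe, will be restarted")
                    failures = 0
                }
            } else {
                failures = 0
            }
        }
    }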
Jan 28 16:18:03 crc kubenswrapper[4811]: I0128 16:18:03.087542 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 16:18:03 crc kubenswrapper[4811]: I0128 16:18:03.088188 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 16:18:33 crc kubenswrapper[4811]: I0128 16:18:33.087021 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 16:18:33 crc kubenswrapper[4811]: I0128 16:18:33.087609 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 16:18:33 crc kubenswrapper[4811]: I0128 16:18:33.087675 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6"
Jan 28 16:18:33 crc kubenswrapper[4811]: I0128 16:18:33.090695 4811 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5964745d385c807e0bcacf2afe7db0e53c1eab505dceba168af601fbb26aa67c"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 28 16:18:33 crc kubenswrapper[4811]: I0128 16:18:33.090797 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://5964745d385c807e0bcacf2afe7db0e53c1eab505dceba168af601fbb26aa67c" gracePeriod=600
Jan 28 16:18:33 crc kubenswrapper[4811]: I0128 16:18:33.672087 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="5964745d385c807e0bcacf2afe7db0e53c1eab505dceba168af601fbb26aa67c" exitCode=0
Jan 28 16:18:33 crc kubenswrapper[4811]: I0128 16:18:33.672125 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"5964745d385c807e0bcacf2afe7db0e53c1eab505dceba168af601fbb26aa67c"}
Jan 28 16:18:33 crc kubenswrapper[4811]: I0128 16:18:33.672473 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"6354f4d56ac8ccab41aec8d744b7d40a4703f3f73187c0ee9dfe15ec4ec4fc7b"}
Jan 28 16:18:33 crc kubenswrapper[4811]: I0128 16:18:33.672496 4811 scope.go:117] "RemoveContainer" containerID="8a7a828294dde0bd8dbd211cdc8b81d51144a40905535ed75ebac45ca7ba49c2"
Jan 28 16:20:33 crc kubenswrapper[4811]: I0128 16:20:33.086991 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 16:20:33 crc kubenswrapper[4811]: I0128 16:20:33.087488 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 16:21:03 crc kubenswrapper[4811]: I0128 16:21:03.087114 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 16:21:03 crc kubenswrapper[4811]: I0128 16:21:03.087681 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 16:21:33 crc kubenswrapper[4811]: I0128 16:21:33.087750 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 16:21:33 crc kubenswrapper[4811]: I0128 16:21:33.088291 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 16:21:33 crc kubenswrapper[4811]: I0128 16:21:33.088398 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6"
Jan 28 16:21:33 crc kubenswrapper[4811]: I0128 16:21:33.089017 4811 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6354f4d56ac8ccab41aec8d744b7d40a4703f3f73187c0ee9dfe15ec4ec4fc7b"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 28 16:21:33 crc kubenswrapper[4811]: I0128 16:21:33.089088 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://6354f4d56ac8ccab41aec8d744b7d40a4703f3f73187c0ee9dfe15ec4ec4fc7b" gracePeriod=600
Jan 28 16:21:33 crc kubenswrapper[4811]: E0128 16:21:33.208268 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 16:21:33 crc kubenswrapper[4811]: I0128 16:21:33.971187 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="6354f4d56ac8ccab41aec8d744b7d40a4703f3f73187c0ee9dfe15ec4ec4fc7b" exitCode=0
Jan 28 16:21:33 crc kubenswrapper[4811]: I0128 16:21:33.971531 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"6354f4d56ac8ccab41aec8d744b7d40a4703f3f73187c0ee9dfe15ec4ec4fc7b"}
Jan 28 16:21:33 crc kubenswrapper[4811]: I0128 16:21:33.971632 4811 scope.go:117] "RemoveContainer" containerID="5964745d385c807e0bcacf2afe7db0e53c1eab505dceba168af601fbb26aa67c"
Jan 28 16:21:33 crc kubenswrapper[4811]: I0128 16:21:33.972296 4811 scope.go:117] "RemoveContainer" containerID="6354f4d56ac8ccab41aec8d744b7d40a4703f3f73187c0ee9dfe15ec4ec4fc7b"
Jan 28 16:21:33 crc kubenswrapper[4811]: E0128 16:21:33.972634 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 16:21:48 crc kubenswrapper[4811]: I0128 16:21:48.345450 4811 scope.go:117] "RemoveContainer" containerID="6354f4d56ac8ccab41aec8d744b7d40a4703f3f73187c0ee9dfe15ec4ec4fc7b"
Jan 28 16:21:48 crc kubenswrapper[4811]: E0128 16:21:48.346307 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 16:21:52 crc kubenswrapper[4811]: I0128 16:21:52.593569 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-z5t68"]
Jan 28 16:21:52 crc kubenswrapper[4811]: E0128 16:21:52.594072 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8725a7e-2945-4600-9031-d849e865c7d1" containerName="extract-utilities"
Jan 28 16:21:52 crc kubenswrapper[4811]: I0128 16:21:52.594083 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8725a7e-2945-4600-9031-d849e865c7d1" containerName="extract-utilities"
Jan 28 16:21:52 crc kubenswrapper[4811]: E0128 16:21:52.594108 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8725a7e-2945-4600-9031-d849e865c7d1" containerName="registry-server"
Jan 28 16:21:52 crc kubenswrapper[4811]: I0128 16:21:52.594116 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8725a7e-2945-4600-9031-d849e865c7d1" containerName="registry-server"
Jan 28 16:21:52 crc kubenswrapper[4811]: E0128 16:21:52.594128 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8725a7e-2945-4600-9031-d849e865c7d1" containerName="extract-content"
Jan 28 16:21:52 crc kubenswrapper[4811]: I0128 16:21:52.594135 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8725a7e-2945-4600-9031-d849e865c7d1" containerName="extract-content"
Jan 28 16:21:52 crc kubenswrapper[4811]: I0128 16:21:52.594256 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8725a7e-2945-4600-9031-d849e865c7d1" containerName="registry-server"
Jan 28 16:21:52 crc kubenswrapper[4811]: I0128 16:21:52.595180 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-z5t68"
Jan 28 16:21:52 crc kubenswrapper[4811]: I0128 16:21:52.610074 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-z5t68"]
Jan 28 16:21:52 crc kubenswrapper[4811]: I0128 16:21:52.693670 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fp5k\" (UniqueName: \"kubernetes.io/projected/a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13-kube-api-access-2fp5k\") pod \"redhat-marketplace-z5t68\" (UID: \"a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13\") " pod="openshift-marketplace/redhat-marketplace-z5t68"
Jan 28 16:21:52 crc kubenswrapper[4811]: I0128 16:21:52.693719 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13-catalog-content\") pod \"redhat-marketplace-z5t68\" (UID: \"a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13\") " pod="openshift-marketplace/redhat-marketplace-z5t68"
Jan 28 16:21:52 crc kubenswrapper[4811]: I0128 16:21:52.693749 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13-utilities\") pod \"redhat-marketplace-z5t68\" (UID: \"a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13\") " pod="openshift-marketplace/redhat-marketplace-z5t68"
Jan 28 16:21:52 crc kubenswrapper[4811]: I0128 16:21:52.795356 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fp5k\" (UniqueName: \"kubernetes.io/projected/a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13-kube-api-access-2fp5k\") pod \"redhat-marketplace-z5t68\" (UID: \"a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13\") " pod="openshift-marketplace/redhat-marketplace-z5t68"
Jan 28 16:21:52 crc kubenswrapper[4811]: I0128 16:21:52.795419 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13-catalog-content\") pod \"redhat-marketplace-z5t68\" (UID: \"a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13\") " pod="openshift-marketplace/redhat-marketplace-z5t68"
Jan 28 16:21:52 crc kubenswrapper[4811]: I0128 16:21:52.795474 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13-utilities\") pod \"redhat-marketplace-z5t68\" (UID: \"a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13\") " pod="openshift-marketplace/redhat-marketplace-z5t68"
Jan 28 16:21:52 crc kubenswrapper[4811]: I0128 16:21:52.795947 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13-utilities\") pod \"redhat-marketplace-z5t68\" (UID: \"a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13\") " pod="openshift-marketplace/redhat-marketplace-z5t68"
Jan 28 16:21:52 crc kubenswrapper[4811]: I0128 16:21:52.796008 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13-catalog-content\") pod \"redhat-marketplace-z5t68\" (UID: \"a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13\") " pod="openshift-marketplace/redhat-marketplace-z5t68"
Jan 28 16:21:52 crc kubenswrapper[4811]: I0128 16:21:52.815760 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fp5k\" (UniqueName: \"kubernetes.io/projected/a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13-kube-api-access-2fp5k\") pod \"redhat-marketplace-z5t68\" (UID: \"a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13\") " pod="openshift-marketplace/redhat-marketplace-z5t68"
Jan 28 16:21:52 crc kubenswrapper[4811]: I0128 16:21:52.912765 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-z5t68"
Jan 28 16:21:53 crc kubenswrapper[4811]: I0128 16:21:53.348930 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-z5t68"]
Jan 28 16:21:54 crc kubenswrapper[4811]: I0128 16:21:54.366595 4811 generic.go:334] "Generic (PLEG): container finished" podID="a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13" containerID="69747bafd9c09ff31e2d51ecbb15d15c36d54704bc0ba7d340504f614b5d1ca9" exitCode=0
Jan 28 16:21:54 crc kubenswrapper[4811]: I0128 16:21:54.366812 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z5t68" event={"ID":"a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13","Type":"ContainerDied","Data":"69747bafd9c09ff31e2d51ecbb15d15c36d54704bc0ba7d340504f614b5d1ca9"}
Jan 28 16:21:54 crc kubenswrapper[4811]: I0128 16:21:54.366912 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z5t68" event={"ID":"a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13","Type":"ContainerStarted","Data":"1ac56dd530e3bf5142db55ba133335c3efa3886faed717d47bb3b5c7e24c0ed0"}
Jan 28 16:21:54 crc kubenswrapper[4811]: I0128 16:21:54.369792 4811 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 28 16:21:55 crc kubenswrapper[4811]: I0128 16:21:55.011677 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-x8ght"]
Jan 28 16:21:55 crc kubenswrapper[4811]: I0128 16:21:55.031164 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x8ght"
Jan 28 16:21:55 crc kubenswrapper[4811]: I0128 16:21:55.042252 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x8ght"]
Jan 28 16:21:55 crc kubenswrapper[4811]: I0128 16:21:55.045956 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l95hf\" (UniqueName: \"kubernetes.io/projected/86f56c99-3319-421c-9ca3-14027b9fe5aa-kube-api-access-l95hf\") pod \"certified-operators-x8ght\" (UID: \"86f56c99-3319-421c-9ca3-14027b9fe5aa\") " pod="openshift-marketplace/certified-operators-x8ght"
Jan 28 16:21:55 crc kubenswrapper[4811]: I0128 16:21:55.046100 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86f56c99-3319-421c-9ca3-14027b9fe5aa-catalog-content\") pod \"certified-operators-x8ght\" (UID: \"86f56c99-3319-421c-9ca3-14027b9fe5aa\") " pod="openshift-marketplace/certified-operators-x8ght"
Jan 28 16:21:55 crc kubenswrapper[4811]: I0128 16:21:55.046257 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86f56c99-3319-421c-9ca3-14027b9fe5aa-utilities\") pod \"certified-operators-x8ght\" (UID: \"86f56c99-3319-421c-9ca3-14027b9fe5aa\") " pod="openshift-marketplace/certified-operators-x8ght"
Jan 28 16:21:55 crc kubenswrapper[4811]: I0128 16:21:55.147215 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86f56c99-3319-421c-9ca3-14027b9fe5aa-utilities\") pod \"certified-operators-x8ght\" (UID: \"86f56c99-3319-421c-9ca3-14027b9fe5aa\") " pod="openshift-marketplace/certified-operators-x8ght"
Jan 28 16:21:55 crc kubenswrapper[4811]: I0128 16:21:55.147344 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l95hf\" (UniqueName: \"kubernetes.io/projected/86f56c99-3319-421c-9ca3-14027b9fe5aa-kube-api-access-l95hf\") pod \"certified-operators-x8ght\" (UID: \"86f56c99-3319-421c-9ca3-14027b9fe5aa\") " pod="openshift-marketplace/certified-operators-x8ght"
Jan 28 16:21:55 crc kubenswrapper[4811]: I0128 16:21:55.147647 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86f56c99-3319-421c-9ca3-14027b9fe5aa-catalog-content\") pod \"certified-operators-x8ght\" (UID: \"86f56c99-3319-421c-9ca3-14027b9fe5aa\") " pod="openshift-marketplace/certified-operators-x8ght"
Jan 28 16:21:55 crc kubenswrapper[4811]: I0128 16:21:55.148045 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86f56c99-3319-421c-9ca3-14027b9fe5aa-utilities\") pod \"certified-operators-x8ght\" (UID: \"86f56c99-3319-421c-9ca3-14027b9fe5aa\") " pod="openshift-marketplace/certified-operators-x8ght"
Jan 28 16:21:55 crc kubenswrapper[4811]: I0128 16:21:55.148163 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86f56c99-3319-421c-9ca3-14027b9fe5aa-catalog-content\") pod \"certified-operators-x8ght\" (UID: \"86f56c99-3319-421c-9ca3-14027b9fe5aa\") " pod="openshift-marketplace/certified-operators-x8ght"
Jan 28 16:21:55 crc kubenswrapper[4811]: I0128 16:21:55.169142 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l95hf\" (UniqueName: \"kubernetes.io/projected/86f56c99-3319-421c-9ca3-14027b9fe5aa-kube-api-access-l95hf\") pod \"certified-operators-x8ght\" (UID: \"86f56c99-3319-421c-9ca3-14027b9fe5aa\") " pod="openshift-marketplace/certified-operators-x8ght"
Jan 28 16:21:55 crc kubenswrapper[4811]: I0128 16:21:55.361685 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x8ght"
Jan 28 16:21:55 crc kubenswrapper[4811]: I0128 16:21:55.796950 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x8ght"]
Jan 28 16:21:55 crc kubenswrapper[4811]: W0128 16:21:55.808371 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod86f56c99_3319_421c_9ca3_14027b9fe5aa.slice/crio-7bdb925ae4ca2e8d12a1aa06184030e94365eec679b580f44cbb2de92c162f33 WatchSource:0}: Error finding container 7bdb925ae4ca2e8d12a1aa06184030e94365eec679b580f44cbb2de92c162f33: Status 404 returned error can't find the container with id 7bdb925ae4ca2e8d12a1aa06184030e94365eec679b580f44cbb2de92c162f33
Jan 28 16:21:56 crc kubenswrapper[4811]: I0128 16:21:56.391124 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x8ght" event={"ID":"86f56c99-3319-421c-9ca3-14027b9fe5aa","Type":"ContainerStarted","Data":"7bdb925ae4ca2e8d12a1aa06184030e94365eec679b580f44cbb2de92c162f33"}
Jan 28 16:21:57 crc kubenswrapper[4811]: I0128 16:21:57.398073 4811 generic.go:334] "Generic (PLEG): container finished" podID="86f56c99-3319-421c-9ca3-14027b9fe5aa" containerID="e6da4c711f748ab2dcf9831ec36b7b4f4392ec9c47bf3f429c432dc86d45c8c2" exitCode=0
Jan 28 16:21:57 crc kubenswrapper[4811]: I0128 16:21:57.398175 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x8ght" event={"ID":"86f56c99-3319-421c-9ca3-14027b9fe5aa","Type":"ContainerDied","Data":"e6da4c711f748ab2dcf9831ec36b7b4f4392ec9c47bf3f429c432dc86d45c8c2"}
Jan 28 16:21:57 crc kubenswrapper[4811]: I0128 16:21:57.401133 4811 generic.go:334] "Generic (PLEG): container finished" podID="a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13" containerID="55cb6c1842e0fdda11f2460cd5860de784e562554f25dbb35e0a86050668cc6b" exitCode=0
Jan 28 16:21:57 crc kubenswrapper[4811]: I0128 16:21:57.401175 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z5t68" event={"ID":"a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13","Type":"ContainerDied","Data":"55cb6c1842e0fdda11f2460cd5860de784e562554f25dbb35e0a86050668cc6b"}
Jan 28 16:21:59 crc kubenswrapper[4811]: I0128 16:21:59.339383 4811 scope.go:117] "RemoveContainer" containerID="6354f4d56ac8ccab41aec8d744b7d40a4703f3f73187c0ee9dfe15ec4ec4fc7b"
Jan 28 16:21:59 crc kubenswrapper[4811]: E0128 16:21:59.339869 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 16:22:00 crc kubenswrapper[4811]: I0128 16:22:00.423637 4811 generic.go:334] "Generic (PLEG): container finished" podID="86f56c99-3319-421c-9ca3-14027b9fe5aa" containerID="fab3f27c78f0707e903f27372397ee73ca4c7e07c3756b91bedd9a2961360373" exitCode=0
containerID="fab3f27c78f0707e903f27372397ee73ca4c7e07c3756b91bedd9a2961360373" exitCode=0 Jan 28 16:22:00 crc kubenswrapper[4811]: I0128 16:22:00.423718 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x8ght" event={"ID":"86f56c99-3319-421c-9ca3-14027b9fe5aa","Type":"ContainerDied","Data":"fab3f27c78f0707e903f27372397ee73ca4c7e07c3756b91bedd9a2961360373"} Jan 28 16:22:00 crc kubenswrapper[4811]: I0128 16:22:00.426383 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z5t68" event={"ID":"a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13","Type":"ContainerStarted","Data":"fb671841a9521a07d3b306069c78c9fad7638a0338a29f608f0dce24268211ee"} Jan 28 16:22:00 crc kubenswrapper[4811]: I0128 16:22:00.465108 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-z5t68" podStartSLOduration=3.244524842 podStartE2EDuration="8.465093274s" podCreationTimestamp="2026-01-28 16:21:52 +0000 UTC" firstStartedPulling="2026-01-28 16:21:54.369591533 +0000 UTC m=+2207.123955116" lastFinishedPulling="2026-01-28 16:21:59.590159965 +0000 UTC m=+2212.344523548" observedRunningTime="2026-01-28 16:22:00.461089173 +0000 UTC m=+2213.215452756" watchObservedRunningTime="2026-01-28 16:22:00.465093274 +0000 UTC m=+2213.219456857" Jan 28 16:22:02 crc kubenswrapper[4811]: I0128 16:22:02.440808 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x8ght" event={"ID":"86f56c99-3319-421c-9ca3-14027b9fe5aa","Type":"ContainerStarted","Data":"18c1da7f490b2585c54008e81fc6ddebab2cd04d6f9468b1320954e68d6c96f0"} Jan 28 16:22:02 crc kubenswrapper[4811]: I0128 16:22:02.462355 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-x8ght" podStartSLOduration=4.568523341 podStartE2EDuration="8.462337498s" podCreationTimestamp="2026-01-28 16:21:54 +0000 UTC" firstStartedPulling="2026-01-28 16:21:57.401141591 +0000 UTC m=+2210.155505174" lastFinishedPulling="2026-01-28 16:22:01.294955748 +0000 UTC m=+2214.049319331" observedRunningTime="2026-01-28 16:22:02.458453622 +0000 UTC m=+2215.212817215" watchObservedRunningTime="2026-01-28 16:22:02.462337498 +0000 UTC m=+2215.216701101" Jan 28 16:22:02 crc kubenswrapper[4811]: I0128 16:22:02.913853 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-z5t68" Jan 28 16:22:02 crc kubenswrapper[4811]: I0128 16:22:02.914152 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-z5t68" Jan 28 16:22:02 crc kubenswrapper[4811]: I0128 16:22:02.964636 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-z5t68" Jan 28 16:22:05 crc kubenswrapper[4811]: I0128 16:22:05.362191 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-x8ght" Jan 28 16:22:05 crc kubenswrapper[4811]: I0128 16:22:05.362584 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-x8ght" Jan 28 16:22:05 crc kubenswrapper[4811]: I0128 16:22:05.401861 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-x8ght" Jan 28 16:22:10 crc kubenswrapper[4811]: I0128 16:22:10.339384 4811 scope.go:117] 
"RemoveContainer" containerID="6354f4d56ac8ccab41aec8d744b7d40a4703f3f73187c0ee9dfe15ec4ec4fc7b" Jan 28 16:22:10 crc kubenswrapper[4811]: E0128 16:22:10.339890 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:22:12 crc kubenswrapper[4811]: I0128 16:22:12.974305 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-z5t68" Jan 28 16:22:13 crc kubenswrapper[4811]: I0128 16:22:13.034676 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-z5t68"] Jan 28 16:22:13 crc kubenswrapper[4811]: I0128 16:22:13.529526 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-z5t68" podUID="a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13" containerName="registry-server" containerID="cri-o://fb671841a9521a07d3b306069c78c9fad7638a0338a29f608f0dce24268211ee" gracePeriod=2 Jan 28 16:22:14 crc kubenswrapper[4811]: I0128 16:22:14.475071 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-z5t68" Jan 28 16:22:14 crc kubenswrapper[4811]: I0128 16:22:14.548006 4811 generic.go:334] "Generic (PLEG): container finished" podID="a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13" containerID="fb671841a9521a07d3b306069c78c9fad7638a0338a29f608f0dce24268211ee" exitCode=0 Jan 28 16:22:14 crc kubenswrapper[4811]: I0128 16:22:14.548055 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z5t68" event={"ID":"a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13","Type":"ContainerDied","Data":"fb671841a9521a07d3b306069c78c9fad7638a0338a29f608f0dce24268211ee"} Jan 28 16:22:14 crc kubenswrapper[4811]: I0128 16:22:14.548121 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z5t68" event={"ID":"a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13","Type":"ContainerDied","Data":"1ac56dd530e3bf5142db55ba133335c3efa3886faed717d47bb3b5c7e24c0ed0"} Jan 28 16:22:14 crc kubenswrapper[4811]: I0128 16:22:14.548145 4811 scope.go:117] "RemoveContainer" containerID="fb671841a9521a07d3b306069c78c9fad7638a0338a29f608f0dce24268211ee" Jan 28 16:22:14 crc kubenswrapper[4811]: I0128 16:22:14.548289 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-z5t68" Jan 28 16:22:14 crc kubenswrapper[4811]: I0128 16:22:14.563152 4811 scope.go:117] "RemoveContainer" containerID="55cb6c1842e0fdda11f2460cd5860de784e562554f25dbb35e0a86050668cc6b" Jan 28 16:22:14 crc kubenswrapper[4811]: I0128 16:22:14.577521 4811 scope.go:117] "RemoveContainer" containerID="69747bafd9c09ff31e2d51ecbb15d15c36d54704bc0ba7d340504f614b5d1ca9" Jan 28 16:22:14 crc kubenswrapper[4811]: I0128 16:22:14.598683 4811 scope.go:117] "RemoveContainer" containerID="fb671841a9521a07d3b306069c78c9fad7638a0338a29f608f0dce24268211ee" Jan 28 16:22:14 crc kubenswrapper[4811]: E0128 16:22:14.599067 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb671841a9521a07d3b306069c78c9fad7638a0338a29f608f0dce24268211ee\": container with ID starting with fb671841a9521a07d3b306069c78c9fad7638a0338a29f608f0dce24268211ee not found: ID does not exist" containerID="fb671841a9521a07d3b306069c78c9fad7638a0338a29f608f0dce24268211ee" Jan 28 16:22:14 crc kubenswrapper[4811]: I0128 16:22:14.599137 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb671841a9521a07d3b306069c78c9fad7638a0338a29f608f0dce24268211ee"} err="failed to get container status \"fb671841a9521a07d3b306069c78c9fad7638a0338a29f608f0dce24268211ee\": rpc error: code = NotFound desc = could not find container \"fb671841a9521a07d3b306069c78c9fad7638a0338a29f608f0dce24268211ee\": container with ID starting with fb671841a9521a07d3b306069c78c9fad7638a0338a29f608f0dce24268211ee not found: ID does not exist" Jan 28 16:22:14 crc kubenswrapper[4811]: I0128 16:22:14.599179 4811 scope.go:117] "RemoveContainer" containerID="55cb6c1842e0fdda11f2460cd5860de784e562554f25dbb35e0a86050668cc6b" Jan 28 16:22:14 crc kubenswrapper[4811]: E0128 16:22:14.599530 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55cb6c1842e0fdda11f2460cd5860de784e562554f25dbb35e0a86050668cc6b\": container with ID starting with 55cb6c1842e0fdda11f2460cd5860de784e562554f25dbb35e0a86050668cc6b not found: ID does not exist" containerID="55cb6c1842e0fdda11f2460cd5860de784e562554f25dbb35e0a86050668cc6b" Jan 28 16:22:14 crc kubenswrapper[4811]: I0128 16:22:14.599569 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55cb6c1842e0fdda11f2460cd5860de784e562554f25dbb35e0a86050668cc6b"} err="failed to get container status \"55cb6c1842e0fdda11f2460cd5860de784e562554f25dbb35e0a86050668cc6b\": rpc error: code = NotFound desc = could not find container \"55cb6c1842e0fdda11f2460cd5860de784e562554f25dbb35e0a86050668cc6b\": container with ID starting with 55cb6c1842e0fdda11f2460cd5860de784e562554f25dbb35e0a86050668cc6b not found: ID does not exist" Jan 28 16:22:14 crc kubenswrapper[4811]: I0128 16:22:14.599603 4811 scope.go:117] "RemoveContainer" containerID="69747bafd9c09ff31e2d51ecbb15d15c36d54704bc0ba7d340504f614b5d1ca9" Jan 28 16:22:14 crc kubenswrapper[4811]: E0128 16:22:14.599843 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69747bafd9c09ff31e2d51ecbb15d15c36d54704bc0ba7d340504f614b5d1ca9\": container with ID starting with 69747bafd9c09ff31e2d51ecbb15d15c36d54704bc0ba7d340504f614b5d1ca9 not found: ID does not exist" containerID="69747bafd9c09ff31e2d51ecbb15d15c36d54704bc0ba7d340504f614b5d1ca9" 
Jan 28 16:22:14 crc kubenswrapper[4811]: I0128 16:22:14.599878 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69747bafd9c09ff31e2d51ecbb15d15c36d54704bc0ba7d340504f614b5d1ca9"} err="failed to get container status \"69747bafd9c09ff31e2d51ecbb15d15c36d54704bc0ba7d340504f614b5d1ca9\": rpc error: code = NotFound desc = could not find container \"69747bafd9c09ff31e2d51ecbb15d15c36d54704bc0ba7d340504f614b5d1ca9\": container with ID starting with 69747bafd9c09ff31e2d51ecbb15d15c36d54704bc0ba7d340504f614b5d1ca9 not found: ID does not exist"
Jan 28 16:22:14 crc kubenswrapper[4811]: I0128 16:22:14.665702 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2fp5k\" (UniqueName: \"kubernetes.io/projected/a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13-kube-api-access-2fp5k\") pod \"a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13\" (UID: \"a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13\") "
Jan 28 16:22:14 crc kubenswrapper[4811]: I0128 16:22:14.665934 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13-utilities\") pod \"a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13\" (UID: \"a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13\") "
Jan 28 16:22:14 crc kubenswrapper[4811]: I0128 16:22:14.665963 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13-catalog-content\") pod \"a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13\" (UID: \"a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13\") "
Jan 28 16:22:14 crc kubenswrapper[4811]: I0128 16:22:14.667307 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13-utilities" (OuterVolumeSpecName: "utilities") pod "a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13" (UID: "a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 16:22:14 crc kubenswrapper[4811]: I0128 16:22:14.672925 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13-kube-api-access-2fp5k" (OuterVolumeSpecName: "kube-api-access-2fp5k") pod "a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13" (UID: "a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13"). InnerVolumeSpecName "kube-api-access-2fp5k". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 16:22:14 crc kubenswrapper[4811]: I0128 16:22:14.688502 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13" (UID: "a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 16:22:14 crc kubenswrapper[4811]: I0128 16:22:14.768087 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2fp5k\" (UniqueName: \"kubernetes.io/projected/a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13-kube-api-access-2fp5k\") on node \"crc\" DevicePath \"\""
Jan 28 16:22:14 crc kubenswrapper[4811]: I0128 16:22:14.768490 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13-utilities\") on node \"crc\" DevicePath \"\""
Jan 28 16:22:14 crc kubenswrapper[4811]: I0128 16:22:14.768507 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 28 16:22:14 crc kubenswrapper[4811]: I0128 16:22:14.913995 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-z5t68"]
Jan 28 16:22:14 crc kubenswrapper[4811]: I0128 16:22:14.924601 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-z5t68"]
Jan 28 16:22:15 crc kubenswrapper[4811]: I0128 16:22:15.436365 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-x8ght"
Jan 28 16:22:16 crc kubenswrapper[4811]: I0128 16:22:16.348604 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13" path="/var/lib/kubelet/pods/a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13/volumes"
Jan 28 16:22:17 crc kubenswrapper[4811]: I0128 16:22:17.622688 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-x8ght"]
Jan 28 16:22:17 crc kubenswrapper[4811]: I0128 16:22:17.622970 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-x8ght" podUID="86f56c99-3319-421c-9ca3-14027b9fe5aa" containerName="registry-server" containerID="cri-o://18c1da7f490b2585c54008e81fc6ddebab2cd04d6f9468b1320954e68d6c96f0" gracePeriod=2
Jan 28 16:22:18 crc kubenswrapper[4811]: I0128 16:22:18.185524 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x8ght"
Jan 28 16:22:18 crc kubenswrapper[4811]: I0128 16:22:18.324854 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l95hf\" (UniqueName: \"kubernetes.io/projected/86f56c99-3319-421c-9ca3-14027b9fe5aa-kube-api-access-l95hf\") pod \"86f56c99-3319-421c-9ca3-14027b9fe5aa\" (UID: \"86f56c99-3319-421c-9ca3-14027b9fe5aa\") "
Jan 28 16:22:18 crc kubenswrapper[4811]: I0128 16:22:18.324986 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86f56c99-3319-421c-9ca3-14027b9fe5aa-utilities\") pod \"86f56c99-3319-421c-9ca3-14027b9fe5aa\" (UID: \"86f56c99-3319-421c-9ca3-14027b9fe5aa\") "
Jan 28 16:22:18 crc kubenswrapper[4811]: I0128 16:22:18.325224 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86f56c99-3319-421c-9ca3-14027b9fe5aa-catalog-content\") pod \"86f56c99-3319-421c-9ca3-14027b9fe5aa\" (UID: \"86f56c99-3319-421c-9ca3-14027b9fe5aa\") "
Jan 28 16:22:18 crc kubenswrapper[4811]: I0128 16:22:18.326663 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86f56c99-3319-421c-9ca3-14027b9fe5aa-utilities" (OuterVolumeSpecName: "utilities") pod "86f56c99-3319-421c-9ca3-14027b9fe5aa" (UID: "86f56c99-3319-421c-9ca3-14027b9fe5aa"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 16:22:18 crc kubenswrapper[4811]: I0128 16:22:18.330596 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86f56c99-3319-421c-9ca3-14027b9fe5aa-kube-api-access-l95hf" (OuterVolumeSpecName: "kube-api-access-l95hf") pod "86f56c99-3319-421c-9ca3-14027b9fe5aa" (UID: "86f56c99-3319-421c-9ca3-14027b9fe5aa"). InnerVolumeSpecName "kube-api-access-l95hf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 16:22:18 crc kubenswrapper[4811]: I0128 16:22:18.383055 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86f56c99-3319-421c-9ca3-14027b9fe5aa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "86f56c99-3319-421c-9ca3-14027b9fe5aa" (UID: "86f56c99-3319-421c-9ca3-14027b9fe5aa"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 16:22:18 crc kubenswrapper[4811]: I0128 16:22:18.426652 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86f56c99-3319-421c-9ca3-14027b9fe5aa-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 28 16:22:18 crc kubenswrapper[4811]: I0128 16:22:18.426792 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l95hf\" (UniqueName: \"kubernetes.io/projected/86f56c99-3319-421c-9ca3-14027b9fe5aa-kube-api-access-l95hf\") on node \"crc\" DevicePath \"\""
Jan 28 16:22:18 crc kubenswrapper[4811]: I0128 16:22:18.426955 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86f56c99-3319-421c-9ca3-14027b9fe5aa-utilities\") on node \"crc\" DevicePath \"\""
Jan 28 16:22:18 crc kubenswrapper[4811]: I0128 16:22:18.581709 4811 generic.go:334] "Generic (PLEG): container finished" podID="86f56c99-3319-421c-9ca3-14027b9fe5aa" containerID="18c1da7f490b2585c54008e81fc6ddebab2cd04d6f9468b1320954e68d6c96f0" exitCode=0
Jan 28 16:22:18 crc kubenswrapper[4811]: I0128 16:22:18.581871 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x8ght" event={"ID":"86f56c99-3319-421c-9ca3-14027b9fe5aa","Type":"ContainerDied","Data":"18c1da7f490b2585c54008e81fc6ddebab2cd04d6f9468b1320954e68d6c96f0"}
Jan 28 16:22:18 crc kubenswrapper[4811]: I0128 16:22:18.582013 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x8ght" event={"ID":"86f56c99-3319-421c-9ca3-14027b9fe5aa","Type":"ContainerDied","Data":"7bdb925ae4ca2e8d12a1aa06184030e94365eec679b580f44cbb2de92c162f33"}
Jan 28 16:22:18 crc kubenswrapper[4811]: I0128 16:22:18.582037 4811 scope.go:117] "RemoveContainer" containerID="18c1da7f490b2585c54008e81fc6ddebab2cd04d6f9468b1320954e68d6c96f0"
Jan 28 16:22:18 crc kubenswrapper[4811]: I0128 16:22:18.581954 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x8ght"
Jan 28 16:22:18 crc kubenswrapper[4811]: I0128 16:22:18.610007 4811 scope.go:117] "RemoveContainer" containerID="fab3f27c78f0707e903f27372397ee73ca4c7e07c3756b91bedd9a2961360373"
Jan 28 16:22:18 crc kubenswrapper[4811]: I0128 16:22:18.612719 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-x8ght"]
Jan 28 16:22:18 crc kubenswrapper[4811]: I0128 16:22:18.620107 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-x8ght"]
Jan 28 16:22:18 crc kubenswrapper[4811]: I0128 16:22:18.641678 4811 scope.go:117] "RemoveContainer" containerID="e6da4c711f748ab2dcf9831ec36b7b4f4392ec9c47bf3f429c432dc86d45c8c2"
Jan 28 16:22:18 crc kubenswrapper[4811]: I0128 16:22:18.664829 4811 scope.go:117] "RemoveContainer" containerID="18c1da7f490b2585c54008e81fc6ddebab2cd04d6f9468b1320954e68d6c96f0"
Jan 28 16:22:18 crc kubenswrapper[4811]: E0128 16:22:18.665528 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18c1da7f490b2585c54008e81fc6ddebab2cd04d6f9468b1320954e68d6c96f0\": container with ID starting with 18c1da7f490b2585c54008e81fc6ddebab2cd04d6f9468b1320954e68d6c96f0 not found: ID does not exist" containerID="18c1da7f490b2585c54008e81fc6ddebab2cd04d6f9468b1320954e68d6c96f0"
Jan 28 16:22:18 crc kubenswrapper[4811]: I0128 16:22:18.665568 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18c1da7f490b2585c54008e81fc6ddebab2cd04d6f9468b1320954e68d6c96f0"} err="failed to get container status \"18c1da7f490b2585c54008e81fc6ddebab2cd04d6f9468b1320954e68d6c96f0\": rpc error: code = NotFound desc = could not find container \"18c1da7f490b2585c54008e81fc6ddebab2cd04d6f9468b1320954e68d6c96f0\": container with ID starting with 18c1da7f490b2585c54008e81fc6ddebab2cd04d6f9468b1320954e68d6c96f0 not found: ID does not exist"
Jan 28 16:22:18 crc kubenswrapper[4811]: I0128 16:22:18.665598 4811 scope.go:117] "RemoveContainer" containerID="fab3f27c78f0707e903f27372397ee73ca4c7e07c3756b91bedd9a2961360373"
Jan 28 16:22:18 crc kubenswrapper[4811]: E0128 16:22:18.666015 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fab3f27c78f0707e903f27372397ee73ca4c7e07c3756b91bedd9a2961360373\": container with ID starting with fab3f27c78f0707e903f27372397ee73ca4c7e07c3756b91bedd9a2961360373 not found: ID does not exist" containerID="fab3f27c78f0707e903f27372397ee73ca4c7e07c3756b91bedd9a2961360373"
Jan 28 16:22:18 crc kubenswrapper[4811]: I0128 16:22:18.666042 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fab3f27c78f0707e903f27372397ee73ca4c7e07c3756b91bedd9a2961360373"} err="failed to get container status \"fab3f27c78f0707e903f27372397ee73ca4c7e07c3756b91bedd9a2961360373\": rpc error: code = NotFound desc = could not find container \"fab3f27c78f0707e903f27372397ee73ca4c7e07c3756b91bedd9a2961360373\": container with ID starting with fab3f27c78f0707e903f27372397ee73ca4c7e07c3756b91bedd9a2961360373 not found: ID does not exist"
Jan 28 16:22:18 crc kubenswrapper[4811]: I0128 16:22:18.666059 4811 scope.go:117] "RemoveContainer" containerID="e6da4c711f748ab2dcf9831ec36b7b4f4392ec9c47bf3f429c432dc86d45c8c2"
Jan 28 16:22:18 crc kubenswrapper[4811]: E0128 16:22:18.666477 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6da4c711f748ab2dcf9831ec36b7b4f4392ec9c47bf3f429c432dc86d45c8c2\": container with ID starting with e6da4c711f748ab2dcf9831ec36b7b4f4392ec9c47bf3f429c432dc86d45c8c2 not found: ID does not exist" containerID="e6da4c711f748ab2dcf9831ec36b7b4f4392ec9c47bf3f429c432dc86d45c8c2"
Jan 28 16:22:18 crc kubenswrapper[4811]: I0128 16:22:18.666521 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6da4c711f748ab2dcf9831ec36b7b4f4392ec9c47bf3f429c432dc86d45c8c2"} err="failed to get container status \"e6da4c711f748ab2dcf9831ec36b7b4f4392ec9c47bf3f429c432dc86d45c8c2\": rpc error: code = NotFound desc = could not find container \"e6da4c711f748ab2dcf9831ec36b7b4f4392ec9c47bf3f429c432dc86d45c8c2\": container with ID starting with e6da4c711f748ab2dcf9831ec36b7b4f4392ec9c47bf3f429c432dc86d45c8c2 not found: ID does not exist"
Jan 28 16:22:20 crc kubenswrapper[4811]: I0128 16:22:20.349712 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86f56c99-3319-421c-9ca3-14027b9fe5aa" path="/var/lib/kubelet/pods/86f56c99-3319-421c-9ca3-14027b9fe5aa/volumes"
Jan 28 16:22:25 crc kubenswrapper[4811]: I0128 16:22:25.340775 4811 scope.go:117] "RemoveContainer" containerID="6354f4d56ac8ccab41aec8d744b7d40a4703f3f73187c0ee9dfe15ec4ec4fc7b"
Jan 28 16:22:25 crc kubenswrapper[4811]: E0128 16:22:25.341613 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 16:22:38 crc kubenswrapper[4811]: I0128 16:22:38.347686 4811 scope.go:117] "RemoveContainer" containerID="6354f4d56ac8ccab41aec8d744b7d40a4703f3f73187c0ee9dfe15ec4ec4fc7b"
Jan 28 16:22:38 crc kubenswrapper[4811]: E0128 16:22:38.348620 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 16:22:41 crc kubenswrapper[4811]: I0128 16:22:41.979471 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-z8b6l"]
Jan 28 16:22:41 crc kubenswrapper[4811]: E0128 16:22:41.981036 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13" containerName="extract-content"
Jan 28 16:22:41 crc kubenswrapper[4811]: I0128 16:22:41.981137 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13" containerName="extract-content"
Jan 28 16:22:41 crc kubenswrapper[4811]: E0128 16:22:41.981211 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86f56c99-3319-421c-9ca3-14027b9fe5aa" containerName="extract-content"
Jan 28 16:22:41 crc kubenswrapper[4811]: I0128 16:22:41.981270 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="86f56c99-3319-421c-9ca3-14027b9fe5aa" containerName="extract-content"
Jan 28 16:22:41 crc kubenswrapper[4811]: E0128 16:22:41.981343 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13" containerName="extract-utilities"
Jan 28 16:22:41 crc kubenswrapper[4811]: I0128 16:22:41.981398 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13" containerName="extract-utilities"
Jan 28 16:22:41 crc kubenswrapper[4811]: E0128 16:22:41.981492 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86f56c99-3319-421c-9ca3-14027b9fe5aa" containerName="registry-server"
Jan 28 16:22:41 crc kubenswrapper[4811]: I0128 16:22:41.981545 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="86f56c99-3319-421c-9ca3-14027b9fe5aa" containerName="registry-server"
Jan 28 16:22:41 crc kubenswrapper[4811]: E0128 16:22:41.981598 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86f56c99-3319-421c-9ca3-14027b9fe5aa" containerName="extract-utilities"
Jan 28 16:22:41 crc kubenswrapper[4811]: I0128 16:22:41.981669 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="86f56c99-3319-421c-9ca3-14027b9fe5aa" containerName="extract-utilities"
Jan 28 16:22:41 crc kubenswrapper[4811]: E0128 16:22:41.981770 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13" containerName="registry-server"
Jan 28 16:22:41 crc kubenswrapper[4811]: I0128 16:22:41.981828 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13" containerName="registry-server"
Jan 28 16:22:41 crc kubenswrapper[4811]: I0128 16:22:41.982042 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="a40bb0e2-6e4a-4c21-a4eb-fe758c68ea13" containerName="registry-server"
Jan 28 16:22:41 crc kubenswrapper[4811]: I0128 16:22:41.982121 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="86f56c99-3319-421c-9ca3-14027b9fe5aa" containerName="registry-server"
Jan 28 16:22:41 crc kubenswrapper[4811]: I0128 16:22:41.984662 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z8b6l"
Jan 28 16:22:41 crc kubenswrapper[4811]: I0128 16:22:41.990579 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-z8b6l"]
Jan 28 16:22:42 crc kubenswrapper[4811]: I0128 16:22:42.186490 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f6769ff7-feec-45ba-a958-09315e43975e-utilities\") pod \"community-operators-z8b6l\" (UID: \"f6769ff7-feec-45ba-a958-09315e43975e\") " pod="openshift-marketplace/community-operators-z8b6l"
Jan 28 16:22:42 crc kubenswrapper[4811]: I0128 16:22:42.186566 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f6769ff7-feec-45ba-a958-09315e43975e-catalog-content\") pod \"community-operators-z8b6l\" (UID: \"f6769ff7-feec-45ba-a958-09315e43975e\") " pod="openshift-marketplace/community-operators-z8b6l"
Jan 28 16:22:42 crc kubenswrapper[4811]: I0128 16:22:42.186616 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9gjl\" (UniqueName: \"kubernetes.io/projected/f6769ff7-feec-45ba-a958-09315e43975e-kube-api-access-k9gjl\") pod \"community-operators-z8b6l\" (UID: \"f6769ff7-feec-45ba-a958-09315e43975e\") " pod="openshift-marketplace/community-operators-z8b6l"
Jan 28 16:22:42 crc kubenswrapper[4811]: I0128 16:22:42.287817 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f6769ff7-feec-45ba-a958-09315e43975e-utilities\") pod \"community-operators-z8b6l\" (UID: \"f6769ff7-feec-45ba-a958-09315e43975e\") " pod="openshift-marketplace/community-operators-z8b6l"
Jan 28 16:22:42 crc kubenswrapper[4811]: I0128 16:22:42.287869 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f6769ff7-feec-45ba-a958-09315e43975e-catalog-content\") pod \"community-operators-z8b6l\" (UID: \"f6769ff7-feec-45ba-a958-09315e43975e\") " pod="openshift-marketplace/community-operators-z8b6l"
Jan 28 16:22:42 crc kubenswrapper[4811]: I0128 16:22:42.287905 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9gjl\" (UniqueName: \"kubernetes.io/projected/f6769ff7-feec-45ba-a958-09315e43975e-kube-api-access-k9gjl\") pod \"community-operators-z8b6l\" (UID: \"f6769ff7-feec-45ba-a958-09315e43975e\") " pod="openshift-marketplace/community-operators-z8b6l"
Jan 28 16:22:42 crc kubenswrapper[4811]: I0128 16:22:42.288305 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f6769ff7-feec-45ba-a958-09315e43975e-utilities\") pod \"community-operators-z8b6l\" (UID: \"f6769ff7-feec-45ba-a958-09315e43975e\") " pod="openshift-marketplace/community-operators-z8b6l"
Jan 28 16:22:42 crc kubenswrapper[4811]: I0128 16:22:42.288756 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f6769ff7-feec-45ba-a958-09315e43975e-catalog-content\") pod \"community-operators-z8b6l\" (UID: \"f6769ff7-feec-45ba-a958-09315e43975e\") " pod="openshift-marketplace/community-operators-z8b6l"
Jan 28 16:22:42 crc kubenswrapper[4811]: I0128 16:22:42.310700 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9gjl\" (UniqueName: \"kubernetes.io/projected/f6769ff7-feec-45ba-a958-09315e43975e-kube-api-access-k9gjl\") pod \"community-operators-z8b6l\" (UID: \"f6769ff7-feec-45ba-a958-09315e43975e\") " pod="openshift-marketplace/community-operators-z8b6l"
Jan 28 16:22:42 crc kubenswrapper[4811]: I0128 16:22:42.315724 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z8b6l"
Jan 28 16:22:42 crc kubenswrapper[4811]: I0128 16:22:42.828689 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-z8b6l"]
Jan 28 16:22:43 crc kubenswrapper[4811]: I0128 16:22:43.800025 4811 generic.go:334] "Generic (PLEG): container finished" podID="f6769ff7-feec-45ba-a958-09315e43975e" containerID="b915777dc91c87e06af0f6f3b916523cb90fb19b21d7bb467b9261ba8b9800d6" exitCode=0
Jan 28 16:22:43 crc kubenswrapper[4811]: I0128 16:22:43.800075 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z8b6l" event={"ID":"f6769ff7-feec-45ba-a958-09315e43975e","Type":"ContainerDied","Data":"b915777dc91c87e06af0f6f3b916523cb90fb19b21d7bb467b9261ba8b9800d6"}
Jan 28 16:22:43 crc kubenswrapper[4811]: I0128 16:22:43.800107 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z8b6l" event={"ID":"f6769ff7-feec-45ba-a958-09315e43975e","Type":"ContainerStarted","Data":"d7d7689d9edcc5774827472a118c8abfadcf033acdd34a3fc2cc210b8ea5b86f"}
Jan 28 16:22:45 crc kubenswrapper[4811]: I0128 16:22:45.815015 4811 generic.go:334] "Generic (PLEG): container finished" podID="f6769ff7-feec-45ba-a958-09315e43975e" containerID="2b49ad7e9e0e77e3cecc9f0d9c3ae81d1d2a4f7b20f0bd2d9a7fe20c3382658a" exitCode=0
Jan 28 16:22:45 crc kubenswrapper[4811]: I0128 16:22:45.815415 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z8b6l" event={"ID":"f6769ff7-feec-45ba-a958-09315e43975e","Type":"ContainerDied","Data":"2b49ad7e9e0e77e3cecc9f0d9c3ae81d1d2a4f7b20f0bd2d9a7fe20c3382658a"}
Jan 28 16:22:46 crc kubenswrapper[4811]: I0128 16:22:46.828320 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z8b6l" event={"ID":"f6769ff7-feec-45ba-a958-09315e43975e","Type":"ContainerStarted","Data":"cacdffa0c051f983412fb7175670c7680cbaf9581a19c3c6e27e390bb8f3ba8b"}
Jan 28 16:22:46 crc kubenswrapper[4811]: I0128 16:22:46.854868 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-z8b6l" podStartSLOduration=3.474028494 podStartE2EDuration="5.85484992s" podCreationTimestamp="2026-01-28 16:22:41 +0000 UTC" firstStartedPulling="2026-01-28 16:22:43.802956669 +0000 UTC m=+2256.557320252" lastFinishedPulling="2026-01-28 16:22:46.183778085 +0000 UTC m=+2258.938141678" observedRunningTime="2026-01-28 16:22:46.848531346 +0000 UTC m=+2259.602894959" watchObservedRunningTime="2026-01-28 16:22:46.85484992 +0000 UTC m=+2259.609213503"
Jan 28 16:22:52 crc kubenswrapper[4811]: I0128 16:22:52.316506 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-z8b6l"
Jan 28 16:22:52 crc kubenswrapper[4811]: I0128 16:22:52.317230 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-z8b6l"
Jan 28 16:22:52 crc kubenswrapper[4811]: I0128 16:22:52.339959 4811 scope.go:117] "RemoveContainer" containerID="6354f4d56ac8ccab41aec8d744b7d40a4703f3f73187c0ee9dfe15ec4ec4fc7b"
Jan 28 16:22:52 crc kubenswrapper[4811]: E0128 16:22:52.340634 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 16:22:52 crc kubenswrapper[4811]: I0128 16:22:52.376312 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-z8b6l"
Jan 28 16:22:52 crc kubenswrapper[4811]: I0128 16:22:52.942111 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-z8b6l"
Jan 28 16:22:52 crc kubenswrapper[4811]: I0128 16:22:52.987095 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-z8b6l"]
Jan 28 16:22:54 crc kubenswrapper[4811]: I0128 16:22:54.901268 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-z8b6l" podUID="f6769ff7-feec-45ba-a958-09315e43975e" containerName="registry-server" containerID="cri-o://cacdffa0c051f983412fb7175670c7680cbaf9581a19c3c6e27e390bb8f3ba8b" gracePeriod=2
Jan 28 16:22:55 crc kubenswrapper[4811]: I0128 16:22:55.351080 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z8b6l"
Jan 28 16:22:55 crc kubenswrapper[4811]: I0128 16:22:55.383225 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k9gjl\" (UniqueName: \"kubernetes.io/projected/f6769ff7-feec-45ba-a958-09315e43975e-kube-api-access-k9gjl\") pod \"f6769ff7-feec-45ba-a958-09315e43975e\" (UID: \"f6769ff7-feec-45ba-a958-09315e43975e\") "
Jan 28 16:22:55 crc kubenswrapper[4811]: I0128 16:22:55.383282 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f6769ff7-feec-45ba-a958-09315e43975e-utilities\") pod \"f6769ff7-feec-45ba-a958-09315e43975e\" (UID: \"f6769ff7-feec-45ba-a958-09315e43975e\") "
Jan 28 16:22:55 crc kubenswrapper[4811]: I0128 16:22:55.383351 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f6769ff7-feec-45ba-a958-09315e43975e-catalog-content\") pod \"f6769ff7-feec-45ba-a958-09315e43975e\" (UID: \"f6769ff7-feec-45ba-a958-09315e43975e\") "
Jan 28 16:22:55 crc kubenswrapper[4811]: I0128 16:22:55.384714 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f6769ff7-feec-45ba-a958-09315e43975e-utilities" (OuterVolumeSpecName: "utilities") pod "f6769ff7-feec-45ba-a958-09315e43975e" (UID: "f6769ff7-feec-45ba-a958-09315e43975e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 16:22:55 crc kubenswrapper[4811]: I0128 16:22:55.391929 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6769ff7-feec-45ba-a958-09315e43975e-kube-api-access-k9gjl" (OuterVolumeSpecName: "kube-api-access-k9gjl") pod "f6769ff7-feec-45ba-a958-09315e43975e" (UID: "f6769ff7-feec-45ba-a958-09315e43975e"). InnerVolumeSpecName "kube-api-access-k9gjl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 16:22:55 crc kubenswrapper[4811]: I0128 16:22:55.484406 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k9gjl\" (UniqueName: \"kubernetes.io/projected/f6769ff7-feec-45ba-a958-09315e43975e-kube-api-access-k9gjl\") on node \"crc\" DevicePath \"\""
Jan 28 16:22:55 crc kubenswrapper[4811]: I0128 16:22:55.484455 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f6769ff7-feec-45ba-a958-09315e43975e-utilities\") on node \"crc\" DevicePath \"\""
Jan 28 16:22:55 crc kubenswrapper[4811]: I0128 16:22:55.913928 4811 generic.go:334] "Generic (PLEG): container finished" podID="f6769ff7-feec-45ba-a958-09315e43975e" containerID="cacdffa0c051f983412fb7175670c7680cbaf9581a19c3c6e27e390bb8f3ba8b" exitCode=0
Jan 28 16:22:55 crc kubenswrapper[4811]: I0128 16:22:55.913989 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z8b6l" event={"ID":"f6769ff7-feec-45ba-a958-09315e43975e","Type":"ContainerDied","Data":"cacdffa0c051f983412fb7175670c7680cbaf9581a19c3c6e27e390bb8f3ba8b"}
Jan 28 16:22:55 crc kubenswrapper[4811]: I0128 16:22:55.914032 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z8b6l" event={"ID":"f6769ff7-feec-45ba-a958-09315e43975e","Type":"ContainerDied","Data":"d7d7689d9edcc5774827472a118c8abfadcf033acdd34a3fc2cc210b8ea5b86f"}
Jan 28 16:22:55 crc kubenswrapper[4811]: I0128 16:22:55.914059 4811 scope.go:117] "RemoveContainer" containerID="cacdffa0c051f983412fb7175670c7680cbaf9581a19c3c6e27e390bb8f3ba8b"
Jan 28 16:22:55 crc kubenswrapper[4811]: I0128 16:22:55.914270 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z8b6l"
Jan 28 16:22:55 crc kubenswrapper[4811]: I0128 16:22:55.941154 4811 scope.go:117] "RemoveContainer" containerID="2b49ad7e9e0e77e3cecc9f0d9c3ae81d1d2a4f7b20f0bd2d9a7fe20c3382658a"
Jan 28 16:22:55 crc kubenswrapper[4811]: I0128 16:22:55.972728 4811 scope.go:117] "RemoveContainer" containerID="b915777dc91c87e06af0f6f3b916523cb90fb19b21d7bb467b9261ba8b9800d6"
Jan 28 16:22:55 crc kubenswrapper[4811]: I0128 16:22:55.996044 4811 scope.go:117] "RemoveContainer" containerID="cacdffa0c051f983412fb7175670c7680cbaf9581a19c3c6e27e390bb8f3ba8b"
Jan 28 16:22:55 crc kubenswrapper[4811]: E0128 16:22:55.996512 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cacdffa0c051f983412fb7175670c7680cbaf9581a19c3c6e27e390bb8f3ba8b\": container with ID starting with cacdffa0c051f983412fb7175670c7680cbaf9581a19c3c6e27e390bb8f3ba8b not found: ID does not exist" containerID="cacdffa0c051f983412fb7175670c7680cbaf9581a19c3c6e27e390bb8f3ba8b"
Jan 28 16:22:55 crc kubenswrapper[4811]: I0128 16:22:55.996546 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cacdffa0c051f983412fb7175670c7680cbaf9581a19c3c6e27e390bb8f3ba8b"} err="failed to get container status \"cacdffa0c051f983412fb7175670c7680cbaf9581a19c3c6e27e390bb8f3ba8b\": rpc error: code = NotFound desc = could not find container \"cacdffa0c051f983412fb7175670c7680cbaf9581a19c3c6e27e390bb8f3ba8b\": container with ID starting with cacdffa0c051f983412fb7175670c7680cbaf9581a19c3c6e27e390bb8f3ba8b not found: ID does not exist"
Jan 28 16:22:55 crc kubenswrapper[4811]: I0128 16:22:55.996569 4811 scope.go:117] "RemoveContainer" containerID="2b49ad7e9e0e77e3cecc9f0d9c3ae81d1d2a4f7b20f0bd2d9a7fe20c3382658a"
Jan 28 16:22:55 crc kubenswrapper[4811]: E0128 16:22:55.997004 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b49ad7e9e0e77e3cecc9f0d9c3ae81d1d2a4f7b20f0bd2d9a7fe20c3382658a\": container with ID starting with 2b49ad7e9e0e77e3cecc9f0d9c3ae81d1d2a4f7b20f0bd2d9a7fe20c3382658a not found: ID does not exist" containerID="2b49ad7e9e0e77e3cecc9f0d9c3ae81d1d2a4f7b20f0bd2d9a7fe20c3382658a"
Jan 28 16:22:55 crc kubenswrapper[4811]: I0128 16:22:55.997067 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b49ad7e9e0e77e3cecc9f0d9c3ae81d1d2a4f7b20f0bd2d9a7fe20c3382658a"} err="failed to get container status \"2b49ad7e9e0e77e3cecc9f0d9c3ae81d1d2a4f7b20f0bd2d9a7fe20c3382658a\": rpc error: code = NotFound desc = could not find container \"2b49ad7e9e0e77e3cecc9f0d9c3ae81d1d2a4f7b20f0bd2d9a7fe20c3382658a\": container with ID starting with 2b49ad7e9e0e77e3cecc9f0d9c3ae81d1d2a4f7b20f0bd2d9a7fe20c3382658a not found: ID does not exist"
Jan 28 16:22:55 crc kubenswrapper[4811]: I0128 16:22:55.997084 4811 scope.go:117] "RemoveContainer" containerID="b915777dc91c87e06af0f6f3b916523cb90fb19b21d7bb467b9261ba8b9800d6"
Jan 28 16:22:55 crc kubenswrapper[4811]: E0128 16:22:55.997472 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b915777dc91c87e06af0f6f3b916523cb90fb19b21d7bb467b9261ba8b9800d6\": container with ID starting with b915777dc91c87e06af0f6f3b916523cb90fb19b21d7bb467b9261ba8b9800d6 not found: ID does not exist" containerID="b915777dc91c87e06af0f6f3b916523cb90fb19b21d7bb467b9261ba8b9800d6"
Jan 28 16:22:55 crc kubenswrapper[4811]: I0128 16:22:55.997530 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b915777dc91c87e06af0f6f3b916523cb90fb19b21d7bb467b9261ba8b9800d6"} err="failed to get container status \"b915777dc91c87e06af0f6f3b916523cb90fb19b21d7bb467b9261ba8b9800d6\": rpc error: code = NotFound desc = could not find container \"b915777dc91c87e06af0f6f3b916523cb90fb19b21d7bb467b9261ba8b9800d6\": container with ID starting with b915777dc91c87e06af0f6f3b916523cb90fb19b21d7bb467b9261ba8b9800d6 not found: ID does not exist" Jan 28 16:22:56 crc kubenswrapper[4811]: I0128 16:22:56.056527 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f6769ff7-feec-45ba-a958-09315e43975e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f6769ff7-feec-45ba-a958-09315e43975e" (UID: "f6769ff7-feec-45ba-a958-09315e43975e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:22:56 crc kubenswrapper[4811]: I0128 16:22:56.097763 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f6769ff7-feec-45ba-a958-09315e43975e-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 16:22:56 crc kubenswrapper[4811]: I0128 16:22:56.248504 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-z8b6l"] Jan 28 16:22:56 crc kubenswrapper[4811]: I0128 16:22:56.253333 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-z8b6l"] Jan 28 16:22:56 crc kubenswrapper[4811]: I0128 16:22:56.351802 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6769ff7-feec-45ba-a958-09315e43975e" path="/var/lib/kubelet/pods/f6769ff7-feec-45ba-a958-09315e43975e/volumes" Jan 28 16:23:04 crc kubenswrapper[4811]: I0128 16:23:04.340399 4811 scope.go:117] "RemoveContainer" containerID="6354f4d56ac8ccab41aec8d744b7d40a4703f3f73187c0ee9dfe15ec4ec4fc7b" Jan 28 16:23:04 crc kubenswrapper[4811]: E0128 16:23:04.341549 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:23:15 crc kubenswrapper[4811]: I0128 16:23:15.339873 4811 scope.go:117] "RemoveContainer" containerID="6354f4d56ac8ccab41aec8d744b7d40a4703f3f73187c0ee9dfe15ec4ec4fc7b" Jan 28 16:23:15 crc kubenswrapper[4811]: E0128 16:23:15.340663 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:23:28 crc kubenswrapper[4811]: I0128 16:23:28.349511 4811 scope.go:117] "RemoveContainer" containerID="6354f4d56ac8ccab41aec8d744b7d40a4703f3f73187c0ee9dfe15ec4ec4fc7b" Jan 28 16:23:28 crc kubenswrapper[4811]: E0128 16:23:28.350706 4811 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:23:43 crc kubenswrapper[4811]: I0128 16:23:43.340266 4811 scope.go:117] "RemoveContainer" containerID="6354f4d56ac8ccab41aec8d744b7d40a4703f3f73187c0ee9dfe15ec4ec4fc7b" Jan 28 16:23:43 crc kubenswrapper[4811]: E0128 16:23:43.341058 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:23:57 crc kubenswrapper[4811]: I0128 16:23:57.339053 4811 scope.go:117] "RemoveContainer" containerID="6354f4d56ac8ccab41aec8d744b7d40a4703f3f73187c0ee9dfe15ec4ec4fc7b" Jan 28 16:23:57 crc kubenswrapper[4811]: E0128 16:23:57.339827 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:24:12 crc kubenswrapper[4811]: I0128 16:24:12.340288 4811 scope.go:117] "RemoveContainer" containerID="6354f4d56ac8ccab41aec8d744b7d40a4703f3f73187c0ee9dfe15ec4ec4fc7b" Jan 28 16:24:12 crc kubenswrapper[4811]: E0128 16:24:12.341039 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:24:24 crc kubenswrapper[4811]: I0128 16:24:24.340091 4811 scope.go:117] "RemoveContainer" containerID="6354f4d56ac8ccab41aec8d744b7d40a4703f3f73187c0ee9dfe15ec4ec4fc7b" Jan 28 16:24:24 crc kubenswrapper[4811]: E0128 16:24:24.340738 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:24:37 crc kubenswrapper[4811]: I0128 16:24:37.339184 4811 scope.go:117] "RemoveContainer" containerID="6354f4d56ac8ccab41aec8d744b7d40a4703f3f73187c0ee9dfe15ec4ec4fc7b" Jan 28 16:24:37 crc kubenswrapper[4811]: E0128 16:24:37.339913 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:24:48 crc kubenswrapper[4811]: I0128 16:24:48.343036 4811 scope.go:117] "RemoveContainer" containerID="6354f4d56ac8ccab41aec8d744b7d40a4703f3f73187c0ee9dfe15ec4ec4fc7b" Jan 28 16:24:48 crc kubenswrapper[4811]: E0128 16:24:48.343776 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:25:03 crc kubenswrapper[4811]: I0128 16:25:03.339566 4811 scope.go:117] "RemoveContainer" containerID="6354f4d56ac8ccab41aec8d744b7d40a4703f3f73187c0ee9dfe15ec4ec4fc7b" Jan 28 16:25:03 crc kubenswrapper[4811]: E0128 16:25:03.340482 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:25:15 crc kubenswrapper[4811]: I0128 16:25:15.340157 4811 scope.go:117] "RemoveContainer" containerID="6354f4d56ac8ccab41aec8d744b7d40a4703f3f73187c0ee9dfe15ec4ec4fc7b" Jan 28 16:25:15 crc kubenswrapper[4811]: E0128 16:25:15.341012 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:25:30 crc kubenswrapper[4811]: I0128 16:25:30.339858 4811 scope.go:117] "RemoveContainer" containerID="6354f4d56ac8ccab41aec8d744b7d40a4703f3f73187c0ee9dfe15ec4ec4fc7b" Jan 28 16:25:30 crc kubenswrapper[4811]: E0128 16:25:30.340686 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:25:41 crc kubenswrapper[4811]: I0128 16:25:41.339275 4811 scope.go:117] "RemoveContainer" containerID="6354f4d56ac8ccab41aec8d744b7d40a4703f3f73187c0ee9dfe15ec4ec4fc7b" Jan 28 16:25:41 crc kubenswrapper[4811]: E0128 16:25:41.339903 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" 
podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:25:55 crc kubenswrapper[4811]: I0128 16:25:55.339758 4811 scope.go:117] "RemoveContainer" containerID="6354f4d56ac8ccab41aec8d744b7d40a4703f3f73187c0ee9dfe15ec4ec4fc7b" Jan 28 16:25:55 crc kubenswrapper[4811]: E0128 16:25:55.340478 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:26:06 crc kubenswrapper[4811]: I0128 16:26:06.339182 4811 scope.go:117] "RemoveContainer" containerID="6354f4d56ac8ccab41aec8d744b7d40a4703f3f73187c0ee9dfe15ec4ec4fc7b" Jan 28 16:26:06 crc kubenswrapper[4811]: E0128 16:26:06.339814 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:26:15 crc kubenswrapper[4811]: I0128 16:26:15.849656 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-nmmgd"] Jan 28 16:26:15 crc kubenswrapper[4811]: E0128 16:26:15.850642 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6769ff7-feec-45ba-a958-09315e43975e" containerName="extract-utilities" Jan 28 16:26:15 crc kubenswrapper[4811]: I0128 16:26:15.850661 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6769ff7-feec-45ba-a958-09315e43975e" containerName="extract-utilities" Jan 28 16:26:15 crc kubenswrapper[4811]: E0128 16:26:15.850690 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6769ff7-feec-45ba-a958-09315e43975e" containerName="extract-content" Jan 28 16:26:15 crc kubenswrapper[4811]: I0128 16:26:15.850699 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6769ff7-feec-45ba-a958-09315e43975e" containerName="extract-content" Jan 28 16:26:15 crc kubenswrapper[4811]: E0128 16:26:15.850709 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6769ff7-feec-45ba-a958-09315e43975e" containerName="registry-server" Jan 28 16:26:15 crc kubenswrapper[4811]: I0128 16:26:15.850718 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6769ff7-feec-45ba-a958-09315e43975e" containerName="registry-server" Jan 28 16:26:15 crc kubenswrapper[4811]: I0128 16:26:15.850903 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6769ff7-feec-45ba-a958-09315e43975e" containerName="registry-server" Jan 28 16:26:15 crc kubenswrapper[4811]: I0128 16:26:15.852339 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nmmgd" Jan 28 16:26:15 crc kubenswrapper[4811]: I0128 16:26:15.857192 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nmmgd"] Jan 28 16:26:16 crc kubenswrapper[4811]: I0128 16:26:16.021745 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8b8244f-85e7-4f95-84ef-bbbde5593d3f-catalog-content\") pod \"redhat-operators-nmmgd\" (UID: \"a8b8244f-85e7-4f95-84ef-bbbde5593d3f\") " pod="openshift-marketplace/redhat-operators-nmmgd" Jan 28 16:26:16 crc kubenswrapper[4811]: I0128 16:26:16.021942 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8b8244f-85e7-4f95-84ef-bbbde5593d3f-utilities\") pod \"redhat-operators-nmmgd\" (UID: \"a8b8244f-85e7-4f95-84ef-bbbde5593d3f\") " pod="openshift-marketplace/redhat-operators-nmmgd" Jan 28 16:26:16 crc kubenswrapper[4811]: I0128 16:26:16.022020 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxt7b\" (UniqueName: \"kubernetes.io/projected/a8b8244f-85e7-4f95-84ef-bbbde5593d3f-kube-api-access-xxt7b\") pod \"redhat-operators-nmmgd\" (UID: \"a8b8244f-85e7-4f95-84ef-bbbde5593d3f\") " pod="openshift-marketplace/redhat-operators-nmmgd" Jan 28 16:26:16 crc kubenswrapper[4811]: I0128 16:26:16.123142 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8b8244f-85e7-4f95-84ef-bbbde5593d3f-utilities\") pod \"redhat-operators-nmmgd\" (UID: \"a8b8244f-85e7-4f95-84ef-bbbde5593d3f\") " pod="openshift-marketplace/redhat-operators-nmmgd" Jan 28 16:26:16 crc kubenswrapper[4811]: I0128 16:26:16.123205 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxt7b\" (UniqueName: \"kubernetes.io/projected/a8b8244f-85e7-4f95-84ef-bbbde5593d3f-kube-api-access-xxt7b\") pod \"redhat-operators-nmmgd\" (UID: \"a8b8244f-85e7-4f95-84ef-bbbde5593d3f\") " pod="openshift-marketplace/redhat-operators-nmmgd" Jan 28 16:26:16 crc kubenswrapper[4811]: I0128 16:26:16.123275 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8b8244f-85e7-4f95-84ef-bbbde5593d3f-catalog-content\") pod \"redhat-operators-nmmgd\" (UID: \"a8b8244f-85e7-4f95-84ef-bbbde5593d3f\") " pod="openshift-marketplace/redhat-operators-nmmgd" Jan 28 16:26:16 crc kubenswrapper[4811]: I0128 16:26:16.123783 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8b8244f-85e7-4f95-84ef-bbbde5593d3f-utilities\") pod \"redhat-operators-nmmgd\" (UID: \"a8b8244f-85e7-4f95-84ef-bbbde5593d3f\") " pod="openshift-marketplace/redhat-operators-nmmgd" Jan 28 16:26:16 crc kubenswrapper[4811]: I0128 16:26:16.123839 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8b8244f-85e7-4f95-84ef-bbbde5593d3f-catalog-content\") pod \"redhat-operators-nmmgd\" (UID: \"a8b8244f-85e7-4f95-84ef-bbbde5593d3f\") " pod="openshift-marketplace/redhat-operators-nmmgd" Jan 28 16:26:16 crc kubenswrapper[4811]: I0128 16:26:16.146603 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-xxt7b\" (UniqueName: \"kubernetes.io/projected/a8b8244f-85e7-4f95-84ef-bbbde5593d3f-kube-api-access-xxt7b\") pod \"redhat-operators-nmmgd\" (UID: \"a8b8244f-85e7-4f95-84ef-bbbde5593d3f\") " pod="openshift-marketplace/redhat-operators-nmmgd" Jan 28 16:26:16 crc kubenswrapper[4811]: I0128 16:26:16.175809 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nmmgd" Jan 28 16:26:16 crc kubenswrapper[4811]: I0128 16:26:16.603021 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nmmgd"] Jan 28 16:26:17 crc kubenswrapper[4811]: I0128 16:26:17.335996 4811 generic.go:334] "Generic (PLEG): container finished" podID="a8b8244f-85e7-4f95-84ef-bbbde5593d3f" containerID="59f1e3649b607cbf3883b3dcdb8283f7eaed6ac806e5bda44d951421d0c3dd05" exitCode=0 Jan 28 16:26:17 crc kubenswrapper[4811]: I0128 16:26:17.336064 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nmmgd" event={"ID":"a8b8244f-85e7-4f95-84ef-bbbde5593d3f","Type":"ContainerDied","Data":"59f1e3649b607cbf3883b3dcdb8283f7eaed6ac806e5bda44d951421d0c3dd05"} Jan 28 16:26:17 crc kubenswrapper[4811]: I0128 16:26:17.336320 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nmmgd" event={"ID":"a8b8244f-85e7-4f95-84ef-bbbde5593d3f","Type":"ContainerStarted","Data":"a988fa68bd9852dab55bde3f8f363ed23cb5fffcef989551e41780e2cb653c0e"} Jan 28 16:26:17 crc kubenswrapper[4811]: I0128 16:26:17.339258 4811 scope.go:117] "RemoveContainer" containerID="6354f4d56ac8ccab41aec8d744b7d40a4703f3f73187c0ee9dfe15ec4ec4fc7b" Jan 28 16:26:17 crc kubenswrapper[4811]: E0128 16:26:17.339520 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:26:18 crc kubenswrapper[4811]: I0128 16:26:18.347516 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nmmgd" event={"ID":"a8b8244f-85e7-4f95-84ef-bbbde5593d3f","Type":"ContainerStarted","Data":"9a97ba1837071f5493078966b4cd1dc67ae04b2635ae5d8001b1ceb3b64943b2"} Jan 28 16:26:19 crc kubenswrapper[4811]: I0128 16:26:19.355270 4811 generic.go:334] "Generic (PLEG): container finished" podID="a8b8244f-85e7-4f95-84ef-bbbde5593d3f" containerID="9a97ba1837071f5493078966b4cd1dc67ae04b2635ae5d8001b1ceb3b64943b2" exitCode=0 Jan 28 16:26:19 crc kubenswrapper[4811]: I0128 16:26:19.355362 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nmmgd" event={"ID":"a8b8244f-85e7-4f95-84ef-bbbde5593d3f","Type":"ContainerDied","Data":"9a97ba1837071f5493078966b4cd1dc67ae04b2635ae5d8001b1ceb3b64943b2"} Jan 28 16:26:20 crc kubenswrapper[4811]: I0128 16:26:20.364041 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nmmgd" event={"ID":"a8b8244f-85e7-4f95-84ef-bbbde5593d3f","Type":"ContainerStarted","Data":"fd26a9445cb88159862283db319f8c28e2063c96844d8743d4edf7acd98e74c4"} Jan 28 16:26:20 crc kubenswrapper[4811]: I0128 16:26:20.383559 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-operators-nmmgd" podStartSLOduration=2.809329452 podStartE2EDuration="5.383542485s" podCreationTimestamp="2026-01-28 16:26:15 +0000 UTC" firstStartedPulling="2026-01-28 16:26:17.337469805 +0000 UTC m=+2470.091833388" lastFinishedPulling="2026-01-28 16:26:19.911682838 +0000 UTC m=+2472.666046421" observedRunningTime="2026-01-28 16:26:20.382727492 +0000 UTC m=+2473.137091075" watchObservedRunningTime="2026-01-28 16:26:20.383542485 +0000 UTC m=+2473.137906058" Jan 28 16:26:26 crc kubenswrapper[4811]: I0128 16:26:26.176051 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-nmmgd" Jan 28 16:26:26 crc kubenswrapper[4811]: I0128 16:26:26.176625 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-nmmgd" Jan 28 16:26:26 crc kubenswrapper[4811]: I0128 16:26:26.215412 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-nmmgd" Jan 28 16:26:26 crc kubenswrapper[4811]: I0128 16:26:26.446236 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-nmmgd" Jan 28 16:26:26 crc kubenswrapper[4811]: I0128 16:26:26.491051 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nmmgd"] Jan 28 16:26:28 crc kubenswrapper[4811]: I0128 16:26:28.412725 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-nmmgd" podUID="a8b8244f-85e7-4f95-84ef-bbbde5593d3f" containerName="registry-server" containerID="cri-o://fd26a9445cb88159862283db319f8c28e2063c96844d8743d4edf7acd98e74c4" gracePeriod=2 Jan 28 16:26:29 crc kubenswrapper[4811]: I0128 16:26:29.904627 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nmmgd" Jan 28 16:26:30 crc kubenswrapper[4811]: I0128 16:26:30.021617 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8b8244f-85e7-4f95-84ef-bbbde5593d3f-catalog-content\") pod \"a8b8244f-85e7-4f95-84ef-bbbde5593d3f\" (UID: \"a8b8244f-85e7-4f95-84ef-bbbde5593d3f\") " Jan 28 16:26:30 crc kubenswrapper[4811]: I0128 16:26:30.021865 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxt7b\" (UniqueName: \"kubernetes.io/projected/a8b8244f-85e7-4f95-84ef-bbbde5593d3f-kube-api-access-xxt7b\") pod \"a8b8244f-85e7-4f95-84ef-bbbde5593d3f\" (UID: \"a8b8244f-85e7-4f95-84ef-bbbde5593d3f\") " Jan 28 16:26:30 crc kubenswrapper[4811]: I0128 16:26:30.022592 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8b8244f-85e7-4f95-84ef-bbbde5593d3f-utilities\") pod \"a8b8244f-85e7-4f95-84ef-bbbde5593d3f\" (UID: \"a8b8244f-85e7-4f95-84ef-bbbde5593d3f\") " Jan 28 16:26:30 crc kubenswrapper[4811]: I0128 16:26:30.023518 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8b8244f-85e7-4f95-84ef-bbbde5593d3f-utilities" (OuterVolumeSpecName: "utilities") pod "a8b8244f-85e7-4f95-84ef-bbbde5593d3f" (UID: "a8b8244f-85e7-4f95-84ef-bbbde5593d3f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:26:30 crc kubenswrapper[4811]: I0128 16:26:30.034803 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8b8244f-85e7-4f95-84ef-bbbde5593d3f-kube-api-access-xxt7b" (OuterVolumeSpecName: "kube-api-access-xxt7b") pod "a8b8244f-85e7-4f95-84ef-bbbde5593d3f" (UID: "a8b8244f-85e7-4f95-84ef-bbbde5593d3f"). InnerVolumeSpecName "kube-api-access-xxt7b". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:26:30 crc kubenswrapper[4811]: I0128 16:26:30.124040 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxt7b\" (UniqueName: \"kubernetes.io/projected/a8b8244f-85e7-4f95-84ef-bbbde5593d3f-kube-api-access-xxt7b\") on node \"crc\" DevicePath \"\"" Jan 28 16:26:30 crc kubenswrapper[4811]: I0128 16:26:30.124075 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8b8244f-85e7-4f95-84ef-bbbde5593d3f-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 16:26:30 crc kubenswrapper[4811]: I0128 16:26:30.182827 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8b8244f-85e7-4f95-84ef-bbbde5593d3f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a8b8244f-85e7-4f95-84ef-bbbde5593d3f" (UID: "a8b8244f-85e7-4f95-84ef-bbbde5593d3f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:26:30 crc kubenswrapper[4811]: I0128 16:26:30.224985 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8b8244f-85e7-4f95-84ef-bbbde5593d3f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 16:26:30 crc kubenswrapper[4811]: I0128 16:26:30.445900 4811 generic.go:334] "Generic (PLEG): container finished" podID="a8b8244f-85e7-4f95-84ef-bbbde5593d3f" containerID="fd26a9445cb88159862283db319f8c28e2063c96844d8743d4edf7acd98e74c4" exitCode=0 Jan 28 16:26:30 crc kubenswrapper[4811]: I0128 16:26:30.445944 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nmmgd" event={"ID":"a8b8244f-85e7-4f95-84ef-bbbde5593d3f","Type":"ContainerDied","Data":"fd26a9445cb88159862283db319f8c28e2063c96844d8743d4edf7acd98e74c4"} Jan 28 16:26:30 crc kubenswrapper[4811]: I0128 16:26:30.445988 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nmmgd" event={"ID":"a8b8244f-85e7-4f95-84ef-bbbde5593d3f","Type":"ContainerDied","Data":"a988fa68bd9852dab55bde3f8f363ed23cb5fffcef989551e41780e2cb653c0e"} Jan 28 16:26:30 crc kubenswrapper[4811]: I0128 16:26:30.446006 4811 scope.go:117] "RemoveContainer" containerID="fd26a9445cb88159862283db319f8c28e2063c96844d8743d4edf7acd98e74c4" Jan 28 16:26:30 crc kubenswrapper[4811]: I0128 16:26:30.445963 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nmmgd" Jan 28 16:26:30 crc kubenswrapper[4811]: I0128 16:26:30.470386 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nmmgd"] Jan 28 16:26:30 crc kubenswrapper[4811]: I0128 16:26:30.473756 4811 scope.go:117] "RemoveContainer" containerID="9a97ba1837071f5493078966b4cd1dc67ae04b2635ae5d8001b1ceb3b64943b2" Jan 28 16:26:30 crc kubenswrapper[4811]: I0128 16:26:30.479861 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-nmmgd"] Jan 28 16:26:30 crc kubenswrapper[4811]: I0128 16:26:30.495184 4811 scope.go:117] "RemoveContainer" containerID="59f1e3649b607cbf3883b3dcdb8283f7eaed6ac806e5bda44d951421d0c3dd05" Jan 28 16:26:30 crc kubenswrapper[4811]: I0128 16:26:30.515802 4811 scope.go:117] "RemoveContainer" containerID="fd26a9445cb88159862283db319f8c28e2063c96844d8743d4edf7acd98e74c4" Jan 28 16:26:30 crc kubenswrapper[4811]: E0128 16:26:30.516178 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd26a9445cb88159862283db319f8c28e2063c96844d8743d4edf7acd98e74c4\": container with ID starting with fd26a9445cb88159862283db319f8c28e2063c96844d8743d4edf7acd98e74c4 not found: ID does not exist" containerID="fd26a9445cb88159862283db319f8c28e2063c96844d8743d4edf7acd98e74c4" Jan 28 16:26:30 crc kubenswrapper[4811]: I0128 16:26:30.516227 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd26a9445cb88159862283db319f8c28e2063c96844d8743d4edf7acd98e74c4"} err="failed to get container status \"fd26a9445cb88159862283db319f8c28e2063c96844d8743d4edf7acd98e74c4\": rpc error: code = NotFound desc = could not find container \"fd26a9445cb88159862283db319f8c28e2063c96844d8743d4edf7acd98e74c4\": container with ID starting with fd26a9445cb88159862283db319f8c28e2063c96844d8743d4edf7acd98e74c4 not found: ID does not exist" Jan 28 16:26:30 crc kubenswrapper[4811]: I0128 16:26:30.516256 4811 scope.go:117] "RemoveContainer" containerID="9a97ba1837071f5493078966b4cd1dc67ae04b2635ae5d8001b1ceb3b64943b2" Jan 28 16:26:30 crc kubenswrapper[4811]: E0128 16:26:30.516623 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a97ba1837071f5493078966b4cd1dc67ae04b2635ae5d8001b1ceb3b64943b2\": container with ID starting with 9a97ba1837071f5493078966b4cd1dc67ae04b2635ae5d8001b1ceb3b64943b2 not found: ID does not exist" containerID="9a97ba1837071f5493078966b4cd1dc67ae04b2635ae5d8001b1ceb3b64943b2" Jan 28 16:26:30 crc kubenswrapper[4811]: I0128 16:26:30.516656 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a97ba1837071f5493078966b4cd1dc67ae04b2635ae5d8001b1ceb3b64943b2"} err="failed to get container status \"9a97ba1837071f5493078966b4cd1dc67ae04b2635ae5d8001b1ceb3b64943b2\": rpc error: code = NotFound desc = could not find container \"9a97ba1837071f5493078966b4cd1dc67ae04b2635ae5d8001b1ceb3b64943b2\": container with ID starting with 9a97ba1837071f5493078966b4cd1dc67ae04b2635ae5d8001b1ceb3b64943b2 not found: ID does not exist" Jan 28 16:26:30 crc kubenswrapper[4811]: I0128 16:26:30.516680 4811 scope.go:117] "RemoveContainer" containerID="59f1e3649b607cbf3883b3dcdb8283f7eaed6ac806e5bda44d951421d0c3dd05" Jan 28 16:26:30 crc kubenswrapper[4811]: E0128 16:26:30.516980 4811 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"59f1e3649b607cbf3883b3dcdb8283f7eaed6ac806e5bda44d951421d0c3dd05\": container with ID starting with 59f1e3649b607cbf3883b3dcdb8283f7eaed6ac806e5bda44d951421d0c3dd05 not found: ID does not exist" containerID="59f1e3649b607cbf3883b3dcdb8283f7eaed6ac806e5bda44d951421d0c3dd05" Jan 28 16:26:30 crc kubenswrapper[4811]: I0128 16:26:30.517014 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59f1e3649b607cbf3883b3dcdb8283f7eaed6ac806e5bda44d951421d0c3dd05"} err="failed to get container status \"59f1e3649b607cbf3883b3dcdb8283f7eaed6ac806e5bda44d951421d0c3dd05\": rpc error: code = NotFound desc = could not find container \"59f1e3649b607cbf3883b3dcdb8283f7eaed6ac806e5bda44d951421d0c3dd05\": container with ID starting with 59f1e3649b607cbf3883b3dcdb8283f7eaed6ac806e5bda44d951421d0c3dd05 not found: ID does not exist" Jan 28 16:26:32 crc kubenswrapper[4811]: I0128 16:26:32.339963 4811 scope.go:117] "RemoveContainer" containerID="6354f4d56ac8ccab41aec8d744b7d40a4703f3f73187c0ee9dfe15ec4ec4fc7b" Jan 28 16:26:32 crc kubenswrapper[4811]: E0128 16:26:32.340238 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:26:32 crc kubenswrapper[4811]: I0128 16:26:32.349939 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8b8244f-85e7-4f95-84ef-bbbde5593d3f" path="/var/lib/kubelet/pods/a8b8244f-85e7-4f95-84ef-bbbde5593d3f/volumes" Jan 28 16:26:46 crc kubenswrapper[4811]: I0128 16:26:46.340010 4811 scope.go:117] "RemoveContainer" containerID="6354f4d56ac8ccab41aec8d744b7d40a4703f3f73187c0ee9dfe15ec4ec4fc7b" Jan 28 16:26:46 crc kubenswrapper[4811]: I0128 16:26:46.554579 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"bf9b4f2403fde04329fdc551c9926f4fafdc3da91e03d66ec5f8cf60e3e0b1f5"} Jan 28 16:29:03 crc kubenswrapper[4811]: I0128 16:29:03.092873 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 16:29:03 crc kubenswrapper[4811]: I0128 16:29:03.093396 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 16:29:33 crc kubenswrapper[4811]: I0128 16:29:33.087797 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 16:29:33 crc kubenswrapper[4811]: I0128 16:29:33.089709 4811 
prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 16:30:00 crc kubenswrapper[4811]: I0128 16:30:00.151909 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493630-njk86"] Jan 28 16:30:00 crc kubenswrapper[4811]: E0128 16:30:00.152810 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8b8244f-85e7-4f95-84ef-bbbde5593d3f" containerName="extract-utilities" Jan 28 16:30:00 crc kubenswrapper[4811]: I0128 16:30:00.152826 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8b8244f-85e7-4f95-84ef-bbbde5593d3f" containerName="extract-utilities" Jan 28 16:30:00 crc kubenswrapper[4811]: E0128 16:30:00.152859 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8b8244f-85e7-4f95-84ef-bbbde5593d3f" containerName="extract-content" Jan 28 16:30:00 crc kubenswrapper[4811]: I0128 16:30:00.152867 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8b8244f-85e7-4f95-84ef-bbbde5593d3f" containerName="extract-content" Jan 28 16:30:00 crc kubenswrapper[4811]: E0128 16:30:00.152886 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8b8244f-85e7-4f95-84ef-bbbde5593d3f" containerName="registry-server" Jan 28 16:30:00 crc kubenswrapper[4811]: I0128 16:30:00.152894 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8b8244f-85e7-4f95-84ef-bbbde5593d3f" containerName="registry-server" Jan 28 16:30:00 crc kubenswrapper[4811]: I0128 16:30:00.153041 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8b8244f-85e7-4f95-84ef-bbbde5593d3f" containerName="registry-server" Jan 28 16:30:00 crc kubenswrapper[4811]: I0128 16:30:00.153640 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493630-njk86" Jan 28 16:30:00 crc kubenswrapper[4811]: I0128 16:30:00.155366 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 28 16:30:00 crc kubenswrapper[4811]: I0128 16:30:00.155448 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 28 16:30:00 crc kubenswrapper[4811]: I0128 16:30:00.170766 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493630-njk86"] Jan 28 16:30:00 crc kubenswrapper[4811]: I0128 16:30:00.306257 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e6cdc2d6-b33b-45b4-89ee-5cc3bb745442-config-volume\") pod \"collect-profiles-29493630-njk86\" (UID: \"e6cdc2d6-b33b-45b4-89ee-5cc3bb745442\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493630-njk86" Jan 28 16:30:00 crc kubenswrapper[4811]: I0128 16:30:00.306581 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sw5bd\" (UniqueName: \"kubernetes.io/projected/e6cdc2d6-b33b-45b4-89ee-5cc3bb745442-kube-api-access-sw5bd\") pod \"collect-profiles-29493630-njk86\" (UID: \"e6cdc2d6-b33b-45b4-89ee-5cc3bb745442\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493630-njk86" Jan 28 16:30:00 crc kubenswrapper[4811]: I0128 16:30:00.306649 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e6cdc2d6-b33b-45b4-89ee-5cc3bb745442-secret-volume\") pod \"collect-profiles-29493630-njk86\" (UID: \"e6cdc2d6-b33b-45b4-89ee-5cc3bb745442\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493630-njk86" Jan 28 16:30:00 crc kubenswrapper[4811]: I0128 16:30:00.408391 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e6cdc2d6-b33b-45b4-89ee-5cc3bb745442-secret-volume\") pod \"collect-profiles-29493630-njk86\" (UID: \"e6cdc2d6-b33b-45b4-89ee-5cc3bb745442\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493630-njk86" Jan 28 16:30:00 crc kubenswrapper[4811]: I0128 16:30:00.408532 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e6cdc2d6-b33b-45b4-89ee-5cc3bb745442-config-volume\") pod \"collect-profiles-29493630-njk86\" (UID: \"e6cdc2d6-b33b-45b4-89ee-5cc3bb745442\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493630-njk86" Jan 28 16:30:00 crc kubenswrapper[4811]: I0128 16:30:00.408574 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sw5bd\" (UniqueName: \"kubernetes.io/projected/e6cdc2d6-b33b-45b4-89ee-5cc3bb745442-kube-api-access-sw5bd\") pod \"collect-profiles-29493630-njk86\" (UID: \"e6cdc2d6-b33b-45b4-89ee-5cc3bb745442\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493630-njk86" Jan 28 16:30:00 crc kubenswrapper[4811]: I0128 16:30:00.409533 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e6cdc2d6-b33b-45b4-89ee-5cc3bb745442-config-volume\") pod 
\"collect-profiles-29493630-njk86\" (UID: \"e6cdc2d6-b33b-45b4-89ee-5cc3bb745442\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493630-njk86" Jan 28 16:30:00 crc kubenswrapper[4811]: I0128 16:30:00.421269 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e6cdc2d6-b33b-45b4-89ee-5cc3bb745442-secret-volume\") pod \"collect-profiles-29493630-njk86\" (UID: \"e6cdc2d6-b33b-45b4-89ee-5cc3bb745442\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493630-njk86" Jan 28 16:30:00 crc kubenswrapper[4811]: I0128 16:30:00.426332 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sw5bd\" (UniqueName: \"kubernetes.io/projected/e6cdc2d6-b33b-45b4-89ee-5cc3bb745442-kube-api-access-sw5bd\") pod \"collect-profiles-29493630-njk86\" (UID: \"e6cdc2d6-b33b-45b4-89ee-5cc3bb745442\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493630-njk86" Jan 28 16:30:00 crc kubenswrapper[4811]: I0128 16:30:00.487562 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493630-njk86" Jan 28 16:30:00 crc kubenswrapper[4811]: I0128 16:30:00.908491 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493630-njk86"] Jan 28 16:30:00 crc kubenswrapper[4811]: W0128 16:30:00.916998 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode6cdc2d6_b33b_45b4_89ee_5cc3bb745442.slice/crio-7c52d992645f7ea2c989638c8f7729ad2a7f887da6d1ddf49b03651949a82564 WatchSource:0}: Error finding container 7c52d992645f7ea2c989638c8f7729ad2a7f887da6d1ddf49b03651949a82564: Status 404 returned error can't find the container with id 7c52d992645f7ea2c989638c8f7729ad2a7f887da6d1ddf49b03651949a82564 Jan 28 16:30:00 crc kubenswrapper[4811]: I0128 16:30:00.934350 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493630-njk86" event={"ID":"e6cdc2d6-b33b-45b4-89ee-5cc3bb745442","Type":"ContainerStarted","Data":"7c52d992645f7ea2c989638c8f7729ad2a7f887da6d1ddf49b03651949a82564"} Jan 28 16:30:01 crc kubenswrapper[4811]: I0128 16:30:01.942551 4811 generic.go:334] "Generic (PLEG): container finished" podID="e6cdc2d6-b33b-45b4-89ee-5cc3bb745442" containerID="854068093eb5667b6e160cae55ea3c917e1ef54848975ac41a508e0ada8ac6c0" exitCode=0 Jan 28 16:30:01 crc kubenswrapper[4811]: I0128 16:30:01.942607 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493630-njk86" event={"ID":"e6cdc2d6-b33b-45b4-89ee-5cc3bb745442","Type":"ContainerDied","Data":"854068093eb5667b6e160cae55ea3c917e1ef54848975ac41a508e0ada8ac6c0"} Jan 28 16:30:03 crc kubenswrapper[4811]: I0128 16:30:03.087368 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 16:30:03 crc kubenswrapper[4811]: I0128 16:30:03.087421 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 16:30:03 crc kubenswrapper[4811]: I0128 16:30:03.087491 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" Jan 28 16:30:03 crc kubenswrapper[4811]: I0128 16:30:03.088070 4811 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bf9b4f2403fde04329fdc551c9926f4fafdc3da91e03d66ec5f8cf60e3e0b1f5"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 28 16:30:03 crc kubenswrapper[4811]: I0128 16:30:03.088134 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://bf9b4f2403fde04329fdc551c9926f4fafdc3da91e03d66ec5f8cf60e3e0b1f5" gracePeriod=600 Jan 28 16:30:03 crc kubenswrapper[4811]: I0128 16:30:03.233047 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493630-njk86" Jan 28 16:30:03 crc kubenswrapper[4811]: I0128 16:30:03.346980 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sw5bd\" (UniqueName: \"kubernetes.io/projected/e6cdc2d6-b33b-45b4-89ee-5cc3bb745442-kube-api-access-sw5bd\") pod \"e6cdc2d6-b33b-45b4-89ee-5cc3bb745442\" (UID: \"e6cdc2d6-b33b-45b4-89ee-5cc3bb745442\") " Jan 28 16:30:03 crc kubenswrapper[4811]: I0128 16:30:03.347044 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e6cdc2d6-b33b-45b4-89ee-5cc3bb745442-secret-volume\") pod \"e6cdc2d6-b33b-45b4-89ee-5cc3bb745442\" (UID: \"e6cdc2d6-b33b-45b4-89ee-5cc3bb745442\") " Jan 28 16:30:03 crc kubenswrapper[4811]: I0128 16:30:03.347710 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e6cdc2d6-b33b-45b4-89ee-5cc3bb745442-config-volume\") pod \"e6cdc2d6-b33b-45b4-89ee-5cc3bb745442\" (UID: \"e6cdc2d6-b33b-45b4-89ee-5cc3bb745442\") " Jan 28 16:30:03 crc kubenswrapper[4811]: I0128 16:30:03.348217 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6cdc2d6-b33b-45b4-89ee-5cc3bb745442-config-volume" (OuterVolumeSpecName: "config-volume") pod "e6cdc2d6-b33b-45b4-89ee-5cc3bb745442" (UID: "e6cdc2d6-b33b-45b4-89ee-5cc3bb745442"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:30:03 crc kubenswrapper[4811]: I0128 16:30:03.353444 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6cdc2d6-b33b-45b4-89ee-5cc3bb745442-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e6cdc2d6-b33b-45b4-89ee-5cc3bb745442" (UID: "e6cdc2d6-b33b-45b4-89ee-5cc3bb745442"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:30:03 crc kubenswrapper[4811]: I0128 16:30:03.353885 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6cdc2d6-b33b-45b4-89ee-5cc3bb745442-kube-api-access-sw5bd" (OuterVolumeSpecName: "kube-api-access-sw5bd") pod "e6cdc2d6-b33b-45b4-89ee-5cc3bb745442" (UID: "e6cdc2d6-b33b-45b4-89ee-5cc3bb745442"). InnerVolumeSpecName "kube-api-access-sw5bd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:30:03 crc kubenswrapper[4811]: I0128 16:30:03.449954 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sw5bd\" (UniqueName: \"kubernetes.io/projected/e6cdc2d6-b33b-45b4-89ee-5cc3bb745442-kube-api-access-sw5bd\") on node \"crc\" DevicePath \"\"" Jan 28 16:30:03 crc kubenswrapper[4811]: I0128 16:30:03.450360 4811 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e6cdc2d6-b33b-45b4-89ee-5cc3bb745442-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 28 16:30:03 crc kubenswrapper[4811]: I0128 16:30:03.450416 4811 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e6cdc2d6-b33b-45b4-89ee-5cc3bb745442-config-volume\") on node \"crc\" DevicePath \"\"" Jan 28 16:30:03 crc kubenswrapper[4811]: I0128 16:30:03.961373 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="bf9b4f2403fde04329fdc551c9926f4fafdc3da91e03d66ec5f8cf60e3e0b1f5" exitCode=0 Jan 28 16:30:03 crc kubenswrapper[4811]: I0128 16:30:03.961458 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"bf9b4f2403fde04329fdc551c9926f4fafdc3da91e03d66ec5f8cf60e3e0b1f5"} Jan 28 16:30:03 crc kubenswrapper[4811]: I0128 16:30:03.963397 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"1ccfa1cd695e65c777a56a197ee5578a56a866d6b7b888bc4fc5cc2fabcbdf4b"} Jan 28 16:30:03 crc kubenswrapper[4811]: I0128 16:30:03.963502 4811 scope.go:117] "RemoveContainer" containerID="6354f4d56ac8ccab41aec8d744b7d40a4703f3f73187c0ee9dfe15ec4ec4fc7b" Jan 28 16:30:03 crc kubenswrapper[4811]: I0128 16:30:03.965202 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493630-njk86" event={"ID":"e6cdc2d6-b33b-45b4-89ee-5cc3bb745442","Type":"ContainerDied","Data":"7c52d992645f7ea2c989638c8f7729ad2a7f887da6d1ddf49b03651949a82564"} Jan 28 16:30:03 crc kubenswrapper[4811]: I0128 16:30:03.965255 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493630-njk86" Jan 28 16:30:03 crc kubenswrapper[4811]: I0128 16:30:03.965271 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7c52d992645f7ea2c989638c8f7729ad2a7f887da6d1ddf49b03651949a82564" Jan 28 16:30:04 crc kubenswrapper[4811]: I0128 16:30:04.301824 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493585-8md6j"] Jan 28 16:30:04 crc kubenswrapper[4811]: I0128 16:30:04.308246 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493585-8md6j"] Jan 28 16:30:04 crc kubenswrapper[4811]: I0128 16:30:04.348192 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78a73da1-92b0-4724-90aa-1a8f5aa3e2ec" path="/var/lib/kubelet/pods/78a73da1-92b0-4724-90aa-1a8f5aa3e2ec/volumes" Jan 28 16:30:29 crc kubenswrapper[4811]: I0128 16:30:29.137497 4811 scope.go:117] "RemoveContainer" containerID="5bbce93fed947b56c71593c12958888367245730eb8aefe087e57cda86ee80bf" Jan 28 16:32:03 crc kubenswrapper[4811]: I0128 16:32:03.087296 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 16:32:03 crc kubenswrapper[4811]: I0128 16:32:03.087754 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 16:32:11 crc kubenswrapper[4811]: I0128 16:32:11.761498 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-gkplb"] Jan 28 16:32:11 crc kubenswrapper[4811]: E0128 16:32:11.762377 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6cdc2d6-b33b-45b4-89ee-5cc3bb745442" containerName="collect-profiles" Jan 28 16:32:11 crc kubenswrapper[4811]: I0128 16:32:11.762392 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6cdc2d6-b33b-45b4-89ee-5cc3bb745442" containerName="collect-profiles" Jan 28 16:32:11 crc kubenswrapper[4811]: I0128 16:32:11.762584 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6cdc2d6-b33b-45b4-89ee-5cc3bb745442" containerName="collect-profiles" Jan 28 16:32:11 crc kubenswrapper[4811]: I0128 16:32:11.763845 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gkplb" Jan 28 16:32:11 crc kubenswrapper[4811]: I0128 16:32:11.787565 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gkplb"] Jan 28 16:32:11 crc kubenswrapper[4811]: I0128 16:32:11.861643 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b37394d3-1bd0-4fe6-8eba-ec006409698d-utilities\") pod \"certified-operators-gkplb\" (UID: \"b37394d3-1bd0-4fe6-8eba-ec006409698d\") " pod="openshift-marketplace/certified-operators-gkplb" Jan 28 16:32:11 crc kubenswrapper[4811]: I0128 16:32:11.861725 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b37394d3-1bd0-4fe6-8eba-ec006409698d-catalog-content\") pod \"certified-operators-gkplb\" (UID: \"b37394d3-1bd0-4fe6-8eba-ec006409698d\") " pod="openshift-marketplace/certified-operators-gkplb" Jan 28 16:32:11 crc kubenswrapper[4811]: I0128 16:32:11.861774 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnn52\" (UniqueName: \"kubernetes.io/projected/b37394d3-1bd0-4fe6-8eba-ec006409698d-kube-api-access-qnn52\") pod \"certified-operators-gkplb\" (UID: \"b37394d3-1bd0-4fe6-8eba-ec006409698d\") " pod="openshift-marketplace/certified-operators-gkplb" Jan 28 16:32:11 crc kubenswrapper[4811]: I0128 16:32:11.963141 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnn52\" (UniqueName: \"kubernetes.io/projected/b37394d3-1bd0-4fe6-8eba-ec006409698d-kube-api-access-qnn52\") pod \"certified-operators-gkplb\" (UID: \"b37394d3-1bd0-4fe6-8eba-ec006409698d\") " pod="openshift-marketplace/certified-operators-gkplb" Jan 28 16:32:11 crc kubenswrapper[4811]: I0128 16:32:11.963241 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b37394d3-1bd0-4fe6-8eba-ec006409698d-utilities\") pod \"certified-operators-gkplb\" (UID: \"b37394d3-1bd0-4fe6-8eba-ec006409698d\") " pod="openshift-marketplace/certified-operators-gkplb" Jan 28 16:32:11 crc kubenswrapper[4811]: I0128 16:32:11.963797 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b37394d3-1bd0-4fe6-8eba-ec006409698d-utilities\") pod \"certified-operators-gkplb\" (UID: \"b37394d3-1bd0-4fe6-8eba-ec006409698d\") " pod="openshift-marketplace/certified-operators-gkplb" Jan 28 16:32:11 crc kubenswrapper[4811]: I0128 16:32:11.963902 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b37394d3-1bd0-4fe6-8eba-ec006409698d-catalog-content\") pod \"certified-operators-gkplb\" (UID: \"b37394d3-1bd0-4fe6-8eba-ec006409698d\") " pod="openshift-marketplace/certified-operators-gkplb" Jan 28 16:32:11 crc kubenswrapper[4811]: I0128 16:32:11.964188 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b37394d3-1bd0-4fe6-8eba-ec006409698d-catalog-content\") pod \"certified-operators-gkplb\" (UID: \"b37394d3-1bd0-4fe6-8eba-ec006409698d\") " pod="openshift-marketplace/certified-operators-gkplb" Jan 28 16:32:11 crc kubenswrapper[4811]: I0128 16:32:11.997886 4811 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-qnn52\" (UniqueName: \"kubernetes.io/projected/b37394d3-1bd0-4fe6-8eba-ec006409698d-kube-api-access-qnn52\") pod \"certified-operators-gkplb\" (UID: \"b37394d3-1bd0-4fe6-8eba-ec006409698d\") " pod="openshift-marketplace/certified-operators-gkplb" Jan 28 16:32:12 crc kubenswrapper[4811]: I0128 16:32:12.078520 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gkplb" Jan 28 16:32:12 crc kubenswrapper[4811]: I0128 16:32:12.542727 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gkplb"] Jan 28 16:32:12 crc kubenswrapper[4811]: I0128 16:32:12.922373 4811 generic.go:334] "Generic (PLEG): container finished" podID="b37394d3-1bd0-4fe6-8eba-ec006409698d" containerID="3686866479fb656d7c56270d04f49a127c6993afa595d247412d0deca791d702" exitCode=0 Jan 28 16:32:12 crc kubenswrapper[4811]: I0128 16:32:12.922517 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gkplb" event={"ID":"b37394d3-1bd0-4fe6-8eba-ec006409698d","Type":"ContainerDied","Data":"3686866479fb656d7c56270d04f49a127c6993afa595d247412d0deca791d702"} Jan 28 16:32:12 crc kubenswrapper[4811]: I0128 16:32:12.922729 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gkplb" event={"ID":"b37394d3-1bd0-4fe6-8eba-ec006409698d","Type":"ContainerStarted","Data":"f804c81492372f85e7ed34e01cb751452fa546e130998d310366551c4635537e"} Jan 28 16:32:12 crc kubenswrapper[4811]: I0128 16:32:12.924608 4811 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 28 16:32:13 crc kubenswrapper[4811]: I0128 16:32:13.928989 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gkplb" event={"ID":"b37394d3-1bd0-4fe6-8eba-ec006409698d","Type":"ContainerStarted","Data":"d333f42e01602ad24ae996711ccc30624e3b63be27c2804cb98455d7a39f508d"} Jan 28 16:32:14 crc kubenswrapper[4811]: I0128 16:32:14.935908 4811 generic.go:334] "Generic (PLEG): container finished" podID="b37394d3-1bd0-4fe6-8eba-ec006409698d" containerID="d333f42e01602ad24ae996711ccc30624e3b63be27c2804cb98455d7a39f508d" exitCode=0 Jan 28 16:32:14 crc kubenswrapper[4811]: I0128 16:32:14.935974 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gkplb" event={"ID":"b37394d3-1bd0-4fe6-8eba-ec006409698d","Type":"ContainerDied","Data":"d333f42e01602ad24ae996711ccc30624e3b63be27c2804cb98455d7a39f508d"} Jan 28 16:32:15 crc kubenswrapper[4811]: I0128 16:32:15.950312 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gkplb" event={"ID":"b37394d3-1bd0-4fe6-8eba-ec006409698d","Type":"ContainerStarted","Data":"b8e65bbf4517494e3df5f254d286b8619d7ebba4bec2c2ab06b3efa26c6da478"} Jan 28 16:32:15 crc kubenswrapper[4811]: I0128 16:32:15.982850 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-gkplb" podStartSLOduration=2.573446084 podStartE2EDuration="4.98283235s" podCreationTimestamp="2026-01-28 16:32:11 +0000 UTC" firstStartedPulling="2026-01-28 16:32:12.924197229 +0000 UTC m=+2825.678560842" lastFinishedPulling="2026-01-28 16:32:15.333583505 +0000 UTC m=+2828.087947108" observedRunningTime="2026-01-28 16:32:15.980382383 +0000 UTC m=+2828.734745966" watchObservedRunningTime="2026-01-28 
16:32:15.98283235 +0000 UTC m=+2828.737195953" Jan 28 16:32:22 crc kubenswrapper[4811]: I0128 16:32:22.078919 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-gkplb" Jan 28 16:32:22 crc kubenswrapper[4811]: I0128 16:32:22.080109 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-gkplb" Jan 28 16:32:22 crc kubenswrapper[4811]: I0128 16:32:22.117630 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-gkplb" Jan 28 16:32:23 crc kubenswrapper[4811]: I0128 16:32:23.073046 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-gkplb" Jan 28 16:32:23 crc kubenswrapper[4811]: I0128 16:32:23.145750 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gkplb"] Jan 28 16:32:25 crc kubenswrapper[4811]: I0128 16:32:25.012551 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-gkplb" podUID="b37394d3-1bd0-4fe6-8eba-ec006409698d" containerName="registry-server" containerID="cri-o://b8e65bbf4517494e3df5f254d286b8619d7ebba4bec2c2ab06b3efa26c6da478" gracePeriod=2 Jan 28 16:32:25 crc kubenswrapper[4811]: E0128 16:32:25.093500 4811 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb37394d3_1bd0_4fe6_8eba_ec006409698d.slice/crio-b8e65bbf4517494e3df5f254d286b8619d7ebba4bec2c2ab06b3efa26c6da478.scope\": RecentStats: unable to find data in memory cache]" Jan 28 16:32:25 crc kubenswrapper[4811]: I0128 16:32:25.416960 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gkplb" Jan 28 16:32:25 crc kubenswrapper[4811]: I0128 16:32:25.473622 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qnn52\" (UniqueName: \"kubernetes.io/projected/b37394d3-1bd0-4fe6-8eba-ec006409698d-kube-api-access-qnn52\") pod \"b37394d3-1bd0-4fe6-8eba-ec006409698d\" (UID: \"b37394d3-1bd0-4fe6-8eba-ec006409698d\") " Jan 28 16:32:25 crc kubenswrapper[4811]: I0128 16:32:25.473715 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b37394d3-1bd0-4fe6-8eba-ec006409698d-catalog-content\") pod \"b37394d3-1bd0-4fe6-8eba-ec006409698d\" (UID: \"b37394d3-1bd0-4fe6-8eba-ec006409698d\") " Jan 28 16:32:25 crc kubenswrapper[4811]: I0128 16:32:25.473849 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b37394d3-1bd0-4fe6-8eba-ec006409698d-utilities\") pod \"b37394d3-1bd0-4fe6-8eba-ec006409698d\" (UID: \"b37394d3-1bd0-4fe6-8eba-ec006409698d\") " Jan 28 16:32:25 crc kubenswrapper[4811]: I0128 16:32:25.474822 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b37394d3-1bd0-4fe6-8eba-ec006409698d-utilities" (OuterVolumeSpecName: "utilities") pod "b37394d3-1bd0-4fe6-8eba-ec006409698d" (UID: "b37394d3-1bd0-4fe6-8eba-ec006409698d"). InnerVolumeSpecName "utilities". 
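"Killing container with a grace period" above is the usual two-step stop: deliver SIGTERM, wait up to gracePeriod seconds (2 for the catalog pods here, 600 for machine-config-daemon), then SIGKILL. A minimal sketch against a locally started process; the real kubelet issues the stop through the CRI rather than raw signals:

    package main

    import (
        "os/exec"
        "syscall"
        "time"
    )

    func main() {
        cmd := exec.Command("sleep", "30")
        if err := cmd.Start(); err != nil {
            panic(err)
        }
        _ = cmd.Process.Signal(syscall.SIGTERM) // step 1: polite termination request
        done := make(chan error, 1)
        go func() { done <- cmd.Wait() }()
        select {
        case <-done:
            // exited within the grace period (the exitCode=0 case in the log)
        case <-time.After(2 * time.Second): // gracePeriod=2, as for the catalog pods
            _ = cmd.Process.Kill() // step 2: SIGKILL after the deadline
            <-done
        }
    }

The registry-server containers above handle SIGTERM and exit with code 0 inside the two-second window, so the SIGKILL branch never fires in this log.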
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:32:25 crc kubenswrapper[4811]: I0128 16:32:25.481634 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b37394d3-1bd0-4fe6-8eba-ec006409698d-kube-api-access-qnn52" (OuterVolumeSpecName: "kube-api-access-qnn52") pod "b37394d3-1bd0-4fe6-8eba-ec006409698d" (UID: "b37394d3-1bd0-4fe6-8eba-ec006409698d"). InnerVolumeSpecName "kube-api-access-qnn52". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:32:25 crc kubenswrapper[4811]: I0128 16:32:25.531740 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b37394d3-1bd0-4fe6-8eba-ec006409698d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b37394d3-1bd0-4fe6-8eba-ec006409698d" (UID: "b37394d3-1bd0-4fe6-8eba-ec006409698d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:32:25 crc kubenswrapper[4811]: I0128 16:32:25.575749 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b37394d3-1bd0-4fe6-8eba-ec006409698d-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 16:32:25 crc kubenswrapper[4811]: I0128 16:32:25.575801 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qnn52\" (UniqueName: \"kubernetes.io/projected/b37394d3-1bd0-4fe6-8eba-ec006409698d-kube-api-access-qnn52\") on node \"crc\" DevicePath \"\"" Jan 28 16:32:25 crc kubenswrapper[4811]: I0128 16:32:25.575816 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b37394d3-1bd0-4fe6-8eba-ec006409698d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 16:32:26 crc kubenswrapper[4811]: I0128 16:32:26.026879 4811 generic.go:334] "Generic (PLEG): container finished" podID="b37394d3-1bd0-4fe6-8eba-ec006409698d" containerID="b8e65bbf4517494e3df5f254d286b8619d7ebba4bec2c2ab06b3efa26c6da478" exitCode=0 Jan 28 16:32:26 crc kubenswrapper[4811]: I0128 16:32:26.026930 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gkplb" event={"ID":"b37394d3-1bd0-4fe6-8eba-ec006409698d","Type":"ContainerDied","Data":"b8e65bbf4517494e3df5f254d286b8619d7ebba4bec2c2ab06b3efa26c6da478"} Jan 28 16:32:26 crc kubenswrapper[4811]: I0128 16:32:26.026960 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gkplb" event={"ID":"b37394d3-1bd0-4fe6-8eba-ec006409698d","Type":"ContainerDied","Data":"f804c81492372f85e7ed34e01cb751452fa546e130998d310366551c4635537e"} Jan 28 16:32:26 crc kubenswrapper[4811]: I0128 16:32:26.026962 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gkplb" Jan 28 16:32:26 crc kubenswrapper[4811]: I0128 16:32:26.027006 4811 scope.go:117] "RemoveContainer" containerID="b8e65bbf4517494e3df5f254d286b8619d7ebba4bec2c2ab06b3efa26c6da478" Jan 28 16:32:26 crc kubenswrapper[4811]: I0128 16:32:26.066597 4811 scope.go:117] "RemoveContainer" containerID="d333f42e01602ad24ae996711ccc30624e3b63be27c2804cb98455d7a39f508d" Jan 28 16:32:26 crc kubenswrapper[4811]: I0128 16:32:26.068559 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gkplb"] Jan 28 16:32:26 crc kubenswrapper[4811]: I0128 16:32:26.074263 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-gkplb"] Jan 28 16:32:26 crc kubenswrapper[4811]: I0128 16:32:26.109642 4811 scope.go:117] "RemoveContainer" containerID="3686866479fb656d7c56270d04f49a127c6993afa595d247412d0deca791d702" Jan 28 16:32:26 crc kubenswrapper[4811]: I0128 16:32:26.129992 4811 scope.go:117] "RemoveContainer" containerID="b8e65bbf4517494e3df5f254d286b8619d7ebba4bec2c2ab06b3efa26c6da478" Jan 28 16:32:26 crc kubenswrapper[4811]: E0128 16:32:26.131104 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8e65bbf4517494e3df5f254d286b8619d7ebba4bec2c2ab06b3efa26c6da478\": container with ID starting with b8e65bbf4517494e3df5f254d286b8619d7ebba4bec2c2ab06b3efa26c6da478 not found: ID does not exist" containerID="b8e65bbf4517494e3df5f254d286b8619d7ebba4bec2c2ab06b3efa26c6da478" Jan 28 16:32:26 crc kubenswrapper[4811]: I0128 16:32:26.131154 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8e65bbf4517494e3df5f254d286b8619d7ebba4bec2c2ab06b3efa26c6da478"} err="failed to get container status \"b8e65bbf4517494e3df5f254d286b8619d7ebba4bec2c2ab06b3efa26c6da478\": rpc error: code = NotFound desc = could not find container \"b8e65bbf4517494e3df5f254d286b8619d7ebba4bec2c2ab06b3efa26c6da478\": container with ID starting with b8e65bbf4517494e3df5f254d286b8619d7ebba4bec2c2ab06b3efa26c6da478 not found: ID does not exist" Jan 28 16:32:26 crc kubenswrapper[4811]: I0128 16:32:26.131188 4811 scope.go:117] "RemoveContainer" containerID="d333f42e01602ad24ae996711ccc30624e3b63be27c2804cb98455d7a39f508d" Jan 28 16:32:26 crc kubenswrapper[4811]: E0128 16:32:26.131638 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d333f42e01602ad24ae996711ccc30624e3b63be27c2804cb98455d7a39f508d\": container with ID starting with d333f42e01602ad24ae996711ccc30624e3b63be27c2804cb98455d7a39f508d not found: ID does not exist" containerID="d333f42e01602ad24ae996711ccc30624e3b63be27c2804cb98455d7a39f508d" Jan 28 16:32:26 crc kubenswrapper[4811]: I0128 16:32:26.131676 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d333f42e01602ad24ae996711ccc30624e3b63be27c2804cb98455d7a39f508d"} err="failed to get container status \"d333f42e01602ad24ae996711ccc30624e3b63be27c2804cb98455d7a39f508d\": rpc error: code = NotFound desc = could not find container \"d333f42e01602ad24ae996711ccc30624e3b63be27c2804cb98455d7a39f508d\": container with ID starting with d333f42e01602ad24ae996711ccc30624e3b63be27c2804cb98455d7a39f508d not found: ID does not exist" Jan 28 16:32:26 crc kubenswrapper[4811]: I0128 16:32:26.131705 4811 scope.go:117] "RemoveContainer" 
containerID="3686866479fb656d7c56270d04f49a127c6993afa595d247412d0deca791d702" Jan 28 16:32:26 crc kubenswrapper[4811]: E0128 16:32:26.132026 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3686866479fb656d7c56270d04f49a127c6993afa595d247412d0deca791d702\": container with ID starting with 3686866479fb656d7c56270d04f49a127c6993afa595d247412d0deca791d702 not found: ID does not exist" containerID="3686866479fb656d7c56270d04f49a127c6993afa595d247412d0deca791d702" Jan 28 16:32:26 crc kubenswrapper[4811]: I0128 16:32:26.132089 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3686866479fb656d7c56270d04f49a127c6993afa595d247412d0deca791d702"} err="failed to get container status \"3686866479fb656d7c56270d04f49a127c6993afa595d247412d0deca791d702\": rpc error: code = NotFound desc = could not find container \"3686866479fb656d7c56270d04f49a127c6993afa595d247412d0deca791d702\": container with ID starting with 3686866479fb656d7c56270d04f49a127c6993afa595d247412d0deca791d702 not found: ID does not exist" Jan 28 16:32:26 crc kubenswrapper[4811]: I0128 16:32:26.348928 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b37394d3-1bd0-4fe6-8eba-ec006409698d" path="/var/lib/kubelet/pods/b37394d3-1bd0-4fe6-8eba-ec006409698d/volumes" Jan 28 16:32:33 crc kubenswrapper[4811]: I0128 16:32:33.086918 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 16:32:33 crc kubenswrapper[4811]: I0128 16:32:33.087255 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 16:33:03 crc kubenswrapper[4811]: I0128 16:33:03.087810 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 16:33:03 crc kubenswrapper[4811]: I0128 16:33:03.088407 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 16:33:03 crc kubenswrapper[4811]: I0128 16:33:03.088518 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" Jan 28 16:33:03 crc kubenswrapper[4811]: I0128 16:33:03.089154 4811 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1ccfa1cd695e65c777a56a197ee5578a56a866d6b7b888bc4fc5cc2fabcbdf4b"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 28 16:33:03 crc 
Jan 28 16:33:03 crc kubenswrapper[4811]: I0128 16:33:03.089226 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://1ccfa1cd695e65c777a56a197ee5578a56a866d6b7b888bc4fc5cc2fabcbdf4b" gracePeriod=600
Jan 28 16:33:03 crc kubenswrapper[4811]: E0128 16:33:03.220874 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 16:33:03 crc kubenswrapper[4811]: I0128 16:33:03.311350 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="1ccfa1cd695e65c777a56a197ee5578a56a866d6b7b888bc4fc5cc2fabcbdf4b" exitCode=0
Jan 28 16:33:03 crc kubenswrapper[4811]: I0128 16:33:03.311399 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"1ccfa1cd695e65c777a56a197ee5578a56a866d6b7b888bc4fc5cc2fabcbdf4b"}
Jan 28 16:33:03 crc kubenswrapper[4811]: I0128 16:33:03.311460 4811 scope.go:117] "RemoveContainer" containerID="bf9b4f2403fde04329fdc551c9926f4fafdc3da91e03d66ec5f8cf60e3e0b1f5"
Jan 28 16:33:03 crc kubenswrapper[4811]: I0128 16:33:03.312614 4811 scope.go:117] "RemoveContainer" containerID="1ccfa1cd695e65c777a56a197ee5578a56a866d6b7b888bc4fc5cc2fabcbdf4b"
Jan 28 16:33:03 crc kubenswrapper[4811]: E0128 16:33:03.313407 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 16:33:09 crc kubenswrapper[4811]: I0128 16:33:09.195193 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rjmdt"]
Jan 28 16:33:09 crc kubenswrapper[4811]: E0128 16:33:09.195958 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b37394d3-1bd0-4fe6-8eba-ec006409698d" containerName="registry-server"
Jan 28 16:33:09 crc kubenswrapper[4811]: I0128 16:33:09.195971 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="b37394d3-1bd0-4fe6-8eba-ec006409698d" containerName="registry-server"
Jan 28 16:33:09 crc kubenswrapper[4811]: E0128 16:33:09.195984 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b37394d3-1bd0-4fe6-8eba-ec006409698d" containerName="extract-content"
Jan 28 16:33:09 crc kubenswrapper[4811]: I0128 16:33:09.195993 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="b37394d3-1bd0-4fe6-8eba-ec006409698d" containerName="extract-content"
Jan 28 16:33:09 crc kubenswrapper[4811]: E0128 16:33:09.196010 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b37394d3-1bd0-4fe6-8eba-ec006409698d" containerName="extract-utilities"
Jan 28 16:33:09 crc kubenswrapper[4811]: I0128 16:33:09.196017 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="b37394d3-1bd0-4fe6-8eba-ec006409698d" containerName="extract-utilities"
Jan 28 16:33:09 crc kubenswrapper[4811]: I0128 16:33:09.196141 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="b37394d3-1bd0-4fe6-8eba-ec006409698d" containerName="registry-server"
Jan 28 16:33:09 crc kubenswrapper[4811]: I0128 16:33:09.197049 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rjmdt"
Jan 28 16:33:09 crc kubenswrapper[4811]: I0128 16:33:09.214185 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rjmdt"]
Jan 28 16:33:09 crc kubenswrapper[4811]: I0128 16:33:09.278703 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6276x\" (UniqueName: \"kubernetes.io/projected/5142d5f1-3b1d-40ac-9e60-3123318cd4af-kube-api-access-6276x\") pod \"community-operators-rjmdt\" (UID: \"5142d5f1-3b1d-40ac-9e60-3123318cd4af\") " pod="openshift-marketplace/community-operators-rjmdt"
Jan 28 16:33:09 crc kubenswrapper[4811]: I0128 16:33:09.278811 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5142d5f1-3b1d-40ac-9e60-3123318cd4af-catalog-content\") pod \"community-operators-rjmdt\" (UID: \"5142d5f1-3b1d-40ac-9e60-3123318cd4af\") " pod="openshift-marketplace/community-operators-rjmdt"
Jan 28 16:33:09 crc kubenswrapper[4811]: I0128 16:33:09.278838 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5142d5f1-3b1d-40ac-9e60-3123318cd4af-utilities\") pod \"community-operators-rjmdt\" (UID: \"5142d5f1-3b1d-40ac-9e60-3123318cd4af\") " pod="openshift-marketplace/community-operators-rjmdt"
Jan 28 16:33:09 crc kubenswrapper[4811]: I0128 16:33:09.379930 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6276x\" (UniqueName: \"kubernetes.io/projected/5142d5f1-3b1d-40ac-9e60-3123318cd4af-kube-api-access-6276x\") pod \"community-operators-rjmdt\" (UID: \"5142d5f1-3b1d-40ac-9e60-3123318cd4af\") " pod="openshift-marketplace/community-operators-rjmdt"
Jan 28 16:33:09 crc kubenswrapper[4811]: I0128 16:33:09.380339 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5142d5f1-3b1d-40ac-9e60-3123318cd4af-catalog-content\") pod \"community-operators-rjmdt\" (UID: \"5142d5f1-3b1d-40ac-9e60-3123318cd4af\") " pod="openshift-marketplace/community-operators-rjmdt"
Jan 28 16:33:09 crc kubenswrapper[4811]: I0128 16:33:09.380480 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5142d5f1-3b1d-40ac-9e60-3123318cd4af-utilities\") pod \"community-operators-rjmdt\" (UID: \"5142d5f1-3b1d-40ac-9e60-3123318cd4af\") " pod="openshift-marketplace/community-operators-rjmdt"
Jan 28 16:33:09 crc kubenswrapper[4811]: I0128 16:33:09.380914 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5142d5f1-3b1d-40ac-9e60-3123318cd4af-catalog-content\") pod \"community-operators-rjmdt\" (UID: \"5142d5f1-3b1d-40ac-9e60-3123318cd4af\") " pod="openshift-marketplace/community-operators-rjmdt"
Jan 28 16:33:09 crc kubenswrapper[4811]: I0128 16:33:09.380994 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5142d5f1-3b1d-40ac-9e60-3123318cd4af-utilities\") pod \"community-operators-rjmdt\" (UID: \"5142d5f1-3b1d-40ac-9e60-3123318cd4af\") " pod="openshift-marketplace/community-operators-rjmdt"
Jan 28 16:33:09 crc kubenswrapper[4811]: I0128 16:33:09.402226 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6276x\" (UniqueName: \"kubernetes.io/projected/5142d5f1-3b1d-40ac-9e60-3123318cd4af-kube-api-access-6276x\") pod \"community-operators-rjmdt\" (UID: \"5142d5f1-3b1d-40ac-9e60-3123318cd4af\") " pod="openshift-marketplace/community-operators-rjmdt"
Jan 28 16:33:09 crc kubenswrapper[4811]: I0128 16:33:09.513130 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rjmdt"
Jan 28 16:33:10 crc kubenswrapper[4811]: I0128 16:33:10.071393 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rjmdt"]
Jan 28 16:33:10 crc kubenswrapper[4811]: I0128 16:33:10.376074 4811 generic.go:334] "Generic (PLEG): container finished" podID="5142d5f1-3b1d-40ac-9e60-3123318cd4af" containerID="c584027a2145a0cc34072fac9f5e0f892725ce554d8a4fd8b8b4df2aefb2945a" exitCode=0
Jan 28 16:33:10 crc kubenswrapper[4811]: I0128 16:33:10.376116 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rjmdt" event={"ID":"5142d5f1-3b1d-40ac-9e60-3123318cd4af","Type":"ContainerDied","Data":"c584027a2145a0cc34072fac9f5e0f892725ce554d8a4fd8b8b4df2aefb2945a"}
Jan 28 16:33:10 crc kubenswrapper[4811]: I0128 16:33:10.376145 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rjmdt" event={"ID":"5142d5f1-3b1d-40ac-9e60-3123318cd4af","Type":"ContainerStarted","Data":"737c70fb247dfd26fd26c3a0f6d68d452f62499098ccee98025efa948cd124d8"}
Jan 28 16:33:11 crc kubenswrapper[4811]: I0128 16:33:11.384087 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rjmdt" event={"ID":"5142d5f1-3b1d-40ac-9e60-3123318cd4af","Type":"ContainerStarted","Data":"68dcbb5b291a50e3a59984b9988759c6397e058bd3beb44f4880355a8862eae8"}
Jan 28 16:33:12 crc kubenswrapper[4811]: I0128 16:33:12.393375 4811 generic.go:334] "Generic (PLEG): container finished" podID="5142d5f1-3b1d-40ac-9e60-3123318cd4af" containerID="68dcbb5b291a50e3a59984b9988759c6397e058bd3beb44f4880355a8862eae8" exitCode=0
Jan 28 16:33:12 crc kubenswrapper[4811]: I0128 16:33:12.393472 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rjmdt" event={"ID":"5142d5f1-3b1d-40ac-9e60-3123318cd4af","Type":"ContainerDied","Data":"68dcbb5b291a50e3a59984b9988759c6397e058bd3beb44f4880355a8862eae8"}
Jan 28 16:33:13 crc kubenswrapper[4811]: I0128 16:33:13.403536 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rjmdt" event={"ID":"5142d5f1-3b1d-40ac-9e60-3123318cd4af","Type":"ContainerStarted","Data":"a0c97da665b02dfb0dec17e1995895d08c89287096bd4233803721141260cdff"}
Jan 28 16:33:13 crc kubenswrapper[4811]: I0128 16:33:13.437552 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rjmdt" podStartSLOduration=1.949876054 podStartE2EDuration="4.437521238s" podCreationTimestamp="2026-01-28 16:33:09 +0000 UTC" firstStartedPulling="2026-01-28 16:33:10.37830433 +0000 UTC m=+2883.132667913" lastFinishedPulling="2026-01-28 16:33:12.865949524 +0000 UTC m=+2885.620313097" observedRunningTime="2026-01-28 16:33:13.433680303 +0000 UTC m=+2886.188043886" watchObservedRunningTime="2026-01-28 16:33:13.437521238 +0000 UTC m=+2886.191884821"
Jan 28 16:33:16 crc kubenswrapper[4811]: I0128 16:33:16.340087 4811 scope.go:117] "RemoveContainer" containerID="1ccfa1cd695e65c777a56a197ee5578a56a866d6b7b888bc4fc5cc2fabcbdf4b"
Jan 28 16:33:16 crc kubenswrapper[4811]: E0128 16:33:16.340664 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 16:33:19 crc kubenswrapper[4811]: I0128 16:33:19.513581 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rjmdt"
Jan 28 16:33:19 crc kubenswrapper[4811]: I0128 16:33:19.513878 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rjmdt"
Jan 28 16:33:19 crc kubenswrapper[4811]: I0128 16:33:19.563281 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rjmdt"
Jan 28 16:33:20 crc kubenswrapper[4811]: I0128 16:33:20.539687 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rjmdt"
Jan 28 16:33:20 crc kubenswrapper[4811]: I0128 16:33:20.596163 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rjmdt"]
Jan 28 16:33:22 crc kubenswrapper[4811]: I0128 16:33:22.492575 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rjmdt" podUID="5142d5f1-3b1d-40ac-9e60-3123318cd4af" containerName="registry-server" containerID="cri-o://a0c97da665b02dfb0dec17e1995895d08c89287096bd4233803721141260cdff" gracePeriod=2
Jan 28 16:33:23 crc kubenswrapper[4811]: I0128 16:33:23.003550 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rjmdt"
Jan 28 16:33:23 crc kubenswrapper[4811]: I0128 16:33:23.087532 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5142d5f1-3b1d-40ac-9e60-3123318cd4af-utilities\") pod \"5142d5f1-3b1d-40ac-9e60-3123318cd4af\" (UID: \"5142d5f1-3b1d-40ac-9e60-3123318cd4af\") "
Jan 28 16:33:23 crc kubenswrapper[4811]: I0128 16:33:23.087675 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6276x\" (UniqueName: \"kubernetes.io/projected/5142d5f1-3b1d-40ac-9e60-3123318cd4af-kube-api-access-6276x\") pod \"5142d5f1-3b1d-40ac-9e60-3123318cd4af\" (UID: \"5142d5f1-3b1d-40ac-9e60-3123318cd4af\") "
Jan 28 16:33:23 crc kubenswrapper[4811]: I0128 16:33:23.087696 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5142d5f1-3b1d-40ac-9e60-3123318cd4af-catalog-content\") pod \"5142d5f1-3b1d-40ac-9e60-3123318cd4af\" (UID: \"5142d5f1-3b1d-40ac-9e60-3123318cd4af\") "
Jan 28 16:33:23 crc kubenswrapper[4811]: I0128 16:33:23.088865 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5142d5f1-3b1d-40ac-9e60-3123318cd4af-utilities" (OuterVolumeSpecName: "utilities") pod "5142d5f1-3b1d-40ac-9e60-3123318cd4af" (UID: "5142d5f1-3b1d-40ac-9e60-3123318cd4af"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 16:33:23 crc kubenswrapper[4811]: I0128 16:33:23.094640 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5142d5f1-3b1d-40ac-9e60-3123318cd4af-kube-api-access-6276x" (OuterVolumeSpecName: "kube-api-access-6276x") pod "5142d5f1-3b1d-40ac-9e60-3123318cd4af" (UID: "5142d5f1-3b1d-40ac-9e60-3123318cd4af"). InnerVolumeSpecName "kube-api-access-6276x". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 16:33:23 crc kubenswrapper[4811]: I0128 16:33:23.139234 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5142d5f1-3b1d-40ac-9e60-3123318cd4af-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5142d5f1-3b1d-40ac-9e60-3123318cd4af" (UID: "5142d5f1-3b1d-40ac-9e60-3123318cd4af"). InnerVolumeSpecName "catalog-content".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:33:23 crc kubenswrapper[4811]: I0128 16:33:23.189074 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6276x\" (UniqueName: \"kubernetes.io/projected/5142d5f1-3b1d-40ac-9e60-3123318cd4af-kube-api-access-6276x\") on node \"crc\" DevicePath \"\"" Jan 28 16:33:23 crc kubenswrapper[4811]: I0128 16:33:23.189113 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5142d5f1-3b1d-40ac-9e60-3123318cd4af-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 16:33:23 crc kubenswrapper[4811]: I0128 16:33:23.189126 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5142d5f1-3b1d-40ac-9e60-3123318cd4af-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 16:33:23 crc kubenswrapper[4811]: I0128 16:33:23.502079 4811 generic.go:334] "Generic (PLEG): container finished" podID="5142d5f1-3b1d-40ac-9e60-3123318cd4af" containerID="a0c97da665b02dfb0dec17e1995895d08c89287096bd4233803721141260cdff" exitCode=0 Jan 28 16:33:23 crc kubenswrapper[4811]: I0128 16:33:23.502121 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rjmdt" event={"ID":"5142d5f1-3b1d-40ac-9e60-3123318cd4af","Type":"ContainerDied","Data":"a0c97da665b02dfb0dec17e1995895d08c89287096bd4233803721141260cdff"} Jan 28 16:33:23 crc kubenswrapper[4811]: I0128 16:33:23.502165 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rjmdt" Jan 28 16:33:23 crc kubenswrapper[4811]: I0128 16:33:23.502182 4811 scope.go:117] "RemoveContainer" containerID="a0c97da665b02dfb0dec17e1995895d08c89287096bd4233803721141260cdff" Jan 28 16:33:23 crc kubenswrapper[4811]: I0128 16:33:23.502169 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rjmdt" event={"ID":"5142d5f1-3b1d-40ac-9e60-3123318cd4af","Type":"ContainerDied","Data":"737c70fb247dfd26fd26c3a0f6d68d452f62499098ccee98025efa948cd124d8"} Jan 28 16:33:23 crc kubenswrapper[4811]: I0128 16:33:23.530782 4811 scope.go:117] "RemoveContainer" containerID="68dcbb5b291a50e3a59984b9988759c6397e058bd3beb44f4880355a8862eae8" Jan 28 16:33:23 crc kubenswrapper[4811]: I0128 16:33:23.554841 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rjmdt"] Jan 28 16:33:23 crc kubenswrapper[4811]: I0128 16:33:23.558754 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rjmdt"] Jan 28 16:33:23 crc kubenswrapper[4811]: I0128 16:33:23.562396 4811 scope.go:117] "RemoveContainer" containerID="c584027a2145a0cc34072fac9f5e0f892725ce554d8a4fd8b8b4df2aefb2945a" Jan 28 16:33:23 crc kubenswrapper[4811]: I0128 16:33:23.584889 4811 scope.go:117] "RemoveContainer" containerID="a0c97da665b02dfb0dec17e1995895d08c89287096bd4233803721141260cdff" Jan 28 16:33:23 crc kubenswrapper[4811]: E0128 16:33:23.585298 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0c97da665b02dfb0dec17e1995895d08c89287096bd4233803721141260cdff\": container with ID starting with a0c97da665b02dfb0dec17e1995895d08c89287096bd4233803721141260cdff not found: ID does not exist" containerID="a0c97da665b02dfb0dec17e1995895d08c89287096bd4233803721141260cdff" Jan 28 16:33:23 crc kubenswrapper[4811]: I0128 16:33:23.585345 
4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0c97da665b02dfb0dec17e1995895d08c89287096bd4233803721141260cdff"} err="failed to get container status \"a0c97da665b02dfb0dec17e1995895d08c89287096bd4233803721141260cdff\": rpc error: code = NotFound desc = could not find container \"a0c97da665b02dfb0dec17e1995895d08c89287096bd4233803721141260cdff\": container with ID starting with a0c97da665b02dfb0dec17e1995895d08c89287096bd4233803721141260cdff not found: ID does not exist" Jan 28 16:33:23 crc kubenswrapper[4811]: I0128 16:33:23.585378 4811 scope.go:117] "RemoveContainer" containerID="68dcbb5b291a50e3a59984b9988759c6397e058bd3beb44f4880355a8862eae8" Jan 28 16:33:23 crc kubenswrapper[4811]: E0128 16:33:23.585729 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68dcbb5b291a50e3a59984b9988759c6397e058bd3beb44f4880355a8862eae8\": container with ID starting with 68dcbb5b291a50e3a59984b9988759c6397e058bd3beb44f4880355a8862eae8 not found: ID does not exist" containerID="68dcbb5b291a50e3a59984b9988759c6397e058bd3beb44f4880355a8862eae8" Jan 28 16:33:23 crc kubenswrapper[4811]: I0128 16:33:23.585763 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68dcbb5b291a50e3a59984b9988759c6397e058bd3beb44f4880355a8862eae8"} err="failed to get container status \"68dcbb5b291a50e3a59984b9988759c6397e058bd3beb44f4880355a8862eae8\": rpc error: code = NotFound desc = could not find container \"68dcbb5b291a50e3a59984b9988759c6397e058bd3beb44f4880355a8862eae8\": container with ID starting with 68dcbb5b291a50e3a59984b9988759c6397e058bd3beb44f4880355a8862eae8 not found: ID does not exist" Jan 28 16:33:23 crc kubenswrapper[4811]: I0128 16:33:23.585781 4811 scope.go:117] "RemoveContainer" containerID="c584027a2145a0cc34072fac9f5e0f892725ce554d8a4fd8b8b4df2aefb2945a" Jan 28 16:33:23 crc kubenswrapper[4811]: E0128 16:33:23.586025 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c584027a2145a0cc34072fac9f5e0f892725ce554d8a4fd8b8b4df2aefb2945a\": container with ID starting with c584027a2145a0cc34072fac9f5e0f892725ce554d8a4fd8b8b4df2aefb2945a not found: ID does not exist" containerID="c584027a2145a0cc34072fac9f5e0f892725ce554d8a4fd8b8b4df2aefb2945a" Jan 28 16:33:23 crc kubenswrapper[4811]: I0128 16:33:23.586060 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c584027a2145a0cc34072fac9f5e0f892725ce554d8a4fd8b8b4df2aefb2945a"} err="failed to get container status \"c584027a2145a0cc34072fac9f5e0f892725ce554d8a4fd8b8b4df2aefb2945a\": rpc error: code = NotFound desc = could not find container \"c584027a2145a0cc34072fac9f5e0f892725ce554d8a4fd8b8b4df2aefb2945a\": container with ID starting with c584027a2145a0cc34072fac9f5e0f892725ce554d8a4fd8b8b4df2aefb2945a not found: ID does not exist" Jan 28 16:33:24 crc kubenswrapper[4811]: I0128 16:33:24.349688 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5142d5f1-3b1d-40ac-9e60-3123318cd4af" path="/var/lib/kubelet/pods/5142d5f1-3b1d-40ac-9e60-3123318cd4af/volumes" Jan 28 16:33:27 crc kubenswrapper[4811]: I0128 16:33:27.340243 4811 scope.go:117] "RemoveContainer" containerID="1ccfa1cd695e65c777a56a197ee5578a56a866d6b7b888bc4fc5cc2fabcbdf4b" Jan 28 16:33:27 crc kubenswrapper[4811]: E0128 16:33:27.342626 4811 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:33:40 crc kubenswrapper[4811]: I0128 16:33:40.340375 4811 scope.go:117] "RemoveContainer" containerID="1ccfa1cd695e65c777a56a197ee5578a56a866d6b7b888bc4fc5cc2fabcbdf4b" Jan 28 16:33:40 crc kubenswrapper[4811]: E0128 16:33:40.341136 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:33:51 crc kubenswrapper[4811]: I0128 16:33:51.340084 4811 scope.go:117] "RemoveContainer" containerID="1ccfa1cd695e65c777a56a197ee5578a56a866d6b7b888bc4fc5cc2fabcbdf4b" Jan 28 16:33:51 crc kubenswrapper[4811]: E0128 16:33:51.340798 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:34:06 crc kubenswrapper[4811]: I0128 16:34:06.339562 4811 scope.go:117] "RemoveContainer" containerID="1ccfa1cd695e65c777a56a197ee5578a56a866d6b7b888bc4fc5cc2fabcbdf4b" Jan 28 16:34:06 crc kubenswrapper[4811]: E0128 16:34:06.340361 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:34:21 crc kubenswrapper[4811]: I0128 16:34:21.339792 4811 scope.go:117] "RemoveContainer" containerID="1ccfa1cd695e65c777a56a197ee5578a56a866d6b7b888bc4fc5cc2fabcbdf4b" Jan 28 16:34:21 crc kubenswrapper[4811]: E0128 16:34:21.340384 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:34:33 crc kubenswrapper[4811]: I0128 16:34:33.338836 4811 scope.go:117] "RemoveContainer" containerID="1ccfa1cd695e65c777a56a197ee5578a56a866d6b7b888bc4fc5cc2fabcbdf4b" Jan 28 16:34:33 crc kubenswrapper[4811]: E0128 16:34:33.339546 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:34:48 crc kubenswrapper[4811]: I0128 16:34:48.339721 4811 scope.go:117] "RemoveContainer" containerID="1ccfa1cd695e65c777a56a197ee5578a56a866d6b7b888bc4fc5cc2fabcbdf4b" Jan 28 16:34:48 crc kubenswrapper[4811]: E0128 16:34:48.340603 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:34:59 crc kubenswrapper[4811]: I0128 16:34:59.339521 4811 scope.go:117] "RemoveContainer" containerID="1ccfa1cd695e65c777a56a197ee5578a56a866d6b7b888bc4fc5cc2fabcbdf4b" Jan 28 16:34:59 crc kubenswrapper[4811]: E0128 16:34:59.340564 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:35:14 crc kubenswrapper[4811]: I0128 16:35:14.339738 4811 scope.go:117] "RemoveContainer" containerID="1ccfa1cd695e65c777a56a197ee5578a56a866d6b7b888bc4fc5cc2fabcbdf4b" Jan 28 16:35:14 crc kubenswrapper[4811]: E0128 16:35:14.341401 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:35:28 crc kubenswrapper[4811]: I0128 16:35:28.346774 4811 scope.go:117] "RemoveContainer" containerID="1ccfa1cd695e65c777a56a197ee5578a56a866d6b7b888bc4fc5cc2fabcbdf4b" Jan 28 16:35:28 crc kubenswrapper[4811]: E0128 16:35:28.347742 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:35:39 crc kubenswrapper[4811]: I0128 16:35:39.339139 4811 scope.go:117] "RemoveContainer" containerID="1ccfa1cd695e65c777a56a197ee5578a56a866d6b7b888bc4fc5cc2fabcbdf4b" Jan 28 16:35:39 crc kubenswrapper[4811]: E0128 16:35:39.339954 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" 
podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:35:52 crc kubenswrapper[4811]: I0128 16:35:52.339239 4811 scope.go:117] "RemoveContainer" containerID="1ccfa1cd695e65c777a56a197ee5578a56a866d6b7b888bc4fc5cc2fabcbdf4b" Jan 28 16:35:52 crc kubenswrapper[4811]: E0128 16:35:52.340295 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:36:04 crc kubenswrapper[4811]: I0128 16:36:04.339511 4811 scope.go:117] "RemoveContainer" containerID="1ccfa1cd695e65c777a56a197ee5578a56a866d6b7b888bc4fc5cc2fabcbdf4b" Jan 28 16:36:04 crc kubenswrapper[4811]: E0128 16:36:04.340244 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:36:19 crc kubenswrapper[4811]: I0128 16:36:19.340211 4811 scope.go:117] "RemoveContainer" containerID="1ccfa1cd695e65c777a56a197ee5578a56a866d6b7b888bc4fc5cc2fabcbdf4b" Jan 28 16:36:19 crc kubenswrapper[4811]: E0128 16:36:19.342530 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:36:34 crc kubenswrapper[4811]: I0128 16:36:34.340007 4811 scope.go:117] "RemoveContainer" containerID="1ccfa1cd695e65c777a56a197ee5578a56a866d6b7b888bc4fc5cc2fabcbdf4b" Jan 28 16:36:34 crc kubenswrapper[4811]: E0128 16:36:34.341252 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:36:42 crc kubenswrapper[4811]: I0128 16:36:42.010644 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-wsn6v"] Jan 28 16:36:42 crc kubenswrapper[4811]: E0128 16:36:42.011686 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5142d5f1-3b1d-40ac-9e60-3123318cd4af" containerName="registry-server" Jan 28 16:36:42 crc kubenswrapper[4811]: I0128 16:36:42.011704 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="5142d5f1-3b1d-40ac-9e60-3123318cd4af" containerName="registry-server" Jan 28 16:36:42 crc kubenswrapper[4811]: E0128 16:36:42.011733 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5142d5f1-3b1d-40ac-9e60-3123318cd4af" containerName="extract-content" Jan 28 16:36:42 crc kubenswrapper[4811]: I0128 
16:36:42.011741 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="5142d5f1-3b1d-40ac-9e60-3123318cd4af" containerName="extract-content" Jan 28 16:36:42 crc kubenswrapper[4811]: E0128 16:36:42.011775 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5142d5f1-3b1d-40ac-9e60-3123318cd4af" containerName="extract-utilities" Jan 28 16:36:42 crc kubenswrapper[4811]: I0128 16:36:42.011785 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="5142d5f1-3b1d-40ac-9e60-3123318cd4af" containerName="extract-utilities" Jan 28 16:36:42 crc kubenswrapper[4811]: I0128 16:36:42.011945 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="5142d5f1-3b1d-40ac-9e60-3123318cd4af" containerName="registry-server" Jan 28 16:36:42 crc kubenswrapper[4811]: I0128 16:36:42.013169 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wsn6v" Jan 28 16:36:42 crc kubenswrapper[4811]: I0128 16:36:42.050613 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wsn6v"] Jan 28 16:36:42 crc kubenswrapper[4811]: I0128 16:36:42.121331 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe-catalog-content\") pod \"redhat-operators-wsn6v\" (UID: \"84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe\") " pod="openshift-marketplace/redhat-operators-wsn6v" Jan 28 16:36:42 crc kubenswrapper[4811]: I0128 16:36:42.121587 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe-utilities\") pod \"redhat-operators-wsn6v\" (UID: \"84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe\") " pod="openshift-marketplace/redhat-operators-wsn6v" Jan 28 16:36:42 crc kubenswrapper[4811]: I0128 16:36:42.121747 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5q99\" (UniqueName: \"kubernetes.io/projected/84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe-kube-api-access-f5q99\") pod \"redhat-operators-wsn6v\" (UID: \"84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe\") " pod="openshift-marketplace/redhat-operators-wsn6v" Jan 28 16:36:42 crc kubenswrapper[4811]: I0128 16:36:42.223696 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe-catalog-content\") pod \"redhat-operators-wsn6v\" (UID: \"84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe\") " pod="openshift-marketplace/redhat-operators-wsn6v" Jan 28 16:36:42 crc kubenswrapper[4811]: I0128 16:36:42.223763 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe-utilities\") pod \"redhat-operators-wsn6v\" (UID: \"84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe\") " pod="openshift-marketplace/redhat-operators-wsn6v" Jan 28 16:36:42 crc kubenswrapper[4811]: I0128 16:36:42.223814 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5q99\" (UniqueName: \"kubernetes.io/projected/84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe-kube-api-access-f5q99\") pod \"redhat-operators-wsn6v\" (UID: \"84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe\") " pod="openshift-marketplace/redhat-operators-wsn6v" Jan 28 16:36:42 crc kubenswrapper[4811]: 
I0128 16:36:42.224345 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe-utilities\") pod \"redhat-operators-wsn6v\" (UID: \"84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe\") " pod="openshift-marketplace/redhat-operators-wsn6v" Jan 28 16:36:42 crc kubenswrapper[4811]: I0128 16:36:42.224602 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe-catalog-content\") pod \"redhat-operators-wsn6v\" (UID: \"84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe\") " pod="openshift-marketplace/redhat-operators-wsn6v" Jan 28 16:36:42 crc kubenswrapper[4811]: I0128 16:36:42.243543 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5q99\" (UniqueName: \"kubernetes.io/projected/84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe-kube-api-access-f5q99\") pod \"redhat-operators-wsn6v\" (UID: \"84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe\") " pod="openshift-marketplace/redhat-operators-wsn6v" Jan 28 16:36:42 crc kubenswrapper[4811]: I0128 16:36:42.342470 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wsn6v" Jan 28 16:36:42 crc kubenswrapper[4811]: I0128 16:36:42.790243 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wsn6v"] Jan 28 16:36:43 crc kubenswrapper[4811]: I0128 16:36:43.103493 4811 generic.go:334] "Generic (PLEG): container finished" podID="84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe" containerID="d2f95f072e567ad255786b920604ca3ea932038bd6ca436468e42fbc10371e93" exitCode=0 Jan 28 16:36:43 crc kubenswrapper[4811]: I0128 16:36:43.103587 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wsn6v" event={"ID":"84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe","Type":"ContainerDied","Data":"d2f95f072e567ad255786b920604ca3ea932038bd6ca436468e42fbc10371e93"} Jan 28 16:36:43 crc kubenswrapper[4811]: I0128 16:36:43.103854 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wsn6v" event={"ID":"84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe","Type":"ContainerStarted","Data":"40c068d700f2d61128a8e2dc3edcdf6ce028282e8047c2634f8fd5f99bcda1fc"} Jan 28 16:36:45 crc kubenswrapper[4811]: I0128 16:36:45.122933 4811 generic.go:334] "Generic (PLEG): container finished" podID="84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe" containerID="a8de1860bb67e56fb8995086fb60cf0b7f0c05c7060be21b76e8b16512c987be" exitCode=0 Jan 28 16:36:45 crc kubenswrapper[4811]: I0128 16:36:45.123024 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wsn6v" event={"ID":"84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe","Type":"ContainerDied","Data":"a8de1860bb67e56fb8995086fb60cf0b7f0c05c7060be21b76e8b16512c987be"} Jan 28 16:36:46 crc kubenswrapper[4811]: I0128 16:36:46.132420 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wsn6v" event={"ID":"84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe","Type":"ContainerStarted","Data":"115cdbf5c71eaa19cfb94af2705281e45e5d1b26950f539cdb0258fbd23eb327"} Jan 28 16:36:46 crc kubenswrapper[4811]: I0128 16:36:46.154766 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-wsn6v" podStartSLOduration=2.717039366 podStartE2EDuration="5.154741466s" podCreationTimestamp="2026-01-28 16:36:41 
+0000 UTC" firstStartedPulling="2026-01-28 16:36:43.105277055 +0000 UTC m=+3095.859640638" lastFinishedPulling="2026-01-28 16:36:45.542979145 +0000 UTC m=+3098.297342738" observedRunningTime="2026-01-28 16:36:46.150088899 +0000 UTC m=+3098.904452492" watchObservedRunningTime="2026-01-28 16:36:46.154741466 +0000 UTC m=+3098.909105049" Jan 28 16:36:47 crc kubenswrapper[4811]: I0128 16:36:47.339464 4811 scope.go:117] "RemoveContainer" containerID="1ccfa1cd695e65c777a56a197ee5578a56a866d6b7b888bc4fc5cc2fabcbdf4b" Jan 28 16:36:47 crc kubenswrapper[4811]: E0128 16:36:47.339847 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:36:52 crc kubenswrapper[4811]: I0128 16:36:52.357874 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-wsn6v" Jan 28 16:36:52 crc kubenswrapper[4811]: I0128 16:36:52.360220 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-wsn6v" Jan 28 16:36:53 crc kubenswrapper[4811]: I0128 16:36:53.417681 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-wsn6v" podUID="84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe" containerName="registry-server" probeResult="failure" output=< Jan 28 16:36:53 crc kubenswrapper[4811]: timeout: failed to connect service ":50051" within 1s Jan 28 16:36:53 crc kubenswrapper[4811]: > Jan 28 16:36:59 crc kubenswrapper[4811]: I0128 16:36:59.381278 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xvpbb"] Jan 28 16:36:59 crc kubenswrapper[4811]: I0128 16:36:59.382913 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xvpbb" Jan 28 16:36:59 crc kubenswrapper[4811]: I0128 16:36:59.398048 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xvpbb"] Jan 28 16:36:59 crc kubenswrapper[4811]: I0128 16:36:59.487275 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60aeaf91-37ad-49d7-b721-c5cf1e387f1f-catalog-content\") pod \"redhat-marketplace-xvpbb\" (UID: \"60aeaf91-37ad-49d7-b721-c5cf1e387f1f\") " pod="openshift-marketplace/redhat-marketplace-xvpbb" Jan 28 16:36:59 crc kubenswrapper[4811]: I0128 16:36:59.487697 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwfw7\" (UniqueName: \"kubernetes.io/projected/60aeaf91-37ad-49d7-b721-c5cf1e387f1f-kube-api-access-wwfw7\") pod \"redhat-marketplace-xvpbb\" (UID: \"60aeaf91-37ad-49d7-b721-c5cf1e387f1f\") " pod="openshift-marketplace/redhat-marketplace-xvpbb" Jan 28 16:36:59 crc kubenswrapper[4811]: I0128 16:36:59.487799 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60aeaf91-37ad-49d7-b721-c5cf1e387f1f-utilities\") pod \"redhat-marketplace-xvpbb\" (UID: \"60aeaf91-37ad-49d7-b721-c5cf1e387f1f\") " pod="openshift-marketplace/redhat-marketplace-xvpbb" Jan 28 16:36:59 crc kubenswrapper[4811]: I0128 16:36:59.589006 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwfw7\" (UniqueName: \"kubernetes.io/projected/60aeaf91-37ad-49d7-b721-c5cf1e387f1f-kube-api-access-wwfw7\") pod \"redhat-marketplace-xvpbb\" (UID: \"60aeaf91-37ad-49d7-b721-c5cf1e387f1f\") " pod="openshift-marketplace/redhat-marketplace-xvpbb" Jan 28 16:36:59 crc kubenswrapper[4811]: I0128 16:36:59.589372 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60aeaf91-37ad-49d7-b721-c5cf1e387f1f-utilities\") pod \"redhat-marketplace-xvpbb\" (UID: \"60aeaf91-37ad-49d7-b721-c5cf1e387f1f\") " pod="openshift-marketplace/redhat-marketplace-xvpbb" Jan 28 16:36:59 crc kubenswrapper[4811]: I0128 16:36:59.589515 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60aeaf91-37ad-49d7-b721-c5cf1e387f1f-catalog-content\") pod \"redhat-marketplace-xvpbb\" (UID: \"60aeaf91-37ad-49d7-b721-c5cf1e387f1f\") " pod="openshift-marketplace/redhat-marketplace-xvpbb" Jan 28 16:36:59 crc kubenswrapper[4811]: I0128 16:36:59.589972 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60aeaf91-37ad-49d7-b721-c5cf1e387f1f-utilities\") pod \"redhat-marketplace-xvpbb\" (UID: \"60aeaf91-37ad-49d7-b721-c5cf1e387f1f\") " pod="openshift-marketplace/redhat-marketplace-xvpbb" Jan 28 16:36:59 crc kubenswrapper[4811]: I0128 16:36:59.589980 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60aeaf91-37ad-49d7-b721-c5cf1e387f1f-catalog-content\") pod \"redhat-marketplace-xvpbb\" (UID: \"60aeaf91-37ad-49d7-b721-c5cf1e387f1f\") " pod="openshift-marketplace/redhat-marketplace-xvpbb" Jan 28 16:36:59 crc kubenswrapper[4811]: I0128 16:36:59.610646 4811 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-wwfw7\" (UniqueName: \"kubernetes.io/projected/60aeaf91-37ad-49d7-b721-c5cf1e387f1f-kube-api-access-wwfw7\") pod \"redhat-marketplace-xvpbb\" (UID: \"60aeaf91-37ad-49d7-b721-c5cf1e387f1f\") " pod="openshift-marketplace/redhat-marketplace-xvpbb" Jan 28 16:36:59 crc kubenswrapper[4811]: I0128 16:36:59.705201 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xvpbb" Jan 28 16:36:59 crc kubenswrapper[4811]: I0128 16:36:59.972844 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xvpbb"] Jan 28 16:36:59 crc kubenswrapper[4811]: W0128 16:36:59.979213 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod60aeaf91_37ad_49d7_b721_c5cf1e387f1f.slice/crio-c440d9f410ec0b23d4355b3d170bf4282ca75b4452d148806bc2951c34d69de1 WatchSource:0}: Error finding container c440d9f410ec0b23d4355b3d170bf4282ca75b4452d148806bc2951c34d69de1: Status 404 returned error can't find the container with id c440d9f410ec0b23d4355b3d170bf4282ca75b4452d148806bc2951c34d69de1 Jan 28 16:37:00 crc kubenswrapper[4811]: I0128 16:37:00.245925 4811 generic.go:334] "Generic (PLEG): container finished" podID="60aeaf91-37ad-49d7-b721-c5cf1e387f1f" containerID="f9fa7ce81680b33c1f4c87bbacee90cdda621d6bf0db1e2dc04f8b96e0de73c1" exitCode=0 Jan 28 16:37:00 crc kubenswrapper[4811]: I0128 16:37:00.245972 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xvpbb" event={"ID":"60aeaf91-37ad-49d7-b721-c5cf1e387f1f","Type":"ContainerDied","Data":"f9fa7ce81680b33c1f4c87bbacee90cdda621d6bf0db1e2dc04f8b96e0de73c1"} Jan 28 16:37:00 crc kubenswrapper[4811]: I0128 16:37:00.245997 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xvpbb" event={"ID":"60aeaf91-37ad-49d7-b721-c5cf1e387f1f","Type":"ContainerStarted","Data":"c440d9f410ec0b23d4355b3d170bf4282ca75b4452d148806bc2951c34d69de1"} Jan 28 16:37:01 crc kubenswrapper[4811]: I0128 16:37:01.339663 4811 scope.go:117] "RemoveContainer" containerID="1ccfa1cd695e65c777a56a197ee5578a56a866d6b7b888bc4fc5cc2fabcbdf4b" Jan 28 16:37:01 crc kubenswrapper[4811]: E0128 16:37:01.340337 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:37:02 crc kubenswrapper[4811]: I0128 16:37:02.265406 4811 generic.go:334] "Generic (PLEG): container finished" podID="60aeaf91-37ad-49d7-b721-c5cf1e387f1f" containerID="d7dc790fbbe140319188bf00aa3078caf0126cdd73d402d0d79123ba6bcdd9dd" exitCode=0 Jan 28 16:37:02 crc kubenswrapper[4811]: I0128 16:37:02.265472 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xvpbb" event={"ID":"60aeaf91-37ad-49d7-b721-c5cf1e387f1f","Type":"ContainerDied","Data":"d7dc790fbbe140319188bf00aa3078caf0126cdd73d402d0d79123ba6bcdd9dd"} Jan 28 16:37:02 crc kubenswrapper[4811]: I0128 16:37:02.393021 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-wsn6v" Jan 28 16:37:02 crc 
kubenswrapper[4811]: I0128 16:37:02.442485 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-wsn6v" Jan 28 16:37:03 crc kubenswrapper[4811]: I0128 16:37:03.275134 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xvpbb" event={"ID":"60aeaf91-37ad-49d7-b721-c5cf1e387f1f","Type":"ContainerStarted","Data":"bfc10afeb86398a3870fd31ad576b890d3c0aa031f41a4b993636d1d72aafd7c"} Jan 28 16:37:03 crc kubenswrapper[4811]: I0128 16:37:03.303631 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xvpbb" podStartSLOduration=1.850528365 podStartE2EDuration="4.303610274s" podCreationTimestamp="2026-01-28 16:36:59 +0000 UTC" firstStartedPulling="2026-01-28 16:37:00.247607585 +0000 UTC m=+3113.001971178" lastFinishedPulling="2026-01-28 16:37:02.700689504 +0000 UTC m=+3115.455053087" observedRunningTime="2026-01-28 16:37:03.297565009 +0000 UTC m=+3116.051928612" watchObservedRunningTime="2026-01-28 16:37:03.303610274 +0000 UTC m=+3116.057973877" Jan 28 16:37:04 crc kubenswrapper[4811]: I0128 16:37:04.769069 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wsn6v"] Jan 28 16:37:04 crc kubenswrapper[4811]: I0128 16:37:04.769637 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-wsn6v" podUID="84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe" containerName="registry-server" containerID="cri-o://115cdbf5c71eaa19cfb94af2705281e45e5d1b26950f539cdb0258fbd23eb327" gracePeriod=2 Jan 28 16:37:05 crc kubenswrapper[4811]: I0128 16:37:05.189264 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wsn6v" Jan 28 16:37:05 crc kubenswrapper[4811]: I0128 16:37:05.289418 4811 generic.go:334] "Generic (PLEG): container finished" podID="84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe" containerID="115cdbf5c71eaa19cfb94af2705281e45e5d1b26950f539cdb0258fbd23eb327" exitCode=0 Jan 28 16:37:05 crc kubenswrapper[4811]: I0128 16:37:05.289466 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wsn6v" event={"ID":"84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe","Type":"ContainerDied","Data":"115cdbf5c71eaa19cfb94af2705281e45e5d1b26950f539cdb0258fbd23eb327"} Jan 28 16:37:05 crc kubenswrapper[4811]: I0128 16:37:05.289510 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wsn6v" Jan 28 16:37:05 crc kubenswrapper[4811]: I0128 16:37:05.289524 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wsn6v" event={"ID":"84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe","Type":"ContainerDied","Data":"40c068d700f2d61128a8e2dc3edcdf6ce028282e8047c2634f8fd5f99bcda1fc"} Jan 28 16:37:05 crc kubenswrapper[4811]: I0128 16:37:05.289543 4811 scope.go:117] "RemoveContainer" containerID="115cdbf5c71eaa19cfb94af2705281e45e5d1b26950f539cdb0258fbd23eb327" Jan 28 16:37:05 crc kubenswrapper[4811]: I0128 16:37:05.303897 4811 scope.go:117] "RemoveContainer" containerID="a8de1860bb67e56fb8995086fb60cf0b7f0c05c7060be21b76e8b16512c987be" Jan 28 16:37:05 crc kubenswrapper[4811]: I0128 16:37:05.328286 4811 scope.go:117] "RemoveContainer" containerID="d2f95f072e567ad255786b920604ca3ea932038bd6ca436468e42fbc10371e93" Jan 28 16:37:05 crc kubenswrapper[4811]: I0128 16:37:05.343480 4811 scope.go:117] "RemoveContainer" containerID="115cdbf5c71eaa19cfb94af2705281e45e5d1b26950f539cdb0258fbd23eb327" Jan 28 16:37:05 crc kubenswrapper[4811]: E0128 16:37:05.343955 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"115cdbf5c71eaa19cfb94af2705281e45e5d1b26950f539cdb0258fbd23eb327\": container with ID starting with 115cdbf5c71eaa19cfb94af2705281e45e5d1b26950f539cdb0258fbd23eb327 not found: ID does not exist" containerID="115cdbf5c71eaa19cfb94af2705281e45e5d1b26950f539cdb0258fbd23eb327" Jan 28 16:37:05 crc kubenswrapper[4811]: I0128 16:37:05.344009 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"115cdbf5c71eaa19cfb94af2705281e45e5d1b26950f539cdb0258fbd23eb327"} err="failed to get container status \"115cdbf5c71eaa19cfb94af2705281e45e5d1b26950f539cdb0258fbd23eb327\": rpc error: code = NotFound desc = could not find container \"115cdbf5c71eaa19cfb94af2705281e45e5d1b26950f539cdb0258fbd23eb327\": container with ID starting with 115cdbf5c71eaa19cfb94af2705281e45e5d1b26950f539cdb0258fbd23eb327 not found: ID does not exist" Jan 28 16:37:05 crc kubenswrapper[4811]: I0128 16:37:05.344035 4811 scope.go:117] "RemoveContainer" containerID="a8de1860bb67e56fb8995086fb60cf0b7f0c05c7060be21b76e8b16512c987be" Jan 28 16:37:05 crc kubenswrapper[4811]: E0128 16:37:05.344456 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8de1860bb67e56fb8995086fb60cf0b7f0c05c7060be21b76e8b16512c987be\": container with ID starting with a8de1860bb67e56fb8995086fb60cf0b7f0c05c7060be21b76e8b16512c987be not found: ID does not exist" containerID="a8de1860bb67e56fb8995086fb60cf0b7f0c05c7060be21b76e8b16512c987be" Jan 28 16:37:05 crc kubenswrapper[4811]: I0128 16:37:05.344475 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8de1860bb67e56fb8995086fb60cf0b7f0c05c7060be21b76e8b16512c987be"} err="failed to get container status \"a8de1860bb67e56fb8995086fb60cf0b7f0c05c7060be21b76e8b16512c987be\": rpc error: code = NotFound desc = could not find container \"a8de1860bb67e56fb8995086fb60cf0b7f0c05c7060be21b76e8b16512c987be\": container with ID starting with a8de1860bb67e56fb8995086fb60cf0b7f0c05c7060be21b76e8b16512c987be not found: ID does not exist" Jan 28 16:37:05 crc kubenswrapper[4811]: I0128 16:37:05.344487 4811 scope.go:117] "RemoveContainer" 
containerID="d2f95f072e567ad255786b920604ca3ea932038bd6ca436468e42fbc10371e93" Jan 28 16:37:05 crc kubenswrapper[4811]: E0128 16:37:05.344811 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d2f95f072e567ad255786b920604ca3ea932038bd6ca436468e42fbc10371e93\": container with ID starting with d2f95f072e567ad255786b920604ca3ea932038bd6ca436468e42fbc10371e93 not found: ID does not exist" containerID="d2f95f072e567ad255786b920604ca3ea932038bd6ca436468e42fbc10371e93" Jan 28 16:37:05 crc kubenswrapper[4811]: I0128 16:37:05.344832 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2f95f072e567ad255786b920604ca3ea932038bd6ca436468e42fbc10371e93"} err="failed to get container status \"d2f95f072e567ad255786b920604ca3ea932038bd6ca436468e42fbc10371e93\": rpc error: code = NotFound desc = could not find container \"d2f95f072e567ad255786b920604ca3ea932038bd6ca436468e42fbc10371e93\": container with ID starting with d2f95f072e567ad255786b920604ca3ea932038bd6ca436468e42fbc10371e93 not found: ID does not exist" Jan 28 16:37:05 crc kubenswrapper[4811]: I0128 16:37:05.369379 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe-catalog-content\") pod \"84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe\" (UID: \"84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe\") " Jan 28 16:37:05 crc kubenswrapper[4811]: I0128 16:37:05.369477 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f5q99\" (UniqueName: \"kubernetes.io/projected/84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe-kube-api-access-f5q99\") pod \"84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe\" (UID: \"84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe\") " Jan 28 16:37:05 crc kubenswrapper[4811]: I0128 16:37:05.369511 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe-utilities\") pod \"84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe\" (UID: \"84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe\") " Jan 28 16:37:05 crc kubenswrapper[4811]: I0128 16:37:05.370454 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe-utilities" (OuterVolumeSpecName: "utilities") pod "84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe" (UID: "84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:37:05 crc kubenswrapper[4811]: I0128 16:37:05.374941 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe-kube-api-access-f5q99" (OuterVolumeSpecName: "kube-api-access-f5q99") pod "84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe" (UID: "84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe"). InnerVolumeSpecName "kube-api-access-f5q99". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:37:05 crc kubenswrapper[4811]: I0128 16:37:05.470802 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f5q99\" (UniqueName: \"kubernetes.io/projected/84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe-kube-api-access-f5q99\") on node \"crc\" DevicePath \"\"" Jan 28 16:37:05 crc kubenswrapper[4811]: I0128 16:37:05.470854 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 16:37:05 crc kubenswrapper[4811]: I0128 16:37:05.509979 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe" (UID: "84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:37:05 crc kubenswrapper[4811]: I0128 16:37:05.571927 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 16:37:05 crc kubenswrapper[4811]: I0128 16:37:05.628796 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wsn6v"] Jan 28 16:37:05 crc kubenswrapper[4811]: I0128 16:37:05.633384 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-wsn6v"] Jan 28 16:37:06 crc kubenswrapper[4811]: I0128 16:37:06.348790 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe" path="/var/lib/kubelet/pods/84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe/volumes" Jan 28 16:37:09 crc kubenswrapper[4811]: I0128 16:37:09.705230 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xvpbb" Jan 28 16:37:09 crc kubenswrapper[4811]: I0128 16:37:09.705569 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xvpbb" Jan 28 16:37:09 crc kubenswrapper[4811]: I0128 16:37:09.748208 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xvpbb" Jan 28 16:37:10 crc kubenswrapper[4811]: I0128 16:37:10.376382 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xvpbb" Jan 28 16:37:10 crc kubenswrapper[4811]: I0128 16:37:10.436104 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xvpbb"] Jan 28 16:37:12 crc kubenswrapper[4811]: I0128 16:37:12.342192 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-xvpbb" podUID="60aeaf91-37ad-49d7-b721-c5cf1e387f1f" containerName="registry-server" containerID="cri-o://bfc10afeb86398a3870fd31ad576b890d3c0aa031f41a4b993636d1d72aafd7c" gracePeriod=2 Jan 28 16:37:12 crc kubenswrapper[4811]: I0128 16:37:12.835600 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xvpbb" Jan 28 16:37:12 crc kubenswrapper[4811]: I0128 16:37:12.983287 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60aeaf91-37ad-49d7-b721-c5cf1e387f1f-catalog-content\") pod \"60aeaf91-37ad-49d7-b721-c5cf1e387f1f\" (UID: \"60aeaf91-37ad-49d7-b721-c5cf1e387f1f\") " Jan 28 16:37:12 crc kubenswrapper[4811]: I0128 16:37:12.983352 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60aeaf91-37ad-49d7-b721-c5cf1e387f1f-utilities\") pod \"60aeaf91-37ad-49d7-b721-c5cf1e387f1f\" (UID: \"60aeaf91-37ad-49d7-b721-c5cf1e387f1f\") " Jan 28 16:37:12 crc kubenswrapper[4811]: I0128 16:37:12.983513 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wwfw7\" (UniqueName: \"kubernetes.io/projected/60aeaf91-37ad-49d7-b721-c5cf1e387f1f-kube-api-access-wwfw7\") pod \"60aeaf91-37ad-49d7-b721-c5cf1e387f1f\" (UID: \"60aeaf91-37ad-49d7-b721-c5cf1e387f1f\") " Jan 28 16:37:12 crc kubenswrapper[4811]: I0128 16:37:12.984820 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60aeaf91-37ad-49d7-b721-c5cf1e387f1f-utilities" (OuterVolumeSpecName: "utilities") pod "60aeaf91-37ad-49d7-b721-c5cf1e387f1f" (UID: "60aeaf91-37ad-49d7-b721-c5cf1e387f1f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:37:12 crc kubenswrapper[4811]: I0128 16:37:12.991734 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60aeaf91-37ad-49d7-b721-c5cf1e387f1f-kube-api-access-wwfw7" (OuterVolumeSpecName: "kube-api-access-wwfw7") pod "60aeaf91-37ad-49d7-b721-c5cf1e387f1f" (UID: "60aeaf91-37ad-49d7-b721-c5cf1e387f1f"). InnerVolumeSpecName "kube-api-access-wwfw7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:37:13 crc kubenswrapper[4811]: I0128 16:37:13.009254 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60aeaf91-37ad-49d7-b721-c5cf1e387f1f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "60aeaf91-37ad-49d7-b721-c5cf1e387f1f" (UID: "60aeaf91-37ad-49d7-b721-c5cf1e387f1f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:37:13 crc kubenswrapper[4811]: I0128 16:37:13.086012 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wwfw7\" (UniqueName: \"kubernetes.io/projected/60aeaf91-37ad-49d7-b721-c5cf1e387f1f-kube-api-access-wwfw7\") on node \"crc\" DevicePath \"\"" Jan 28 16:37:13 crc kubenswrapper[4811]: I0128 16:37:13.086070 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60aeaf91-37ad-49d7-b721-c5cf1e387f1f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 16:37:13 crc kubenswrapper[4811]: I0128 16:37:13.086089 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60aeaf91-37ad-49d7-b721-c5cf1e387f1f-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 16:37:13 crc kubenswrapper[4811]: I0128 16:37:13.353158 4811 generic.go:334] "Generic (PLEG): container finished" podID="60aeaf91-37ad-49d7-b721-c5cf1e387f1f" containerID="bfc10afeb86398a3870fd31ad576b890d3c0aa031f41a4b993636d1d72aafd7c" exitCode=0 Jan 28 16:37:13 crc kubenswrapper[4811]: I0128 16:37:13.353218 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xvpbb" event={"ID":"60aeaf91-37ad-49d7-b721-c5cf1e387f1f","Type":"ContainerDied","Data":"bfc10afeb86398a3870fd31ad576b890d3c0aa031f41a4b993636d1d72aafd7c"} Jan 28 16:37:13 crc kubenswrapper[4811]: I0128 16:37:13.353257 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xvpbb" event={"ID":"60aeaf91-37ad-49d7-b721-c5cf1e387f1f","Type":"ContainerDied","Data":"c440d9f410ec0b23d4355b3d170bf4282ca75b4452d148806bc2951c34d69de1"} Jan 28 16:37:13 crc kubenswrapper[4811]: I0128 16:37:13.353285 4811 scope.go:117] "RemoveContainer" containerID="bfc10afeb86398a3870fd31ad576b890d3c0aa031f41a4b993636d1d72aafd7c" Jan 28 16:37:13 crc kubenswrapper[4811]: I0128 16:37:13.353339 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xvpbb" Jan 28 16:37:13 crc kubenswrapper[4811]: I0128 16:37:13.382810 4811 scope.go:117] "RemoveContainer" containerID="d7dc790fbbe140319188bf00aa3078caf0126cdd73d402d0d79123ba6bcdd9dd" Jan 28 16:37:13 crc kubenswrapper[4811]: I0128 16:37:13.410657 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xvpbb"] Jan 28 16:37:13 crc kubenswrapper[4811]: I0128 16:37:13.411709 4811 scope.go:117] "RemoveContainer" containerID="f9fa7ce81680b33c1f4c87bbacee90cdda621d6bf0db1e2dc04f8b96e0de73c1" Jan 28 16:37:13 crc kubenswrapper[4811]: I0128 16:37:13.415627 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-xvpbb"] Jan 28 16:37:13 crc kubenswrapper[4811]: I0128 16:37:13.438401 4811 scope.go:117] "RemoveContainer" containerID="bfc10afeb86398a3870fd31ad576b890d3c0aa031f41a4b993636d1d72aafd7c" Jan 28 16:37:13 crc kubenswrapper[4811]: E0128 16:37:13.438975 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bfc10afeb86398a3870fd31ad576b890d3c0aa031f41a4b993636d1d72aafd7c\": container with ID starting with bfc10afeb86398a3870fd31ad576b890d3c0aa031f41a4b993636d1d72aafd7c not found: ID does not exist" containerID="bfc10afeb86398a3870fd31ad576b890d3c0aa031f41a4b993636d1d72aafd7c" Jan 28 16:37:13 crc kubenswrapper[4811]: I0128 16:37:13.439022 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfc10afeb86398a3870fd31ad576b890d3c0aa031f41a4b993636d1d72aafd7c"} err="failed to get container status \"bfc10afeb86398a3870fd31ad576b890d3c0aa031f41a4b993636d1d72aafd7c\": rpc error: code = NotFound desc = could not find container \"bfc10afeb86398a3870fd31ad576b890d3c0aa031f41a4b993636d1d72aafd7c\": container with ID starting with bfc10afeb86398a3870fd31ad576b890d3c0aa031f41a4b993636d1d72aafd7c not found: ID does not exist" Jan 28 16:37:13 crc kubenswrapper[4811]: I0128 16:37:13.439043 4811 scope.go:117] "RemoveContainer" containerID="d7dc790fbbe140319188bf00aa3078caf0126cdd73d402d0d79123ba6bcdd9dd" Jan 28 16:37:13 crc kubenswrapper[4811]: E0128 16:37:13.439643 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7dc790fbbe140319188bf00aa3078caf0126cdd73d402d0d79123ba6bcdd9dd\": container with ID starting with d7dc790fbbe140319188bf00aa3078caf0126cdd73d402d0d79123ba6bcdd9dd not found: ID does not exist" containerID="d7dc790fbbe140319188bf00aa3078caf0126cdd73d402d0d79123ba6bcdd9dd" Jan 28 16:37:13 crc kubenswrapper[4811]: I0128 16:37:13.439700 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7dc790fbbe140319188bf00aa3078caf0126cdd73d402d0d79123ba6bcdd9dd"} err="failed to get container status \"d7dc790fbbe140319188bf00aa3078caf0126cdd73d402d0d79123ba6bcdd9dd\": rpc error: code = NotFound desc = could not find container \"d7dc790fbbe140319188bf00aa3078caf0126cdd73d402d0d79123ba6bcdd9dd\": container with ID starting with d7dc790fbbe140319188bf00aa3078caf0126cdd73d402d0d79123ba6bcdd9dd not found: ID does not exist" Jan 28 16:37:13 crc kubenswrapper[4811]: I0128 16:37:13.439714 4811 scope.go:117] "RemoveContainer" containerID="f9fa7ce81680b33c1f4c87bbacee90cdda621d6bf0db1e2dc04f8b96e0de73c1" Jan 28 16:37:13 crc kubenswrapper[4811]: E0128 16:37:13.439980 4811 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"f9fa7ce81680b33c1f4c87bbacee90cdda621d6bf0db1e2dc04f8b96e0de73c1\": container with ID starting with f9fa7ce81680b33c1f4c87bbacee90cdda621d6bf0db1e2dc04f8b96e0de73c1 not found: ID does not exist" containerID="f9fa7ce81680b33c1f4c87bbacee90cdda621d6bf0db1e2dc04f8b96e0de73c1" Jan 28 16:37:13 crc kubenswrapper[4811]: I0128 16:37:13.440000 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9fa7ce81680b33c1f4c87bbacee90cdda621d6bf0db1e2dc04f8b96e0de73c1"} err="failed to get container status \"f9fa7ce81680b33c1f4c87bbacee90cdda621d6bf0db1e2dc04f8b96e0de73c1\": rpc error: code = NotFound desc = could not find container \"f9fa7ce81680b33c1f4c87bbacee90cdda621d6bf0db1e2dc04f8b96e0de73c1\": container with ID starting with f9fa7ce81680b33c1f4c87bbacee90cdda621d6bf0db1e2dc04f8b96e0de73c1 not found: ID does not exist" Jan 28 16:37:14 crc kubenswrapper[4811]: I0128 16:37:14.349021 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60aeaf91-37ad-49d7-b721-c5cf1e387f1f" path="/var/lib/kubelet/pods/60aeaf91-37ad-49d7-b721-c5cf1e387f1f/volumes" Jan 28 16:37:15 crc kubenswrapper[4811]: I0128 16:37:15.340040 4811 scope.go:117] "RemoveContainer" containerID="1ccfa1cd695e65c777a56a197ee5578a56a866d6b7b888bc4fc5cc2fabcbdf4b" Jan 28 16:37:15 crc kubenswrapper[4811]: E0128 16:37:15.340494 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:37:29 crc kubenswrapper[4811]: I0128 16:37:29.340950 4811 scope.go:117] "RemoveContainer" containerID="1ccfa1cd695e65c777a56a197ee5578a56a866d6b7b888bc4fc5cc2fabcbdf4b" Jan 28 16:37:29 crc kubenswrapper[4811]: E0128 16:37:29.343792 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:37:44 crc kubenswrapper[4811]: I0128 16:37:44.339339 4811 scope.go:117] "RemoveContainer" containerID="1ccfa1cd695e65c777a56a197ee5578a56a866d6b7b888bc4fc5cc2fabcbdf4b" Jan 28 16:37:44 crc kubenswrapper[4811]: E0128 16:37:44.341046 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:37:58 crc kubenswrapper[4811]: I0128 16:37:58.344599 4811 scope.go:117] "RemoveContainer" containerID="1ccfa1cd695e65c777a56a197ee5578a56a866d6b7b888bc4fc5cc2fabcbdf4b" Jan 28 16:37:58 crc kubenswrapper[4811]: E0128 16:37:58.345729 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:38:10 crc kubenswrapper[4811]: I0128 16:38:10.340198 4811 scope.go:117] "RemoveContainer" containerID="1ccfa1cd695e65c777a56a197ee5578a56a866d6b7b888bc4fc5cc2fabcbdf4b" Jan 28 16:38:10 crc kubenswrapper[4811]: I0128 16:38:10.801320 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"41313aa46ebf1a83bdff88072d5c0f56155fe00e8db31998639dcd516b2bee16"} Jan 28 16:40:33 crc kubenswrapper[4811]: I0128 16:40:33.087196 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 16:40:33 crc kubenswrapper[4811]: I0128 16:40:33.088063 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 16:41:03 crc kubenswrapper[4811]: I0128 16:41:03.087334 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 16:41:03 crc kubenswrapper[4811]: I0128 16:41:03.088141 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 16:41:33 crc kubenswrapper[4811]: I0128 16:41:33.087195 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 16:41:33 crc kubenswrapper[4811]: I0128 16:41:33.089708 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 16:41:33 crc kubenswrapper[4811]: I0128 16:41:33.089772 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" Jan 28 16:41:33 crc kubenswrapper[4811]: I0128 16:41:33.090622 4811 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"41313aa46ebf1a83bdff88072d5c0f56155fe00e8db31998639dcd516b2bee16"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 28 16:41:33 crc kubenswrapper[4811]: I0128 16:41:33.090763 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://41313aa46ebf1a83bdff88072d5c0f56155fe00e8db31998639dcd516b2bee16" gracePeriod=600 Jan 28 16:41:33 crc kubenswrapper[4811]: I0128 16:41:33.497217 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="41313aa46ebf1a83bdff88072d5c0f56155fe00e8db31998639dcd516b2bee16" exitCode=0 Jan 28 16:41:33 crc kubenswrapper[4811]: I0128 16:41:33.497421 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"41313aa46ebf1a83bdff88072d5c0f56155fe00e8db31998639dcd516b2bee16"} Jan 28 16:41:33 crc kubenswrapper[4811]: I0128 16:41:33.497613 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"c45eec3aa18190b78e9326894343a9c07c27b2874b1233774091df8571bf91d8"} Jan 28 16:41:33 crc kubenswrapper[4811]: I0128 16:41:33.497641 4811 scope.go:117] "RemoveContainer" containerID="1ccfa1cd695e65c777a56a197ee5578a56a866d6b7b888bc4fc5cc2fabcbdf4b" Jan 28 16:43:33 crc kubenswrapper[4811]: I0128 16:43:33.087333 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 16:43:33 crc kubenswrapper[4811]: I0128 16:43:33.088013 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 16:43:34 crc kubenswrapper[4811]: I0128 16:43:34.415841 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-bz72k"] Jan 28 16:43:34 crc kubenswrapper[4811]: E0128 16:43:34.416114 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe" containerName="registry-server" Jan 28 16:43:34 crc kubenswrapper[4811]: I0128 16:43:34.416127 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe" containerName="registry-server" Jan 28 16:43:34 crc kubenswrapper[4811]: E0128 16:43:34.416135 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60aeaf91-37ad-49d7-b721-c5cf1e387f1f" containerName="extract-utilities" Jan 28 16:43:34 crc kubenswrapper[4811]: I0128 16:43:34.416140 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="60aeaf91-37ad-49d7-b721-c5cf1e387f1f" containerName="extract-utilities" Jan 28 16:43:34 crc kubenswrapper[4811]: E0128 16:43:34.416158 4811 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe" containerName="extract-utilities" Jan 28 16:43:34 crc kubenswrapper[4811]: I0128 16:43:34.416165 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe" containerName="extract-utilities" Jan 28 16:43:34 crc kubenswrapper[4811]: E0128 16:43:34.416173 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe" containerName="extract-content" Jan 28 16:43:34 crc kubenswrapper[4811]: I0128 16:43:34.416179 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe" containerName="extract-content" Jan 28 16:43:34 crc kubenswrapper[4811]: E0128 16:43:34.416189 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60aeaf91-37ad-49d7-b721-c5cf1e387f1f" containerName="registry-server" Jan 28 16:43:34 crc kubenswrapper[4811]: I0128 16:43:34.416195 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="60aeaf91-37ad-49d7-b721-c5cf1e387f1f" containerName="registry-server" Jan 28 16:43:34 crc kubenswrapper[4811]: E0128 16:43:34.416209 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60aeaf91-37ad-49d7-b721-c5cf1e387f1f" containerName="extract-content" Jan 28 16:43:34 crc kubenswrapper[4811]: I0128 16:43:34.416214 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="60aeaf91-37ad-49d7-b721-c5cf1e387f1f" containerName="extract-content" Jan 28 16:43:34 crc kubenswrapper[4811]: I0128 16:43:34.416331 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="60aeaf91-37ad-49d7-b721-c5cf1e387f1f" containerName="registry-server" Jan 28 16:43:34 crc kubenswrapper[4811]: I0128 16:43:34.416350 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="84a36e55-f31c-4b9a-8f1d-d261c2bf1bbe" containerName="registry-server" Jan 28 16:43:34 crc kubenswrapper[4811]: I0128 16:43:34.417270 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-bz72k" Jan 28 16:43:34 crc kubenswrapper[4811]: I0128 16:43:34.443912 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bz72k"] Jan 28 16:43:34 crc kubenswrapper[4811]: I0128 16:43:34.868509 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqcq9\" (UniqueName: \"kubernetes.io/projected/a7773d32-f6a6-4902-b4fc-e0e6839c71aa-kube-api-access-lqcq9\") pod \"certified-operators-bz72k\" (UID: \"a7773d32-f6a6-4902-b4fc-e0e6839c71aa\") " pod="openshift-marketplace/certified-operators-bz72k" Jan 28 16:43:34 crc kubenswrapper[4811]: I0128 16:43:34.868589 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7773d32-f6a6-4902-b4fc-e0e6839c71aa-utilities\") pod \"certified-operators-bz72k\" (UID: \"a7773d32-f6a6-4902-b4fc-e0e6839c71aa\") " pod="openshift-marketplace/certified-operators-bz72k" Jan 28 16:43:34 crc kubenswrapper[4811]: I0128 16:43:34.868662 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7773d32-f6a6-4902-b4fc-e0e6839c71aa-catalog-content\") pod \"certified-operators-bz72k\" (UID: \"a7773d32-f6a6-4902-b4fc-e0e6839c71aa\") " pod="openshift-marketplace/certified-operators-bz72k" Jan 28 16:43:34 crc kubenswrapper[4811]: I0128 16:43:34.971769 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7773d32-f6a6-4902-b4fc-e0e6839c71aa-catalog-content\") pod \"certified-operators-bz72k\" (UID: \"a7773d32-f6a6-4902-b4fc-e0e6839c71aa\") " pod="openshift-marketplace/certified-operators-bz72k" Jan 28 16:43:34 crc kubenswrapper[4811]: I0128 16:43:34.971958 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqcq9\" (UniqueName: \"kubernetes.io/projected/a7773d32-f6a6-4902-b4fc-e0e6839c71aa-kube-api-access-lqcq9\") pod \"certified-operators-bz72k\" (UID: \"a7773d32-f6a6-4902-b4fc-e0e6839c71aa\") " pod="openshift-marketplace/certified-operators-bz72k" Jan 28 16:43:34 crc kubenswrapper[4811]: I0128 16:43:34.971990 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7773d32-f6a6-4902-b4fc-e0e6839c71aa-utilities\") pod \"certified-operators-bz72k\" (UID: \"a7773d32-f6a6-4902-b4fc-e0e6839c71aa\") " pod="openshift-marketplace/certified-operators-bz72k" Jan 28 16:43:34 crc kubenswrapper[4811]: I0128 16:43:34.972846 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7773d32-f6a6-4902-b4fc-e0e6839c71aa-utilities\") pod \"certified-operators-bz72k\" (UID: \"a7773d32-f6a6-4902-b4fc-e0e6839c71aa\") " pod="openshift-marketplace/certified-operators-bz72k" Jan 28 16:43:34 crc kubenswrapper[4811]: I0128 16:43:34.973374 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7773d32-f6a6-4902-b4fc-e0e6839c71aa-catalog-content\") pod \"certified-operators-bz72k\" (UID: \"a7773d32-f6a6-4902-b4fc-e0e6839c71aa\") " pod="openshift-marketplace/certified-operators-bz72k" Jan 28 16:43:35 crc kubenswrapper[4811]: I0128 16:43:35.020051 4811 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-lqcq9\" (UniqueName: \"kubernetes.io/projected/a7773d32-f6a6-4902-b4fc-e0e6839c71aa-kube-api-access-lqcq9\") pod \"certified-operators-bz72k\" (UID: \"a7773d32-f6a6-4902-b4fc-e0e6839c71aa\") " pod="openshift-marketplace/certified-operators-bz72k" Jan 28 16:43:35 crc kubenswrapper[4811]: I0128 16:43:35.034980 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bz72k" Jan 28 16:43:35 crc kubenswrapper[4811]: I0128 16:43:35.503671 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bz72k"] Jan 28 16:43:35 crc kubenswrapper[4811]: I0128 16:43:35.982902 4811 generic.go:334] "Generic (PLEG): container finished" podID="a7773d32-f6a6-4902-b4fc-e0e6839c71aa" containerID="9aba5dc8a14d4b81d95a808c30e25c14e49440270ff433a4e7c8ed349efa49cf" exitCode=0 Jan 28 16:43:35 crc kubenswrapper[4811]: I0128 16:43:35.983008 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bz72k" event={"ID":"a7773d32-f6a6-4902-b4fc-e0e6839c71aa","Type":"ContainerDied","Data":"9aba5dc8a14d4b81d95a808c30e25c14e49440270ff433a4e7c8ed349efa49cf"} Jan 28 16:43:35 crc kubenswrapper[4811]: I0128 16:43:35.983333 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bz72k" event={"ID":"a7773d32-f6a6-4902-b4fc-e0e6839c71aa","Type":"ContainerStarted","Data":"b6131cb1387a661108ecf9a86fa70865aed9071541b4c6c967521ed2f409a6e4"} Jan 28 16:43:35 crc kubenswrapper[4811]: I0128 16:43:35.985053 4811 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 28 16:43:36 crc kubenswrapper[4811]: I0128 16:43:36.993709 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bz72k" event={"ID":"a7773d32-f6a6-4902-b4fc-e0e6839c71aa","Type":"ContainerStarted","Data":"9c7b65fa071cd031a2b21713d66e81f33a33cc5db447d594433be378c1b6c908"} Jan 28 16:43:38 crc kubenswrapper[4811]: I0128 16:43:38.003126 4811 generic.go:334] "Generic (PLEG): container finished" podID="a7773d32-f6a6-4902-b4fc-e0e6839c71aa" containerID="9c7b65fa071cd031a2b21713d66e81f33a33cc5db447d594433be378c1b6c908" exitCode=0 Jan 28 16:43:38 crc kubenswrapper[4811]: I0128 16:43:38.003205 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bz72k" event={"ID":"a7773d32-f6a6-4902-b4fc-e0e6839c71aa","Type":"ContainerDied","Data":"9c7b65fa071cd031a2b21713d66e81f33a33cc5db447d594433be378c1b6c908"} Jan 28 16:43:39 crc kubenswrapper[4811]: I0128 16:43:39.015647 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bz72k" event={"ID":"a7773d32-f6a6-4902-b4fc-e0e6839c71aa","Type":"ContainerStarted","Data":"30b4116e11926bcc78c8524017dccbbff7a2d799a3edec35d8a70c9900127736"} Jan 28 16:43:39 crc kubenswrapper[4811]: I0128 16:43:39.041146 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-bz72k" podStartSLOduration=2.612678708 podStartE2EDuration="5.041123108s" podCreationTimestamp="2026-01-28 16:43:34 +0000 UTC" firstStartedPulling="2026-01-28 16:43:35.984761068 +0000 UTC m=+3508.739124661" lastFinishedPulling="2026-01-28 16:43:38.413205468 +0000 UTC m=+3511.167569061" observedRunningTime="2026-01-28 16:43:39.033892822 +0000 UTC m=+3511.788256425" watchObservedRunningTime="2026-01-28 
16:43:39.041123108 +0000 UTC m=+3511.795486701" Jan 28 16:43:45 crc kubenswrapper[4811]: I0128 16:43:45.036046 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-bz72k" Jan 28 16:43:45 crc kubenswrapper[4811]: I0128 16:43:45.036667 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-bz72k" Jan 28 16:43:45 crc kubenswrapper[4811]: I0128 16:43:45.086412 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-bz72k" Jan 28 16:43:45 crc kubenswrapper[4811]: I0128 16:43:45.151344 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-bz72k" Jan 28 16:43:45 crc kubenswrapper[4811]: I0128 16:43:45.324006 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bz72k"] Jan 28 16:43:47 crc kubenswrapper[4811]: I0128 16:43:47.080253 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-bz72k" podUID="a7773d32-f6a6-4902-b4fc-e0e6839c71aa" containerName="registry-server" containerID="cri-o://30b4116e11926bcc78c8524017dccbbff7a2d799a3edec35d8a70c9900127736" gracePeriod=2 Jan 28 16:43:48 crc kubenswrapper[4811]: I0128 16:43:48.071689 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bz72k" Jan 28 16:43:48 crc kubenswrapper[4811]: I0128 16:43:48.092520 4811 generic.go:334] "Generic (PLEG): container finished" podID="a7773d32-f6a6-4902-b4fc-e0e6839c71aa" containerID="30b4116e11926bcc78c8524017dccbbff7a2d799a3edec35d8a70c9900127736" exitCode=0 Jan 28 16:43:48 crc kubenswrapper[4811]: I0128 16:43:48.092562 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bz72k" event={"ID":"a7773d32-f6a6-4902-b4fc-e0e6839c71aa","Type":"ContainerDied","Data":"30b4116e11926bcc78c8524017dccbbff7a2d799a3edec35d8a70c9900127736"} Jan 28 16:43:48 crc kubenswrapper[4811]: I0128 16:43:48.092590 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bz72k" event={"ID":"a7773d32-f6a6-4902-b4fc-e0e6839c71aa","Type":"ContainerDied","Data":"b6131cb1387a661108ecf9a86fa70865aed9071541b4c6c967521ed2f409a6e4"} Jan 28 16:43:48 crc kubenswrapper[4811]: I0128 16:43:48.092609 4811 scope.go:117] "RemoveContainer" containerID="30b4116e11926bcc78c8524017dccbbff7a2d799a3edec35d8a70c9900127736" Jan 28 16:43:48 crc kubenswrapper[4811]: I0128 16:43:48.092752 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-bz72k" Jan 28 16:43:48 crc kubenswrapper[4811]: I0128 16:43:48.118916 4811 scope.go:117] "RemoveContainer" containerID="9c7b65fa071cd031a2b21713d66e81f33a33cc5db447d594433be378c1b6c908" Jan 28 16:43:48 crc kubenswrapper[4811]: I0128 16:43:48.152872 4811 scope.go:117] "RemoveContainer" containerID="9aba5dc8a14d4b81d95a808c30e25c14e49440270ff433a4e7c8ed349efa49cf" Jan 28 16:43:48 crc kubenswrapper[4811]: I0128 16:43:48.178244 4811 scope.go:117] "RemoveContainer" containerID="30b4116e11926bcc78c8524017dccbbff7a2d799a3edec35d8a70c9900127736" Jan 28 16:43:48 crc kubenswrapper[4811]: E0128 16:43:48.183060 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"30b4116e11926bcc78c8524017dccbbff7a2d799a3edec35d8a70c9900127736\": container with ID starting with 30b4116e11926bcc78c8524017dccbbff7a2d799a3edec35d8a70c9900127736 not found: ID does not exist" containerID="30b4116e11926bcc78c8524017dccbbff7a2d799a3edec35d8a70c9900127736" Jan 28 16:43:48 crc kubenswrapper[4811]: I0128 16:43:48.183123 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"30b4116e11926bcc78c8524017dccbbff7a2d799a3edec35d8a70c9900127736"} err="failed to get container status \"30b4116e11926bcc78c8524017dccbbff7a2d799a3edec35d8a70c9900127736\": rpc error: code = NotFound desc = could not find container \"30b4116e11926bcc78c8524017dccbbff7a2d799a3edec35d8a70c9900127736\": container with ID starting with 30b4116e11926bcc78c8524017dccbbff7a2d799a3edec35d8a70c9900127736 not found: ID does not exist" Jan 28 16:43:48 crc kubenswrapper[4811]: I0128 16:43:48.183167 4811 scope.go:117] "RemoveContainer" containerID="9c7b65fa071cd031a2b21713d66e81f33a33cc5db447d594433be378c1b6c908" Jan 28 16:43:48 crc kubenswrapper[4811]: E0128 16:43:48.183944 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c7b65fa071cd031a2b21713d66e81f33a33cc5db447d594433be378c1b6c908\": container with ID starting with 9c7b65fa071cd031a2b21713d66e81f33a33cc5db447d594433be378c1b6c908 not found: ID does not exist" containerID="9c7b65fa071cd031a2b21713d66e81f33a33cc5db447d594433be378c1b6c908" Jan 28 16:43:48 crc kubenswrapper[4811]: I0128 16:43:48.184014 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c7b65fa071cd031a2b21713d66e81f33a33cc5db447d594433be378c1b6c908"} err="failed to get container status \"9c7b65fa071cd031a2b21713d66e81f33a33cc5db447d594433be378c1b6c908\": rpc error: code = NotFound desc = could not find container \"9c7b65fa071cd031a2b21713d66e81f33a33cc5db447d594433be378c1b6c908\": container with ID starting with 9c7b65fa071cd031a2b21713d66e81f33a33cc5db447d594433be378c1b6c908 not found: ID does not exist" Jan 28 16:43:48 crc kubenswrapper[4811]: I0128 16:43:48.184055 4811 scope.go:117] "RemoveContainer" containerID="9aba5dc8a14d4b81d95a808c30e25c14e49440270ff433a4e7c8ed349efa49cf" Jan 28 16:43:48 crc kubenswrapper[4811]: E0128 16:43:48.184393 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9aba5dc8a14d4b81d95a808c30e25c14e49440270ff433a4e7c8ed349efa49cf\": container with ID starting with 9aba5dc8a14d4b81d95a808c30e25c14e49440270ff433a4e7c8ed349efa49cf not found: ID does not exist" containerID="9aba5dc8a14d4b81d95a808c30e25c14e49440270ff433a4e7c8ed349efa49cf" 
Jan 28 16:43:48 crc kubenswrapper[4811]: I0128 16:43:48.184461 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9aba5dc8a14d4b81d95a808c30e25c14e49440270ff433a4e7c8ed349efa49cf"} err="failed to get container status \"9aba5dc8a14d4b81d95a808c30e25c14e49440270ff433a4e7c8ed349efa49cf\": rpc error: code = NotFound desc = could not find container \"9aba5dc8a14d4b81d95a808c30e25c14e49440270ff433a4e7c8ed349efa49cf\": container with ID starting with 9aba5dc8a14d4b81d95a808c30e25c14e49440270ff433a4e7c8ed349efa49cf not found: ID does not exist" Jan 28 16:43:48 crc kubenswrapper[4811]: I0128 16:43:48.260128 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lqcq9\" (UniqueName: \"kubernetes.io/projected/a7773d32-f6a6-4902-b4fc-e0e6839c71aa-kube-api-access-lqcq9\") pod \"a7773d32-f6a6-4902-b4fc-e0e6839c71aa\" (UID: \"a7773d32-f6a6-4902-b4fc-e0e6839c71aa\") " Jan 28 16:43:48 crc kubenswrapper[4811]: I0128 16:43:48.260201 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7773d32-f6a6-4902-b4fc-e0e6839c71aa-utilities\") pod \"a7773d32-f6a6-4902-b4fc-e0e6839c71aa\" (UID: \"a7773d32-f6a6-4902-b4fc-e0e6839c71aa\") " Jan 28 16:43:48 crc kubenswrapper[4811]: I0128 16:43:48.260305 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7773d32-f6a6-4902-b4fc-e0e6839c71aa-catalog-content\") pod \"a7773d32-f6a6-4902-b4fc-e0e6839c71aa\" (UID: \"a7773d32-f6a6-4902-b4fc-e0e6839c71aa\") " Jan 28 16:43:48 crc kubenswrapper[4811]: I0128 16:43:48.261237 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7773d32-f6a6-4902-b4fc-e0e6839c71aa-utilities" (OuterVolumeSpecName: "utilities") pod "a7773d32-f6a6-4902-b4fc-e0e6839c71aa" (UID: "a7773d32-f6a6-4902-b4fc-e0e6839c71aa"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:43:48 crc kubenswrapper[4811]: I0128 16:43:48.266840 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7773d32-f6a6-4902-b4fc-e0e6839c71aa-kube-api-access-lqcq9" (OuterVolumeSpecName: "kube-api-access-lqcq9") pod "a7773d32-f6a6-4902-b4fc-e0e6839c71aa" (UID: "a7773d32-f6a6-4902-b4fc-e0e6839c71aa"). InnerVolumeSpecName "kube-api-access-lqcq9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:43:48 crc kubenswrapper[4811]: I0128 16:43:48.340137 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7773d32-f6a6-4902-b4fc-e0e6839c71aa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a7773d32-f6a6-4902-b4fc-e0e6839c71aa" (UID: "a7773d32-f6a6-4902-b4fc-e0e6839c71aa"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:43:48 crc kubenswrapper[4811]: I0128 16:43:48.361284 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7773d32-f6a6-4902-b4fc-e0e6839c71aa-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 16:43:48 crc kubenswrapper[4811]: I0128 16:43:48.361317 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7773d32-f6a6-4902-b4fc-e0e6839c71aa-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 16:43:48 crc kubenswrapper[4811]: I0128 16:43:48.361331 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lqcq9\" (UniqueName: \"kubernetes.io/projected/a7773d32-f6a6-4902-b4fc-e0e6839c71aa-kube-api-access-lqcq9\") on node \"crc\" DevicePath \"\"" Jan 28 16:43:48 crc kubenswrapper[4811]: I0128 16:43:48.426477 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bz72k"] Jan 28 16:43:48 crc kubenswrapper[4811]: I0128 16:43:48.436138 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-bz72k"] Jan 28 16:43:50 crc kubenswrapper[4811]: I0128 16:43:50.365501 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7773d32-f6a6-4902-b4fc-e0e6839c71aa" path="/var/lib/kubelet/pods/a7773d32-f6a6-4902-b4fc-e0e6839c71aa/volumes" Jan 28 16:44:03 crc kubenswrapper[4811]: I0128 16:44:03.087033 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 16:44:03 crc kubenswrapper[4811]: I0128 16:44:03.087592 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 16:44:33 crc kubenswrapper[4811]: I0128 16:44:33.086909 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 16:44:33 crc kubenswrapper[4811]: I0128 16:44:33.089358 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 16:44:33 crc kubenswrapper[4811]: I0128 16:44:33.091629 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" Jan 28 16:44:33 crc kubenswrapper[4811]: I0128 16:44:33.092659 4811 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c45eec3aa18190b78e9326894343a9c07c27b2874b1233774091df8571bf91d8"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon 
failed liveness probe, will be restarted" Jan 28 16:44:33 crc kubenswrapper[4811]: I0128 16:44:33.092770 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://c45eec3aa18190b78e9326894343a9c07c27b2874b1233774091df8571bf91d8" gracePeriod=600 Jan 28 16:44:33 crc kubenswrapper[4811]: E0128 16:44:33.227458 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:44:33 crc kubenswrapper[4811]: I0128 16:44:33.551820 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="c45eec3aa18190b78e9326894343a9c07c27b2874b1233774091df8571bf91d8" exitCode=0 Jan 28 16:44:33 crc kubenswrapper[4811]: I0128 16:44:33.551877 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"c45eec3aa18190b78e9326894343a9c07c27b2874b1233774091df8571bf91d8"} Jan 28 16:44:33 crc kubenswrapper[4811]: I0128 16:44:33.551958 4811 scope.go:117] "RemoveContainer" containerID="41313aa46ebf1a83bdff88072d5c0f56155fe00e8db31998639dcd516b2bee16" Jan 28 16:44:33 crc kubenswrapper[4811]: I0128 16:44:33.552751 4811 scope.go:117] "RemoveContainer" containerID="c45eec3aa18190b78e9326894343a9c07c27b2874b1233774091df8571bf91d8" Jan 28 16:44:33 crc kubenswrapper[4811]: E0128 16:44:33.553348 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:44:35 crc kubenswrapper[4811]: I0128 16:44:35.030030 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-qclb2"] Jan 28 16:44:35 crc kubenswrapper[4811]: E0128 16:44:35.030709 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7773d32-f6a6-4902-b4fc-e0e6839c71aa" containerName="registry-server" Jan 28 16:44:35 crc kubenswrapper[4811]: I0128 16:44:35.030743 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7773d32-f6a6-4902-b4fc-e0e6839c71aa" containerName="registry-server" Jan 28 16:44:35 crc kubenswrapper[4811]: E0128 16:44:35.030780 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7773d32-f6a6-4902-b4fc-e0e6839c71aa" containerName="extract-utilities" Jan 28 16:44:35 crc kubenswrapper[4811]: I0128 16:44:35.030797 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7773d32-f6a6-4902-b4fc-e0e6839c71aa" containerName="extract-utilities" Jan 28 16:44:35 crc kubenswrapper[4811]: E0128 16:44:35.030850 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7773d32-f6a6-4902-b4fc-e0e6839c71aa" containerName="extract-content" Jan 28 
16:44:35 crc kubenswrapper[4811]: I0128 16:44:35.030869 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7773d32-f6a6-4902-b4fc-e0e6839c71aa" containerName="extract-content" Jan 28 16:44:35 crc kubenswrapper[4811]: I0128 16:44:35.031256 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7773d32-f6a6-4902-b4fc-e0e6839c71aa" containerName="registry-server" Jan 28 16:44:35 crc kubenswrapper[4811]: I0128 16:44:35.053379 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qclb2" Jan 28 16:44:35 crc kubenswrapper[4811]: I0128 16:44:35.087619 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qclb2"] Jan 28 16:44:35 crc kubenswrapper[4811]: I0128 16:44:35.157819 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f232f42a-9f42-4a21-abd1-4aa8e008abda-catalog-content\") pod \"community-operators-qclb2\" (UID: \"f232f42a-9f42-4a21-abd1-4aa8e008abda\") " pod="openshift-marketplace/community-operators-qclb2" Jan 28 16:44:35 crc kubenswrapper[4811]: I0128 16:44:35.157893 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f232f42a-9f42-4a21-abd1-4aa8e008abda-utilities\") pod \"community-operators-qclb2\" (UID: \"f232f42a-9f42-4a21-abd1-4aa8e008abda\") " pod="openshift-marketplace/community-operators-qclb2" Jan 28 16:44:35 crc kubenswrapper[4811]: I0128 16:44:35.157953 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfvjt\" (UniqueName: \"kubernetes.io/projected/f232f42a-9f42-4a21-abd1-4aa8e008abda-kube-api-access-lfvjt\") pod \"community-operators-qclb2\" (UID: \"f232f42a-9f42-4a21-abd1-4aa8e008abda\") " pod="openshift-marketplace/community-operators-qclb2" Jan 28 16:44:35 crc kubenswrapper[4811]: I0128 16:44:35.259674 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f232f42a-9f42-4a21-abd1-4aa8e008abda-catalog-content\") pod \"community-operators-qclb2\" (UID: \"f232f42a-9f42-4a21-abd1-4aa8e008abda\") " pod="openshift-marketplace/community-operators-qclb2" Jan 28 16:44:35 crc kubenswrapper[4811]: I0128 16:44:35.259729 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f232f42a-9f42-4a21-abd1-4aa8e008abda-utilities\") pod \"community-operators-qclb2\" (UID: \"f232f42a-9f42-4a21-abd1-4aa8e008abda\") " pod="openshift-marketplace/community-operators-qclb2" Jan 28 16:44:35 crc kubenswrapper[4811]: I0128 16:44:35.259776 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfvjt\" (UniqueName: \"kubernetes.io/projected/f232f42a-9f42-4a21-abd1-4aa8e008abda-kube-api-access-lfvjt\") pod \"community-operators-qclb2\" (UID: \"f232f42a-9f42-4a21-abd1-4aa8e008abda\") " pod="openshift-marketplace/community-operators-qclb2" Jan 28 16:44:35 crc kubenswrapper[4811]: I0128 16:44:35.260559 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f232f42a-9f42-4a21-abd1-4aa8e008abda-catalog-content\") pod \"community-operators-qclb2\" (UID: \"f232f42a-9f42-4a21-abd1-4aa8e008abda\") " 
pod="openshift-marketplace/community-operators-qclb2" Jan 28 16:44:35 crc kubenswrapper[4811]: I0128 16:44:35.260776 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f232f42a-9f42-4a21-abd1-4aa8e008abda-utilities\") pod \"community-operators-qclb2\" (UID: \"f232f42a-9f42-4a21-abd1-4aa8e008abda\") " pod="openshift-marketplace/community-operators-qclb2" Jan 28 16:44:35 crc kubenswrapper[4811]: I0128 16:44:35.279503 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfvjt\" (UniqueName: \"kubernetes.io/projected/f232f42a-9f42-4a21-abd1-4aa8e008abda-kube-api-access-lfvjt\") pod \"community-operators-qclb2\" (UID: \"f232f42a-9f42-4a21-abd1-4aa8e008abda\") " pod="openshift-marketplace/community-operators-qclb2" Jan 28 16:44:35 crc kubenswrapper[4811]: I0128 16:44:35.399212 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qclb2" Jan 28 16:44:35 crc kubenswrapper[4811]: I0128 16:44:35.952655 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qclb2"] Jan 28 16:44:36 crc kubenswrapper[4811]: I0128 16:44:36.581534 4811 generic.go:334] "Generic (PLEG): container finished" podID="f232f42a-9f42-4a21-abd1-4aa8e008abda" containerID="87a3802e16808c3a6848cce593fd380fb4dd7ea8bf1e260f43386d7492201a2d" exitCode=0 Jan 28 16:44:36 crc kubenswrapper[4811]: I0128 16:44:36.581957 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qclb2" event={"ID":"f232f42a-9f42-4a21-abd1-4aa8e008abda","Type":"ContainerDied","Data":"87a3802e16808c3a6848cce593fd380fb4dd7ea8bf1e260f43386d7492201a2d"} Jan 28 16:44:36 crc kubenswrapper[4811]: I0128 16:44:36.582005 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qclb2" event={"ID":"f232f42a-9f42-4a21-abd1-4aa8e008abda","Type":"ContainerStarted","Data":"41df024caef180fdda1df9df88f64b95087ac2f5085b439bff98adfa733a17e7"} Jan 28 16:44:37 crc kubenswrapper[4811]: I0128 16:44:37.591141 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qclb2" event={"ID":"f232f42a-9f42-4a21-abd1-4aa8e008abda","Type":"ContainerStarted","Data":"bbcc6a5d1b80eaee44c6f51bf3a8006b4221a1469eebb5bb589f4d3d6ed44182"} Jan 28 16:44:38 crc kubenswrapper[4811]: I0128 16:44:38.600104 4811 generic.go:334] "Generic (PLEG): container finished" podID="f232f42a-9f42-4a21-abd1-4aa8e008abda" containerID="bbcc6a5d1b80eaee44c6f51bf3a8006b4221a1469eebb5bb589f4d3d6ed44182" exitCode=0 Jan 28 16:44:38 crc kubenswrapper[4811]: I0128 16:44:38.600187 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qclb2" event={"ID":"f232f42a-9f42-4a21-abd1-4aa8e008abda","Type":"ContainerDied","Data":"bbcc6a5d1b80eaee44c6f51bf3a8006b4221a1469eebb5bb589f4d3d6ed44182"} Jan 28 16:44:39 crc kubenswrapper[4811]: I0128 16:44:39.609004 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qclb2" event={"ID":"f232f42a-9f42-4a21-abd1-4aa8e008abda","Type":"ContainerStarted","Data":"456b31b55f7708cabc4b9922dbb029dfd3c96dbb9f38d12dabd1179bf55cb6cc"} Jan 28 16:44:39 crc kubenswrapper[4811]: I0128 16:44:39.654235 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-qclb2" podStartSLOduration=3.270771578 
podStartE2EDuration="5.654218356s" podCreationTimestamp="2026-01-28 16:44:34 +0000 UTC" firstStartedPulling="2026-01-28 16:44:36.585335436 +0000 UTC m=+3569.339699029" lastFinishedPulling="2026-01-28 16:44:38.968782224 +0000 UTC m=+3571.723145807" observedRunningTime="2026-01-28 16:44:39.652681865 +0000 UTC m=+3572.407045458" watchObservedRunningTime="2026-01-28 16:44:39.654218356 +0000 UTC m=+3572.408581939" Jan 28 16:44:45 crc kubenswrapper[4811]: I0128 16:44:45.399912 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-qclb2" Jan 28 16:44:45 crc kubenswrapper[4811]: I0128 16:44:45.400806 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-qclb2" Jan 28 16:44:45 crc kubenswrapper[4811]: I0128 16:44:45.475538 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-qclb2" Jan 28 16:44:45 crc kubenswrapper[4811]: I0128 16:44:45.723726 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-qclb2" Jan 28 16:44:45 crc kubenswrapper[4811]: I0128 16:44:45.782267 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qclb2"] Jan 28 16:44:47 crc kubenswrapper[4811]: I0128 16:44:47.339576 4811 scope.go:117] "RemoveContainer" containerID="c45eec3aa18190b78e9326894343a9c07c27b2874b1233774091df8571bf91d8" Jan 28 16:44:47 crc kubenswrapper[4811]: E0128 16:44:47.340035 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:44:47 crc kubenswrapper[4811]: I0128 16:44:47.677456 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-qclb2" podUID="f232f42a-9f42-4a21-abd1-4aa8e008abda" containerName="registry-server" containerID="cri-o://456b31b55f7708cabc4b9922dbb029dfd3c96dbb9f38d12dabd1179bf55cb6cc" gracePeriod=2 Jan 28 16:44:48 crc kubenswrapper[4811]: I0128 16:44:48.072474 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qclb2" Jan 28 16:44:48 crc kubenswrapper[4811]: I0128 16:44:48.153144 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f232f42a-9f42-4a21-abd1-4aa8e008abda-catalog-content\") pod \"f232f42a-9f42-4a21-abd1-4aa8e008abda\" (UID: \"f232f42a-9f42-4a21-abd1-4aa8e008abda\") " Jan 28 16:44:48 crc kubenswrapper[4811]: I0128 16:44:48.153219 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f232f42a-9f42-4a21-abd1-4aa8e008abda-utilities\") pod \"f232f42a-9f42-4a21-abd1-4aa8e008abda\" (UID: \"f232f42a-9f42-4a21-abd1-4aa8e008abda\") " Jan 28 16:44:48 crc kubenswrapper[4811]: I0128 16:44:48.153296 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lfvjt\" (UniqueName: \"kubernetes.io/projected/f232f42a-9f42-4a21-abd1-4aa8e008abda-kube-api-access-lfvjt\") pod \"f232f42a-9f42-4a21-abd1-4aa8e008abda\" (UID: \"f232f42a-9f42-4a21-abd1-4aa8e008abda\") " Jan 28 16:44:48 crc kubenswrapper[4811]: I0128 16:44:48.154133 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f232f42a-9f42-4a21-abd1-4aa8e008abda-utilities" (OuterVolumeSpecName: "utilities") pod "f232f42a-9f42-4a21-abd1-4aa8e008abda" (UID: "f232f42a-9f42-4a21-abd1-4aa8e008abda"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:44:48 crc kubenswrapper[4811]: I0128 16:44:48.159067 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f232f42a-9f42-4a21-abd1-4aa8e008abda-kube-api-access-lfvjt" (OuterVolumeSpecName: "kube-api-access-lfvjt") pod "f232f42a-9f42-4a21-abd1-4aa8e008abda" (UID: "f232f42a-9f42-4a21-abd1-4aa8e008abda"). InnerVolumeSpecName "kube-api-access-lfvjt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:44:48 crc kubenswrapper[4811]: I0128 16:44:48.255165 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lfvjt\" (UniqueName: \"kubernetes.io/projected/f232f42a-9f42-4a21-abd1-4aa8e008abda-kube-api-access-lfvjt\") on node \"crc\" DevicePath \"\"" Jan 28 16:44:48 crc kubenswrapper[4811]: I0128 16:44:48.255205 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f232f42a-9f42-4a21-abd1-4aa8e008abda-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 16:44:48 crc kubenswrapper[4811]: I0128 16:44:48.690753 4811 generic.go:334] "Generic (PLEG): container finished" podID="f232f42a-9f42-4a21-abd1-4aa8e008abda" containerID="456b31b55f7708cabc4b9922dbb029dfd3c96dbb9f38d12dabd1179bf55cb6cc" exitCode=0 Jan 28 16:44:48 crc kubenswrapper[4811]: I0128 16:44:48.690820 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qclb2" event={"ID":"f232f42a-9f42-4a21-abd1-4aa8e008abda","Type":"ContainerDied","Data":"456b31b55f7708cabc4b9922dbb029dfd3c96dbb9f38d12dabd1179bf55cb6cc"} Jan 28 16:44:48 crc kubenswrapper[4811]: I0128 16:44:48.690858 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qclb2" event={"ID":"f232f42a-9f42-4a21-abd1-4aa8e008abda","Type":"ContainerDied","Data":"41df024caef180fdda1df9df88f64b95087ac2f5085b439bff98adfa733a17e7"} Jan 28 16:44:48 crc kubenswrapper[4811]: I0128 16:44:48.690886 4811 scope.go:117] "RemoveContainer" containerID="456b31b55f7708cabc4b9922dbb029dfd3c96dbb9f38d12dabd1179bf55cb6cc" Jan 28 16:44:48 crc kubenswrapper[4811]: I0128 16:44:48.690976 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qclb2" Jan 28 16:44:48 crc kubenswrapper[4811]: I0128 16:44:48.717205 4811 scope.go:117] "RemoveContainer" containerID="bbcc6a5d1b80eaee44c6f51bf3a8006b4221a1469eebb5bb589f4d3d6ed44182" Jan 28 16:44:48 crc kubenswrapper[4811]: I0128 16:44:48.741808 4811 scope.go:117] "RemoveContainer" containerID="87a3802e16808c3a6848cce593fd380fb4dd7ea8bf1e260f43386d7492201a2d" Jan 28 16:44:48 crc kubenswrapper[4811]: I0128 16:44:48.768771 4811 scope.go:117] "RemoveContainer" containerID="456b31b55f7708cabc4b9922dbb029dfd3c96dbb9f38d12dabd1179bf55cb6cc" Jan 28 16:44:48 crc kubenswrapper[4811]: E0128 16:44:48.769279 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"456b31b55f7708cabc4b9922dbb029dfd3c96dbb9f38d12dabd1179bf55cb6cc\": container with ID starting with 456b31b55f7708cabc4b9922dbb029dfd3c96dbb9f38d12dabd1179bf55cb6cc not found: ID does not exist" containerID="456b31b55f7708cabc4b9922dbb029dfd3c96dbb9f38d12dabd1179bf55cb6cc" Jan 28 16:44:48 crc kubenswrapper[4811]: I0128 16:44:48.769317 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"456b31b55f7708cabc4b9922dbb029dfd3c96dbb9f38d12dabd1179bf55cb6cc"} err="failed to get container status \"456b31b55f7708cabc4b9922dbb029dfd3c96dbb9f38d12dabd1179bf55cb6cc\": rpc error: code = NotFound desc = could not find container \"456b31b55f7708cabc4b9922dbb029dfd3c96dbb9f38d12dabd1179bf55cb6cc\": container with ID starting with 456b31b55f7708cabc4b9922dbb029dfd3c96dbb9f38d12dabd1179bf55cb6cc not found: ID does not exist" Jan 28 16:44:48 crc kubenswrapper[4811]: I0128 16:44:48.769343 4811 scope.go:117] "RemoveContainer" containerID="bbcc6a5d1b80eaee44c6f51bf3a8006b4221a1469eebb5bb589f4d3d6ed44182" Jan 28 16:44:48 crc kubenswrapper[4811]: E0128 16:44:48.769731 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bbcc6a5d1b80eaee44c6f51bf3a8006b4221a1469eebb5bb589f4d3d6ed44182\": container with ID starting with bbcc6a5d1b80eaee44c6f51bf3a8006b4221a1469eebb5bb589f4d3d6ed44182 not found: ID does not exist" containerID="bbcc6a5d1b80eaee44c6f51bf3a8006b4221a1469eebb5bb589f4d3d6ed44182" Jan 28 16:44:48 crc kubenswrapper[4811]: I0128 16:44:48.769765 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bbcc6a5d1b80eaee44c6f51bf3a8006b4221a1469eebb5bb589f4d3d6ed44182"} err="failed to get container status \"bbcc6a5d1b80eaee44c6f51bf3a8006b4221a1469eebb5bb589f4d3d6ed44182\": rpc error: code = NotFound desc = could not find container \"bbcc6a5d1b80eaee44c6f51bf3a8006b4221a1469eebb5bb589f4d3d6ed44182\": container with ID starting with bbcc6a5d1b80eaee44c6f51bf3a8006b4221a1469eebb5bb589f4d3d6ed44182 not found: ID does not exist" Jan 28 16:44:48 crc kubenswrapper[4811]: I0128 16:44:48.769784 4811 scope.go:117] "RemoveContainer" containerID="87a3802e16808c3a6848cce593fd380fb4dd7ea8bf1e260f43386d7492201a2d" Jan 28 16:44:48 crc kubenswrapper[4811]: E0128 16:44:48.770395 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"87a3802e16808c3a6848cce593fd380fb4dd7ea8bf1e260f43386d7492201a2d\": container with ID starting with 87a3802e16808c3a6848cce593fd380fb4dd7ea8bf1e260f43386d7492201a2d not found: ID does not exist" containerID="87a3802e16808c3a6848cce593fd380fb4dd7ea8bf1e260f43386d7492201a2d" 
Jan 28 16:44:48 crc kubenswrapper[4811]: I0128 16:44:48.770451 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87a3802e16808c3a6848cce593fd380fb4dd7ea8bf1e260f43386d7492201a2d"} err="failed to get container status \"87a3802e16808c3a6848cce593fd380fb4dd7ea8bf1e260f43386d7492201a2d\": rpc error: code = NotFound desc = could not find container \"87a3802e16808c3a6848cce593fd380fb4dd7ea8bf1e260f43386d7492201a2d\": container with ID starting with 87a3802e16808c3a6848cce593fd380fb4dd7ea8bf1e260f43386d7492201a2d not found: ID does not exist" Jan 28 16:44:48 crc kubenswrapper[4811]: I0128 16:44:48.812059 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f232f42a-9f42-4a21-abd1-4aa8e008abda-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f232f42a-9f42-4a21-abd1-4aa8e008abda" (UID: "f232f42a-9f42-4a21-abd1-4aa8e008abda"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:44:48 crc kubenswrapper[4811]: I0128 16:44:48.865001 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f232f42a-9f42-4a21-abd1-4aa8e008abda-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 16:44:49 crc kubenswrapper[4811]: I0128 16:44:49.048334 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qclb2"] Jan 28 16:44:49 crc kubenswrapper[4811]: I0128 16:44:49.061132 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-qclb2"] Jan 28 16:44:50 crc kubenswrapper[4811]: I0128 16:44:50.353201 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f232f42a-9f42-4a21-abd1-4aa8e008abda" path="/var/lib/kubelet/pods/f232f42a-9f42-4a21-abd1-4aa8e008abda/volumes" Jan 28 16:45:00 crc kubenswrapper[4811]: I0128 16:45:00.161313 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493645-5tfjg"] Jan 28 16:45:00 crc kubenswrapper[4811]: E0128 16:45:00.162260 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f232f42a-9f42-4a21-abd1-4aa8e008abda" containerName="extract-content" Jan 28 16:45:00 crc kubenswrapper[4811]: I0128 16:45:00.162275 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f232f42a-9f42-4a21-abd1-4aa8e008abda" containerName="extract-content" Jan 28 16:45:00 crc kubenswrapper[4811]: E0128 16:45:00.162292 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f232f42a-9f42-4a21-abd1-4aa8e008abda" containerName="registry-server" Jan 28 16:45:00 crc kubenswrapper[4811]: I0128 16:45:00.162300 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f232f42a-9f42-4a21-abd1-4aa8e008abda" containerName="registry-server" Jan 28 16:45:00 crc kubenswrapper[4811]: E0128 16:45:00.162319 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f232f42a-9f42-4a21-abd1-4aa8e008abda" containerName="extract-utilities" Jan 28 16:45:00 crc kubenswrapper[4811]: I0128 16:45:00.162327 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f232f42a-9f42-4a21-abd1-4aa8e008abda" containerName="extract-utilities" Jan 28 16:45:00 crc kubenswrapper[4811]: I0128 16:45:00.162561 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="f232f42a-9f42-4a21-abd1-4aa8e008abda" containerName="registry-server" Jan 28 16:45:00 crc kubenswrapper[4811]: I0128 
16:45:00.163211 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493645-5tfjg" Jan 28 16:45:00 crc kubenswrapper[4811]: I0128 16:45:00.165859 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 28 16:45:00 crc kubenswrapper[4811]: I0128 16:45:00.172873 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 28 16:45:00 crc kubenswrapper[4811]: I0128 16:45:00.175121 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493645-5tfjg"] Jan 28 16:45:00 crc kubenswrapper[4811]: I0128 16:45:00.248099 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crn8s\" (UniqueName: \"kubernetes.io/projected/13094dcc-9bfd-4733-bf36-aa370104fe4e-kube-api-access-crn8s\") pod \"collect-profiles-29493645-5tfjg\" (UID: \"13094dcc-9bfd-4733-bf36-aa370104fe4e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493645-5tfjg" Jan 28 16:45:00 crc kubenswrapper[4811]: I0128 16:45:00.248164 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/13094dcc-9bfd-4733-bf36-aa370104fe4e-secret-volume\") pod \"collect-profiles-29493645-5tfjg\" (UID: \"13094dcc-9bfd-4733-bf36-aa370104fe4e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493645-5tfjg" Jan 28 16:45:00 crc kubenswrapper[4811]: I0128 16:45:00.248209 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/13094dcc-9bfd-4733-bf36-aa370104fe4e-config-volume\") pod \"collect-profiles-29493645-5tfjg\" (UID: \"13094dcc-9bfd-4733-bf36-aa370104fe4e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493645-5tfjg" Jan 28 16:45:00 crc kubenswrapper[4811]: I0128 16:45:00.350035 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/13094dcc-9bfd-4733-bf36-aa370104fe4e-config-volume\") pod \"collect-profiles-29493645-5tfjg\" (UID: \"13094dcc-9bfd-4733-bf36-aa370104fe4e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493645-5tfjg" Jan 28 16:45:00 crc kubenswrapper[4811]: I0128 16:45:00.350420 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crn8s\" (UniqueName: \"kubernetes.io/projected/13094dcc-9bfd-4733-bf36-aa370104fe4e-kube-api-access-crn8s\") pod \"collect-profiles-29493645-5tfjg\" (UID: \"13094dcc-9bfd-4733-bf36-aa370104fe4e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493645-5tfjg" Jan 28 16:45:00 crc kubenswrapper[4811]: I0128 16:45:00.350605 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/13094dcc-9bfd-4733-bf36-aa370104fe4e-secret-volume\") pod \"collect-profiles-29493645-5tfjg\" (UID: \"13094dcc-9bfd-4733-bf36-aa370104fe4e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493645-5tfjg" Jan 28 16:45:00 crc kubenswrapper[4811]: I0128 16:45:00.350925 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/13094dcc-9bfd-4733-bf36-aa370104fe4e-config-volume\") pod \"collect-profiles-29493645-5tfjg\" (UID: \"13094dcc-9bfd-4733-bf36-aa370104fe4e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493645-5tfjg" Jan 28 16:45:00 crc kubenswrapper[4811]: I0128 16:45:00.358806 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/13094dcc-9bfd-4733-bf36-aa370104fe4e-secret-volume\") pod \"collect-profiles-29493645-5tfjg\" (UID: \"13094dcc-9bfd-4733-bf36-aa370104fe4e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493645-5tfjg" Jan 28 16:45:00 crc kubenswrapper[4811]: I0128 16:45:00.381317 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crn8s\" (UniqueName: \"kubernetes.io/projected/13094dcc-9bfd-4733-bf36-aa370104fe4e-kube-api-access-crn8s\") pod \"collect-profiles-29493645-5tfjg\" (UID: \"13094dcc-9bfd-4733-bf36-aa370104fe4e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493645-5tfjg" Jan 28 16:45:00 crc kubenswrapper[4811]: I0128 16:45:00.501485 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493645-5tfjg" Jan 28 16:45:00 crc kubenswrapper[4811]: I0128 16:45:00.959967 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493645-5tfjg"] Jan 28 16:45:00 crc kubenswrapper[4811]: W0128 16:45:00.973531 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod13094dcc_9bfd_4733_bf36_aa370104fe4e.slice/crio-2f6c85e7e75e576ac464ced0d25984a2434ec357e5639f3c3693fb8cbb45c03b WatchSource:0}: Error finding container 2f6c85e7e75e576ac464ced0d25984a2434ec357e5639f3c3693fb8cbb45c03b: Status 404 returned error can't find the container with id 2f6c85e7e75e576ac464ced0d25984a2434ec357e5639f3c3693fb8cbb45c03b Jan 28 16:45:01 crc kubenswrapper[4811]: I0128 16:45:01.340994 4811 scope.go:117] "RemoveContainer" containerID="c45eec3aa18190b78e9326894343a9c07c27b2874b1233774091df8571bf91d8" Jan 28 16:45:01 crc kubenswrapper[4811]: E0128 16:45:01.341312 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:45:01 crc kubenswrapper[4811]: I0128 16:45:01.799358 4811 generic.go:334] "Generic (PLEG): container finished" podID="13094dcc-9bfd-4733-bf36-aa370104fe4e" containerID="01d31856a129f95f65f92a1cadd8dd34a38305f752d7992560027cf5c164e3ab" exitCode=0 Jan 28 16:45:01 crc kubenswrapper[4811]: I0128 16:45:01.799401 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493645-5tfjg" event={"ID":"13094dcc-9bfd-4733-bf36-aa370104fe4e","Type":"ContainerDied","Data":"01d31856a129f95f65f92a1cadd8dd34a38305f752d7992560027cf5c164e3ab"} Jan 28 16:45:01 crc kubenswrapper[4811]: I0128 16:45:01.799472 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493645-5tfjg" 
event={"ID":"13094dcc-9bfd-4733-bf36-aa370104fe4e","Type":"ContainerStarted","Data":"2f6c85e7e75e576ac464ced0d25984a2434ec357e5639f3c3693fb8cbb45c03b"} Jan 28 16:45:03 crc kubenswrapper[4811]: I0128 16:45:03.123153 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493645-5tfjg" Jan 28 16:45:03 crc kubenswrapper[4811]: I0128 16:45:03.196483 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-crn8s\" (UniqueName: \"kubernetes.io/projected/13094dcc-9bfd-4733-bf36-aa370104fe4e-kube-api-access-crn8s\") pod \"13094dcc-9bfd-4733-bf36-aa370104fe4e\" (UID: \"13094dcc-9bfd-4733-bf36-aa370104fe4e\") " Jan 28 16:45:03 crc kubenswrapper[4811]: I0128 16:45:03.196766 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/13094dcc-9bfd-4733-bf36-aa370104fe4e-secret-volume\") pod \"13094dcc-9bfd-4733-bf36-aa370104fe4e\" (UID: \"13094dcc-9bfd-4733-bf36-aa370104fe4e\") " Jan 28 16:45:03 crc kubenswrapper[4811]: I0128 16:45:03.196801 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/13094dcc-9bfd-4733-bf36-aa370104fe4e-config-volume\") pod \"13094dcc-9bfd-4733-bf36-aa370104fe4e\" (UID: \"13094dcc-9bfd-4733-bf36-aa370104fe4e\") " Jan 28 16:45:03 crc kubenswrapper[4811]: I0128 16:45:03.197455 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13094dcc-9bfd-4733-bf36-aa370104fe4e-config-volume" (OuterVolumeSpecName: "config-volume") pod "13094dcc-9bfd-4733-bf36-aa370104fe4e" (UID: "13094dcc-9bfd-4733-bf36-aa370104fe4e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 16:45:03 crc kubenswrapper[4811]: I0128 16:45:03.202349 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13094dcc-9bfd-4733-bf36-aa370104fe4e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "13094dcc-9bfd-4733-bf36-aa370104fe4e" (UID: "13094dcc-9bfd-4733-bf36-aa370104fe4e"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 16:45:03 crc kubenswrapper[4811]: I0128 16:45:03.202769 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13094dcc-9bfd-4733-bf36-aa370104fe4e-kube-api-access-crn8s" (OuterVolumeSpecName: "kube-api-access-crn8s") pod "13094dcc-9bfd-4733-bf36-aa370104fe4e" (UID: "13094dcc-9bfd-4733-bf36-aa370104fe4e"). InnerVolumeSpecName "kube-api-access-crn8s". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:45:03 crc kubenswrapper[4811]: I0128 16:45:03.298816 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-crn8s\" (UniqueName: \"kubernetes.io/projected/13094dcc-9bfd-4733-bf36-aa370104fe4e-kube-api-access-crn8s\") on node \"crc\" DevicePath \"\"" Jan 28 16:45:03 crc kubenswrapper[4811]: I0128 16:45:03.298872 4811 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/13094dcc-9bfd-4733-bf36-aa370104fe4e-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 28 16:45:03 crc kubenswrapper[4811]: I0128 16:45:03.298902 4811 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/13094dcc-9bfd-4733-bf36-aa370104fe4e-config-volume\") on node \"crc\" DevicePath \"\"" Jan 28 16:45:03 crc kubenswrapper[4811]: I0128 16:45:03.820368 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493645-5tfjg" event={"ID":"13094dcc-9bfd-4733-bf36-aa370104fe4e","Type":"ContainerDied","Data":"2f6c85e7e75e576ac464ced0d25984a2434ec357e5639f3c3693fb8cbb45c03b"} Jan 28 16:45:03 crc kubenswrapper[4811]: I0128 16:45:03.820713 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2f6c85e7e75e576ac464ced0d25984a2434ec357e5639f3c3693fb8cbb45c03b" Jan 28 16:45:03 crc kubenswrapper[4811]: I0128 16:45:03.820423 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493645-5tfjg" Jan 28 16:45:04 crc kubenswrapper[4811]: I0128 16:45:04.217502 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493600-f95tf"] Jan 28 16:45:04 crc kubenswrapper[4811]: I0128 16:45:04.227015 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493600-f95tf"] Jan 28 16:45:04 crc kubenswrapper[4811]: I0128 16:45:04.353836 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2522f10c-03a4-43b9-8cd6-ce43816ca0d9" path="/var/lib/kubelet/pods/2522f10c-03a4-43b9-8cd6-ce43816ca0d9/volumes" Jan 28 16:45:14 crc kubenswrapper[4811]: I0128 16:45:14.340150 4811 scope.go:117] "RemoveContainer" containerID="c45eec3aa18190b78e9326894343a9c07c27b2874b1233774091df8571bf91d8" Jan 28 16:45:14 crc kubenswrapper[4811]: E0128 16:45:14.340955 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:45:26 crc kubenswrapper[4811]: I0128 16:45:26.339787 4811 scope.go:117] "RemoveContainer" containerID="c45eec3aa18190b78e9326894343a9c07c27b2874b1233774091df8571bf91d8" Jan 28 16:45:26 crc kubenswrapper[4811]: E0128 16:45:26.340920 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:45:29 crc kubenswrapper[4811]: I0128 16:45:29.510008 4811 scope.go:117] "RemoveContainer" containerID="4a3237493d2178658083297fb69845bf88432664a6d30ef70cbd83d69d4bcf39" Jan 28 16:45:38 crc kubenswrapper[4811]: I0128 16:45:38.339766 4811 scope.go:117] "RemoveContainer" containerID="c45eec3aa18190b78e9326894343a9c07c27b2874b1233774091df8571bf91d8" Jan 28 16:45:38 crc kubenswrapper[4811]: E0128 16:45:38.341075 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:45:51 crc kubenswrapper[4811]: I0128 16:45:51.339860 4811 scope.go:117] "RemoveContainer" containerID="c45eec3aa18190b78e9326894343a9c07c27b2874b1233774091df8571bf91d8" Jan 28 16:45:51 crc kubenswrapper[4811]: E0128 16:45:51.341032 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:46:04 crc kubenswrapper[4811]: I0128 16:46:04.339576 4811 scope.go:117] "RemoveContainer" containerID="c45eec3aa18190b78e9326894343a9c07c27b2874b1233774091df8571bf91d8" Jan 28 16:46:04 crc kubenswrapper[4811]: E0128 16:46:04.340483 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:46:18 crc kubenswrapper[4811]: I0128 16:46:18.349001 4811 scope.go:117] "RemoveContainer" containerID="c45eec3aa18190b78e9326894343a9c07c27b2874b1233774091df8571bf91d8" Jan 28 16:46:18 crc kubenswrapper[4811]: E0128 16:46:18.349870 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:46:29 crc kubenswrapper[4811]: I0128 16:46:29.339518 4811 scope.go:117] "RemoveContainer" containerID="c45eec3aa18190b78e9326894343a9c07c27b2874b1233774091df8571bf91d8" Jan 28 16:46:29 crc kubenswrapper[4811]: E0128 16:46:29.340193 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:46:43 crc kubenswrapper[4811]: I0128 16:46:43.340412 4811 scope.go:117] "RemoveContainer" containerID="c45eec3aa18190b78e9326894343a9c07c27b2874b1233774091df8571bf91d8" Jan 28 16:46:43 crc kubenswrapper[4811]: E0128 16:46:43.341284 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:46:58 crc kubenswrapper[4811]: I0128 16:46:58.345292 4811 scope.go:117] "RemoveContainer" containerID="c45eec3aa18190b78e9326894343a9c07c27b2874b1233774091df8571bf91d8" Jan 28 16:46:58 crc kubenswrapper[4811]: E0128 16:46:58.345809 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:47:13 crc kubenswrapper[4811]: I0128 16:47:13.339177 4811 scope.go:117] "RemoveContainer" containerID="c45eec3aa18190b78e9326894343a9c07c27b2874b1233774091df8571bf91d8" Jan 28 16:47:13 crc kubenswrapper[4811]: E0128 16:47:13.339958 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:47:24 crc kubenswrapper[4811]: I0128 16:47:24.339255 4811 scope.go:117] "RemoveContainer" containerID="c45eec3aa18190b78e9326894343a9c07c27b2874b1233774091df8571bf91d8" Jan 28 16:47:24 crc kubenswrapper[4811]: E0128 16:47:24.340163 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:47:25 crc kubenswrapper[4811]: I0128 16:47:25.947661 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fxhw7"] Jan 28 16:47:25 crc kubenswrapper[4811]: E0128 16:47:25.948468 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13094dcc-9bfd-4733-bf36-aa370104fe4e" containerName="collect-profiles" Jan 28 16:47:25 crc kubenswrapper[4811]: I0128 16:47:25.948490 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="13094dcc-9bfd-4733-bf36-aa370104fe4e" containerName="collect-profiles" Jan 28 16:47:25 crc kubenswrapper[4811]: I0128 16:47:25.948720 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="13094dcc-9bfd-4733-bf36-aa370104fe4e" 
containerName="collect-profiles" Jan 28 16:47:25 crc kubenswrapper[4811]: I0128 16:47:25.950481 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fxhw7" Jan 28 16:47:25 crc kubenswrapper[4811]: I0128 16:47:25.977938 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fxhw7"] Jan 28 16:47:26 crc kubenswrapper[4811]: I0128 16:47:26.039635 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3e4cdc3-cbdc-45e1-b837-ba30d1976829-catalog-content\") pod \"redhat-operators-fxhw7\" (UID: \"f3e4cdc3-cbdc-45e1-b837-ba30d1976829\") " pod="openshift-marketplace/redhat-operators-fxhw7" Jan 28 16:47:26 crc kubenswrapper[4811]: I0128 16:47:26.039700 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-srg9s\" (UniqueName: \"kubernetes.io/projected/f3e4cdc3-cbdc-45e1-b837-ba30d1976829-kube-api-access-srg9s\") pod \"redhat-operators-fxhw7\" (UID: \"f3e4cdc3-cbdc-45e1-b837-ba30d1976829\") " pod="openshift-marketplace/redhat-operators-fxhw7" Jan 28 16:47:26 crc kubenswrapper[4811]: I0128 16:47:26.039884 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3e4cdc3-cbdc-45e1-b837-ba30d1976829-utilities\") pod \"redhat-operators-fxhw7\" (UID: \"f3e4cdc3-cbdc-45e1-b837-ba30d1976829\") " pod="openshift-marketplace/redhat-operators-fxhw7" Jan 28 16:47:26 crc kubenswrapper[4811]: I0128 16:47:26.141572 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3e4cdc3-cbdc-45e1-b837-ba30d1976829-utilities\") pod \"redhat-operators-fxhw7\" (UID: \"f3e4cdc3-cbdc-45e1-b837-ba30d1976829\") " pod="openshift-marketplace/redhat-operators-fxhw7" Jan 28 16:47:26 crc kubenswrapper[4811]: I0128 16:47:26.141617 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3e4cdc3-cbdc-45e1-b837-ba30d1976829-catalog-content\") pod \"redhat-operators-fxhw7\" (UID: \"f3e4cdc3-cbdc-45e1-b837-ba30d1976829\") " pod="openshift-marketplace/redhat-operators-fxhw7" Jan 28 16:47:26 crc kubenswrapper[4811]: I0128 16:47:26.141639 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-srg9s\" (UniqueName: \"kubernetes.io/projected/f3e4cdc3-cbdc-45e1-b837-ba30d1976829-kube-api-access-srg9s\") pod \"redhat-operators-fxhw7\" (UID: \"f3e4cdc3-cbdc-45e1-b837-ba30d1976829\") " pod="openshift-marketplace/redhat-operators-fxhw7" Jan 28 16:47:26 crc kubenswrapper[4811]: I0128 16:47:26.142362 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3e4cdc3-cbdc-45e1-b837-ba30d1976829-catalog-content\") pod \"redhat-operators-fxhw7\" (UID: \"f3e4cdc3-cbdc-45e1-b837-ba30d1976829\") " pod="openshift-marketplace/redhat-operators-fxhw7" Jan 28 16:47:26 crc kubenswrapper[4811]: I0128 16:47:26.142480 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3e4cdc3-cbdc-45e1-b837-ba30d1976829-utilities\") pod \"redhat-operators-fxhw7\" (UID: \"f3e4cdc3-cbdc-45e1-b837-ba30d1976829\") " pod="openshift-marketplace/redhat-operators-fxhw7" Jan 
Jan 28 16:47:26 crc kubenswrapper[4811]: I0128 16:47:26.274831 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fxhw7"
Jan 28 16:47:26 crc kubenswrapper[4811]: I0128 16:47:26.760419 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fxhw7"]
Jan 28 16:47:26 crc kubenswrapper[4811]: I0128 16:47:26.914297 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fxhw7" event={"ID":"f3e4cdc3-cbdc-45e1-b837-ba30d1976829","Type":"ContainerStarted","Data":"aa369c0680bc887a6687bdb77e3c9c03e84d5be4a9b851b144c48d45ddccaccf"}
Jan 28 16:47:27 crc kubenswrapper[4811]: I0128 16:47:27.923123 4811 generic.go:334] "Generic (PLEG): container finished" podID="f3e4cdc3-cbdc-45e1-b837-ba30d1976829" containerID="1b8c1dd36756d2b8e21880bc90ed213c8d0d3c3972f8ce744ce0ed3791dfe2b0" exitCode=0
Jan 28 16:47:27 crc kubenswrapper[4811]: I0128 16:47:27.923221 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fxhw7" event={"ID":"f3e4cdc3-cbdc-45e1-b837-ba30d1976829","Type":"ContainerDied","Data":"1b8c1dd36756d2b8e21880bc90ed213c8d0d3c3972f8ce744ce0ed3791dfe2b0"}
Jan 28 16:47:32 crc kubenswrapper[4811]: I0128 16:47:32.993864 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fxhw7" event={"ID":"f3e4cdc3-cbdc-45e1-b837-ba30d1976829","Type":"ContainerStarted","Data":"43f766d8292909a2b124eb332a4b3f264150ac1ba54071a89e53d24342c95f19"}
Jan 28 16:47:34 crc kubenswrapper[4811]: I0128 16:47:34.006416 4811 generic.go:334] "Generic (PLEG): container finished" podID="f3e4cdc3-cbdc-45e1-b837-ba30d1976829" containerID="43f766d8292909a2b124eb332a4b3f264150ac1ba54071a89e53d24342c95f19" exitCode=0
Jan 28 16:47:34 crc kubenswrapper[4811]: I0128 16:47:34.006507 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fxhw7" event={"ID":"f3e4cdc3-cbdc-45e1-b837-ba30d1976829","Type":"ContainerDied","Data":"43f766d8292909a2b124eb332a4b3f264150ac1ba54071a89e53d24342c95f19"}
Jan 28 16:47:37 crc kubenswrapper[4811]: I0128 16:47:37.339798 4811 scope.go:117] "RemoveContainer" containerID="c45eec3aa18190b78e9326894343a9c07c27b2874b1233774091df8571bf91d8"
Jan 28 16:47:37 crc kubenswrapper[4811]: E0128 16:47:37.340702 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 16:47:37 crc kubenswrapper[4811]: I0128 16:47:37.710618 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fxhw7" event={"ID":"f3e4cdc3-cbdc-45e1-b837-ba30d1976829","Type":"ContainerStarted","Data":"e387959b7b9ff22ae9799ecc53e64653184bc5bc4dd74fba8d73a8c042f01307"}
Jan 28 16:47:37 crc kubenswrapper[4811]: I0128 16:47:37.742348 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fxhw7" podStartSLOduration=3.784475902 podStartE2EDuration="12.742324357s" podCreationTimestamp="2026-01-28 16:47:25 +0000 UTC" firstStartedPulling="2026-01-28 16:47:27.925087978 +0000 UTC m=+3740.679451571" lastFinishedPulling="2026-01-28 16:47:36.882936453 +0000 UTC m=+3749.637300026" observedRunningTime="2026-01-28 16:47:37.737401744 +0000 UTC m=+3750.491765347" watchObservedRunningTime="2026-01-28 16:47:37.742324357 +0000 UTC m=+3750.496687940"
Jan 28 16:47:40 crc kubenswrapper[4811]: I0128 16:47:40.408767 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-zl8bb"]
Jan 28 16:47:40 crc kubenswrapper[4811]: I0128 16:47:40.413329 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zl8bb"
Jan 28 16:47:40 crc kubenswrapper[4811]: I0128 16:47:40.422790 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zl8bb"]
Jan 28 16:47:40 crc kubenswrapper[4811]: I0128 16:47:40.528181 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qqpfv\" (UniqueName: \"kubernetes.io/projected/02d0dda0-7114-4dc5-9a8d-1ce910c9b21a-kube-api-access-qqpfv\") pod \"redhat-marketplace-zl8bb\" (UID: \"02d0dda0-7114-4dc5-9a8d-1ce910c9b21a\") " pod="openshift-marketplace/redhat-marketplace-zl8bb"
Jan 28 16:47:40 crc kubenswrapper[4811]: I0128 16:47:40.528224 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02d0dda0-7114-4dc5-9a8d-1ce910c9b21a-utilities\") pod \"redhat-marketplace-zl8bb\" (UID: \"02d0dda0-7114-4dc5-9a8d-1ce910c9b21a\") " pod="openshift-marketplace/redhat-marketplace-zl8bb"
Jan 28 16:47:40 crc kubenswrapper[4811]: I0128 16:47:40.528366 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02d0dda0-7114-4dc5-9a8d-1ce910c9b21a-catalog-content\") pod \"redhat-marketplace-zl8bb\" (UID: \"02d0dda0-7114-4dc5-9a8d-1ce910c9b21a\") " pod="openshift-marketplace/redhat-marketplace-zl8bb"
Jan 28 16:47:40 crc kubenswrapper[4811]: I0128 16:47:40.629865 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qqpfv\" (UniqueName: \"kubernetes.io/projected/02d0dda0-7114-4dc5-9a8d-1ce910c9b21a-kube-api-access-qqpfv\") pod \"redhat-marketplace-zl8bb\" (UID: \"02d0dda0-7114-4dc5-9a8d-1ce910c9b21a\") " pod="openshift-marketplace/redhat-marketplace-zl8bb"
Jan 28 16:47:40 crc kubenswrapper[4811]: I0128 16:47:40.629953 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02d0dda0-7114-4dc5-9a8d-1ce910c9b21a-utilities\") pod \"redhat-marketplace-zl8bb\" (UID: \"02d0dda0-7114-4dc5-9a8d-1ce910c9b21a\") " pod="openshift-marketplace/redhat-marketplace-zl8bb"
Jan 28 16:47:40 crc kubenswrapper[4811]: I0128 16:47:40.630062 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02d0dda0-7114-4dc5-9a8d-1ce910c9b21a-catalog-content\") pod \"redhat-marketplace-zl8bb\" (UID: \"02d0dda0-7114-4dc5-9a8d-1ce910c9b21a\") " pod="openshift-marketplace/redhat-marketplace-zl8bb"
pod="openshift-marketplace/redhat-marketplace-zl8bb" Jan 28 16:47:40 crc kubenswrapper[4811]: I0128 16:47:40.630555 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02d0dda0-7114-4dc5-9a8d-1ce910c9b21a-catalog-content\") pod \"redhat-marketplace-zl8bb\" (UID: \"02d0dda0-7114-4dc5-9a8d-1ce910c9b21a\") " pod="openshift-marketplace/redhat-marketplace-zl8bb" Jan 28 16:47:40 crc kubenswrapper[4811]: I0128 16:47:40.630583 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02d0dda0-7114-4dc5-9a8d-1ce910c9b21a-utilities\") pod \"redhat-marketplace-zl8bb\" (UID: \"02d0dda0-7114-4dc5-9a8d-1ce910c9b21a\") " pod="openshift-marketplace/redhat-marketplace-zl8bb" Jan 28 16:47:40 crc kubenswrapper[4811]: I0128 16:47:40.648849 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qqpfv\" (UniqueName: \"kubernetes.io/projected/02d0dda0-7114-4dc5-9a8d-1ce910c9b21a-kube-api-access-qqpfv\") pod \"redhat-marketplace-zl8bb\" (UID: \"02d0dda0-7114-4dc5-9a8d-1ce910c9b21a\") " pod="openshift-marketplace/redhat-marketplace-zl8bb" Jan 28 16:47:40 crc kubenswrapper[4811]: I0128 16:47:40.755378 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zl8bb" Jan 28 16:47:41 crc kubenswrapper[4811]: I0128 16:47:41.015395 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zl8bb"] Jan 28 16:47:41 crc kubenswrapper[4811]: W0128 16:47:41.018316 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod02d0dda0_7114_4dc5_9a8d_1ce910c9b21a.slice/crio-7e6a3308758575ab16083ee2b5af94e178e3743d54298d83405d02e73f4ba031 WatchSource:0}: Error finding container 7e6a3308758575ab16083ee2b5af94e178e3743d54298d83405d02e73f4ba031: Status 404 returned error can't find the container with id 7e6a3308758575ab16083ee2b5af94e178e3743d54298d83405d02e73f4ba031 Jan 28 16:47:41 crc kubenswrapper[4811]: I0128 16:47:41.742474 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zl8bb" event={"ID":"02d0dda0-7114-4dc5-9a8d-1ce910c9b21a","Type":"ContainerStarted","Data":"7e6a3308758575ab16083ee2b5af94e178e3743d54298d83405d02e73f4ba031"} Jan 28 16:47:42 crc kubenswrapper[4811]: I0128 16:47:42.749465 4811 generic.go:334] "Generic (PLEG): container finished" podID="02d0dda0-7114-4dc5-9a8d-1ce910c9b21a" containerID="6524722183c5793a7580cb6d732f708b0d2926ab502c19e6d3595980d4ea85ba" exitCode=0 Jan 28 16:47:42 crc kubenswrapper[4811]: I0128 16:47:42.749528 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zl8bb" event={"ID":"02d0dda0-7114-4dc5-9a8d-1ce910c9b21a","Type":"ContainerDied","Data":"6524722183c5793a7580cb6d732f708b0d2926ab502c19e6d3595980d4ea85ba"} Jan 28 16:47:45 crc kubenswrapper[4811]: I0128 16:47:45.780405 4811 generic.go:334] "Generic (PLEG): container finished" podID="02d0dda0-7114-4dc5-9a8d-1ce910c9b21a" containerID="33590c37d752d152f1a331ee2f7794c67ddf12542705f350cc326bbcc8e1ce86" exitCode=0 Jan 28 16:47:45 crc kubenswrapper[4811]: I0128 16:47:45.780538 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zl8bb" 
event={"ID":"02d0dda0-7114-4dc5-9a8d-1ce910c9b21a","Type":"ContainerDied","Data":"33590c37d752d152f1a331ee2f7794c67ddf12542705f350cc326bbcc8e1ce86"} Jan 28 16:47:46 crc kubenswrapper[4811]: I0128 16:47:46.275908 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fxhw7" Jan 28 16:47:46 crc kubenswrapper[4811]: I0128 16:47:46.275987 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fxhw7" Jan 28 16:47:46 crc kubenswrapper[4811]: I0128 16:47:46.316789 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fxhw7" Jan 28 16:47:46 crc kubenswrapper[4811]: I0128 16:47:46.854078 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fxhw7" Jan 28 16:47:47 crc kubenswrapper[4811]: I0128 16:47:47.040897 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fxhw7"] Jan 28 16:47:48 crc kubenswrapper[4811]: I0128 16:47:48.804035 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fxhw7" podUID="f3e4cdc3-cbdc-45e1-b837-ba30d1976829" containerName="registry-server" containerID="cri-o://e387959b7b9ff22ae9799ecc53e64653184bc5bc4dd74fba8d73a8c042f01307" gracePeriod=2 Jan 28 16:47:49 crc kubenswrapper[4811]: I0128 16:47:49.339848 4811 scope.go:117] "RemoveContainer" containerID="c45eec3aa18190b78e9326894343a9c07c27b2874b1233774091df8571bf91d8" Jan 28 16:47:49 crc kubenswrapper[4811]: E0128 16:47:49.340388 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:47:56 crc kubenswrapper[4811]: E0128 16:47:56.275737 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e387959b7b9ff22ae9799ecc53e64653184bc5bc4dd74fba8d73a8c042f01307 is running failed: container process not found" containerID="e387959b7b9ff22ae9799ecc53e64653184bc5bc4dd74fba8d73a8c042f01307" cmd=["grpc_health_probe","-addr=:50051"] Jan 28 16:47:56 crc kubenswrapper[4811]: E0128 16:47:56.276979 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e387959b7b9ff22ae9799ecc53e64653184bc5bc4dd74fba8d73a8c042f01307 is running failed: container process not found" containerID="e387959b7b9ff22ae9799ecc53e64653184bc5bc4dd74fba8d73a8c042f01307" cmd=["grpc_health_probe","-addr=:50051"] Jan 28 16:47:56 crc kubenswrapper[4811]: E0128 16:47:56.277379 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e387959b7b9ff22ae9799ecc53e64653184bc5bc4dd74fba8d73a8c042f01307 is running failed: container process not found" containerID="e387959b7b9ff22ae9799ecc53e64653184bc5bc4dd74fba8d73a8c042f01307" cmd=["grpc_health_probe","-addr=:50051"] Jan 28 16:47:56 crc kubenswrapper[4811]: E0128 16:47:56.277475 4811 prober.go:104] "Probe 
errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e387959b7b9ff22ae9799ecc53e64653184bc5bc4dd74fba8d73a8c042f01307 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-fxhw7" podUID="f3e4cdc3-cbdc-45e1-b837-ba30d1976829" containerName="registry-server" Jan 28 16:47:58 crc kubenswrapper[4811]: I0128 16:47:58.483461 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fxhw7_f3e4cdc3-cbdc-45e1-b837-ba30d1976829/registry-server/0.log" Jan 28 16:47:58 crc kubenswrapper[4811]: I0128 16:47:58.485543 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fxhw7" Jan 28 16:47:58 crc kubenswrapper[4811]: I0128 16:47:58.584392 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fxhw7_f3e4cdc3-cbdc-45e1-b837-ba30d1976829/registry-server/0.log" Jan 28 16:47:58 crc kubenswrapper[4811]: I0128 16:47:58.585379 4811 generic.go:334] "Generic (PLEG): container finished" podID="f3e4cdc3-cbdc-45e1-b837-ba30d1976829" containerID="e387959b7b9ff22ae9799ecc53e64653184bc5bc4dd74fba8d73a8c042f01307" exitCode=137 Jan 28 16:47:58 crc kubenswrapper[4811]: I0128 16:47:58.585457 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fxhw7" event={"ID":"f3e4cdc3-cbdc-45e1-b837-ba30d1976829","Type":"ContainerDied","Data":"e387959b7b9ff22ae9799ecc53e64653184bc5bc4dd74fba8d73a8c042f01307"} Jan 28 16:47:58 crc kubenswrapper[4811]: I0128 16:47:58.585586 4811 scope.go:117] "RemoveContainer" containerID="e387959b7b9ff22ae9799ecc53e64653184bc5bc4dd74fba8d73a8c042f01307" Jan 28 16:47:58 crc kubenswrapper[4811]: I0128 16:47:58.608512 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3e4cdc3-cbdc-45e1-b837-ba30d1976829-catalog-content\") pod \"f3e4cdc3-cbdc-45e1-b837-ba30d1976829\" (UID: \"f3e4cdc3-cbdc-45e1-b837-ba30d1976829\") " Jan 28 16:47:58 crc kubenswrapper[4811]: I0128 16:47:58.608962 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-srg9s\" (UniqueName: \"kubernetes.io/projected/f3e4cdc3-cbdc-45e1-b837-ba30d1976829-kube-api-access-srg9s\") pod \"f3e4cdc3-cbdc-45e1-b837-ba30d1976829\" (UID: \"f3e4cdc3-cbdc-45e1-b837-ba30d1976829\") " Jan 28 16:47:58 crc kubenswrapper[4811]: I0128 16:47:58.609128 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3e4cdc3-cbdc-45e1-b837-ba30d1976829-utilities\") pod \"f3e4cdc3-cbdc-45e1-b837-ba30d1976829\" (UID: \"f3e4cdc3-cbdc-45e1-b837-ba30d1976829\") " Jan 28 16:47:58 crc kubenswrapper[4811]: I0128 16:47:58.906210 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3e4cdc3-cbdc-45e1-b837-ba30d1976829-utilities" (OuterVolumeSpecName: "utilities") pod "f3e4cdc3-cbdc-45e1-b837-ba30d1976829" (UID: "f3e4cdc3-cbdc-45e1-b837-ba30d1976829"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:47:58 crc kubenswrapper[4811]: I0128 16:47:58.913361 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3e4cdc3-cbdc-45e1-b837-ba30d1976829-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 16:47:58 crc kubenswrapper[4811]: I0128 16:47:58.917973 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3e4cdc3-cbdc-45e1-b837-ba30d1976829-kube-api-access-srg9s" (OuterVolumeSpecName: "kube-api-access-srg9s") pod "f3e4cdc3-cbdc-45e1-b837-ba30d1976829" (UID: "f3e4cdc3-cbdc-45e1-b837-ba30d1976829"). InnerVolumeSpecName "kube-api-access-srg9s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:47:59 crc kubenswrapper[4811]: I0128 16:47:59.014901 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-srg9s\" (UniqueName: \"kubernetes.io/projected/f3e4cdc3-cbdc-45e1-b837-ba30d1976829-kube-api-access-srg9s\") on node \"crc\" DevicePath \"\"" Jan 28 16:47:59 crc kubenswrapper[4811]: I0128 16:47:59.198815 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3e4cdc3-cbdc-45e1-b837-ba30d1976829-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f3e4cdc3-cbdc-45e1-b837-ba30d1976829" (UID: "f3e4cdc3-cbdc-45e1-b837-ba30d1976829"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:47:59 crc kubenswrapper[4811]: I0128 16:47:59.217810 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3e4cdc3-cbdc-45e1-b837-ba30d1976829-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 16:47:59 crc kubenswrapper[4811]: I0128 16:47:59.360230 4811 scope.go:117] "RemoveContainer" containerID="43f766d8292909a2b124eb332a4b3f264150ac1ba54071a89e53d24342c95f19" Jan 28 16:47:59 crc kubenswrapper[4811]: I0128 16:47:59.457648 4811 scope.go:117] "RemoveContainer" containerID="1b8c1dd36756d2b8e21880bc90ed213c8d0d3c3972f8ce744ce0ed3791dfe2b0" Jan 28 16:47:59 crc kubenswrapper[4811]: I0128 16:47:59.595282 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fxhw7" event={"ID":"f3e4cdc3-cbdc-45e1-b837-ba30d1976829","Type":"ContainerDied","Data":"aa369c0680bc887a6687bdb77e3c9c03e84d5be4a9b851b144c48d45ddccaccf"} Jan 28 16:47:59 crc kubenswrapper[4811]: I0128 16:47:59.595385 4811 util.go:48] "No ready sandbox for pod can be found. 
Jan 28 16:47:59 crc kubenswrapper[4811]: I0128 16:47:59.643822 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fxhw7"]
Jan 28 16:47:59 crc kubenswrapper[4811]: I0128 16:47:59.649587 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fxhw7"]
Jan 28 16:48:00 crc kubenswrapper[4811]: I0128 16:48:00.350800 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3e4cdc3-cbdc-45e1-b837-ba30d1976829" path="/var/lib/kubelet/pods/f3e4cdc3-cbdc-45e1-b837-ba30d1976829/volumes"
Jan 28 16:48:00 crc kubenswrapper[4811]: I0128 16:48:00.604110 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zl8bb" event={"ID":"02d0dda0-7114-4dc5-9a8d-1ce910c9b21a","Type":"ContainerStarted","Data":"1c54bec8b9dcfb12acb0568debecd8a9859dee8df0703084f02cc38a5614884a"}
Jan 28 16:48:00 crc kubenswrapper[4811]: I0128 16:48:00.630312 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-zl8bb" podStartSLOduration=3.837186979 podStartE2EDuration="20.630294741s" podCreationTimestamp="2026-01-28 16:47:40 +0000 UTC" firstStartedPulling="2026-01-28 16:47:42.75099136 +0000 UTC m=+3755.505354943" lastFinishedPulling="2026-01-28 16:47:59.544099132 +0000 UTC m=+3772.298462705" observedRunningTime="2026-01-28 16:48:00.625560222 +0000 UTC m=+3773.379923805" watchObservedRunningTime="2026-01-28 16:48:00.630294741 +0000 UTC m=+3773.384658324"
Jan 28 16:48:00 crc kubenswrapper[4811]: I0128 16:48:00.755910 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-zl8bb"
Jan 28 16:48:00 crc kubenswrapper[4811]: I0128 16:48:00.755971 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-zl8bb"
Jan 28 16:48:01 crc kubenswrapper[4811]: I0128 16:48:01.339395 4811 scope.go:117] "RemoveContainer" containerID="c45eec3aa18190b78e9326894343a9c07c27b2874b1233774091df8571bf91d8"
Jan 28 16:48:01 crc kubenswrapper[4811]: E0128 16:48:01.340141 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 16:48:01 crc kubenswrapper[4811]: I0128 16:48:01.797729 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-zl8bb" podUID="02d0dda0-7114-4dc5-9a8d-1ce910c9b21a" containerName="registry-server" probeResult="failure" output=<
Jan 28 16:48:01 crc kubenswrapper[4811]: 	timeout: failed to connect service ":50051" within 1s
Jan 28 16:48:01 crc kubenswrapper[4811]:  >
Jan 28 16:48:10 crc kubenswrapper[4811]: I0128 16:48:10.824128 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-zl8bb"
Jan 28 16:48:10 crc kubenswrapper[4811]: I0128 16:48:10.887102 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-zl8bb"
Jan 28 16:48:11 crc kubenswrapper[4811]: I0128 16:48:11.606277 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zl8bb"]
Jan 28 16:48:12 crc kubenswrapper[4811]: I0128 16:48:12.689560 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-zl8bb" podUID="02d0dda0-7114-4dc5-9a8d-1ce910c9b21a" containerName="registry-server" containerID="cri-o://1c54bec8b9dcfb12acb0568debecd8a9859dee8df0703084f02cc38a5614884a" gracePeriod=2
Jan 28 16:48:13 crc kubenswrapper[4811]: I0128 16:48:13.629315 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zl8bb"
Jan 28 16:48:13 crc kubenswrapper[4811]: I0128 16:48:13.642224 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02d0dda0-7114-4dc5-9a8d-1ce910c9b21a-utilities\") pod \"02d0dda0-7114-4dc5-9a8d-1ce910c9b21a\" (UID: \"02d0dda0-7114-4dc5-9a8d-1ce910c9b21a\") "
Jan 28 16:48:13 crc kubenswrapper[4811]: I0128 16:48:13.642277 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02d0dda0-7114-4dc5-9a8d-1ce910c9b21a-catalog-content\") pod \"02d0dda0-7114-4dc5-9a8d-1ce910c9b21a\" (UID: \"02d0dda0-7114-4dc5-9a8d-1ce910c9b21a\") "
Jan 28 16:48:13 crc kubenswrapper[4811]: I0128 16:48:13.642482 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qqpfv\" (UniqueName: \"kubernetes.io/projected/02d0dda0-7114-4dc5-9a8d-1ce910c9b21a-kube-api-access-qqpfv\") pod \"02d0dda0-7114-4dc5-9a8d-1ce910c9b21a\" (UID: \"02d0dda0-7114-4dc5-9a8d-1ce910c9b21a\") "
Jan 28 16:48:13 crc kubenswrapper[4811]: I0128 16:48:13.643382 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02d0dda0-7114-4dc5-9a8d-1ce910c9b21a-utilities" (OuterVolumeSpecName: "utilities") pod "02d0dda0-7114-4dc5-9a8d-1ce910c9b21a" (UID: "02d0dda0-7114-4dc5-9a8d-1ce910c9b21a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 16:48:13 crc kubenswrapper[4811]: I0128 16:48:13.645113 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02d0dda0-7114-4dc5-9a8d-1ce910c9b21a-utilities\") on node \"crc\" DevicePath \"\""
Jan 28 16:48:13 crc kubenswrapper[4811]: I0128 16:48:13.654065 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02d0dda0-7114-4dc5-9a8d-1ce910c9b21a-kube-api-access-qqpfv" (OuterVolumeSpecName: "kube-api-access-qqpfv") pod "02d0dda0-7114-4dc5-9a8d-1ce910c9b21a" (UID: "02d0dda0-7114-4dc5-9a8d-1ce910c9b21a"). InnerVolumeSpecName "kube-api-access-qqpfv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 16:48:13 crc kubenswrapper[4811]: I0128 16:48:13.669599 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02d0dda0-7114-4dc5-9a8d-1ce910c9b21a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "02d0dda0-7114-4dc5-9a8d-1ce910c9b21a" (UID: "02d0dda0-7114-4dc5-9a8d-1ce910c9b21a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:48:13 crc kubenswrapper[4811]: I0128 16:48:13.698232 4811 generic.go:334] "Generic (PLEG): container finished" podID="02d0dda0-7114-4dc5-9a8d-1ce910c9b21a" containerID="1c54bec8b9dcfb12acb0568debecd8a9859dee8df0703084f02cc38a5614884a" exitCode=0 Jan 28 16:48:13 crc kubenswrapper[4811]: I0128 16:48:13.698272 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zl8bb" event={"ID":"02d0dda0-7114-4dc5-9a8d-1ce910c9b21a","Type":"ContainerDied","Data":"1c54bec8b9dcfb12acb0568debecd8a9859dee8df0703084f02cc38a5614884a"} Jan 28 16:48:13 crc kubenswrapper[4811]: I0128 16:48:13.698298 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zl8bb" event={"ID":"02d0dda0-7114-4dc5-9a8d-1ce910c9b21a","Type":"ContainerDied","Data":"7e6a3308758575ab16083ee2b5af94e178e3743d54298d83405d02e73f4ba031"} Jan 28 16:48:13 crc kubenswrapper[4811]: I0128 16:48:13.698314 4811 scope.go:117] "RemoveContainer" containerID="1c54bec8b9dcfb12acb0568debecd8a9859dee8df0703084f02cc38a5614884a" Jan 28 16:48:13 crc kubenswrapper[4811]: I0128 16:48:13.698312 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zl8bb" Jan 28 16:48:13 crc kubenswrapper[4811]: I0128 16:48:13.731167 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zl8bb"] Jan 28 16:48:13 crc kubenswrapper[4811]: I0128 16:48:13.731228 4811 scope.go:117] "RemoveContainer" containerID="33590c37d752d152f1a331ee2f7794c67ddf12542705f350cc326bbcc8e1ce86" Jan 28 16:48:13 crc kubenswrapper[4811]: I0128 16:48:13.736849 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-zl8bb"] Jan 28 16:48:13 crc kubenswrapper[4811]: I0128 16:48:13.746617 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qqpfv\" (UniqueName: \"kubernetes.io/projected/02d0dda0-7114-4dc5-9a8d-1ce910c9b21a-kube-api-access-qqpfv\") on node \"crc\" DevicePath \"\"" Jan 28 16:48:13 crc kubenswrapper[4811]: I0128 16:48:13.746645 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02d0dda0-7114-4dc5-9a8d-1ce910c9b21a-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 16:48:13 crc kubenswrapper[4811]: I0128 16:48:13.746830 4811 scope.go:117] "RemoveContainer" containerID="6524722183c5793a7580cb6d732f708b0d2926ab502c19e6d3595980d4ea85ba" Jan 28 16:48:13 crc kubenswrapper[4811]: I0128 16:48:13.771370 4811 scope.go:117] "RemoveContainer" containerID="1c54bec8b9dcfb12acb0568debecd8a9859dee8df0703084f02cc38a5614884a" Jan 28 16:48:13 crc kubenswrapper[4811]: E0128 16:48:13.771792 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c54bec8b9dcfb12acb0568debecd8a9859dee8df0703084f02cc38a5614884a\": container with ID starting with 1c54bec8b9dcfb12acb0568debecd8a9859dee8df0703084f02cc38a5614884a not found: ID does not exist" containerID="1c54bec8b9dcfb12acb0568debecd8a9859dee8df0703084f02cc38a5614884a" Jan 28 16:48:13 crc kubenswrapper[4811]: I0128 16:48:13.771827 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c54bec8b9dcfb12acb0568debecd8a9859dee8df0703084f02cc38a5614884a"} err="failed to get container status 
\"1c54bec8b9dcfb12acb0568debecd8a9859dee8df0703084f02cc38a5614884a\": rpc error: code = NotFound desc = could not find container \"1c54bec8b9dcfb12acb0568debecd8a9859dee8df0703084f02cc38a5614884a\": container with ID starting with 1c54bec8b9dcfb12acb0568debecd8a9859dee8df0703084f02cc38a5614884a not found: ID does not exist" Jan 28 16:48:13 crc kubenswrapper[4811]: I0128 16:48:13.771861 4811 scope.go:117] "RemoveContainer" containerID="33590c37d752d152f1a331ee2f7794c67ddf12542705f350cc326bbcc8e1ce86" Jan 28 16:48:13 crc kubenswrapper[4811]: E0128 16:48:13.772258 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33590c37d752d152f1a331ee2f7794c67ddf12542705f350cc326bbcc8e1ce86\": container with ID starting with 33590c37d752d152f1a331ee2f7794c67ddf12542705f350cc326bbcc8e1ce86 not found: ID does not exist" containerID="33590c37d752d152f1a331ee2f7794c67ddf12542705f350cc326bbcc8e1ce86" Jan 28 16:48:13 crc kubenswrapper[4811]: I0128 16:48:13.772299 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33590c37d752d152f1a331ee2f7794c67ddf12542705f350cc326bbcc8e1ce86"} err="failed to get container status \"33590c37d752d152f1a331ee2f7794c67ddf12542705f350cc326bbcc8e1ce86\": rpc error: code = NotFound desc = could not find container \"33590c37d752d152f1a331ee2f7794c67ddf12542705f350cc326bbcc8e1ce86\": container with ID starting with 33590c37d752d152f1a331ee2f7794c67ddf12542705f350cc326bbcc8e1ce86 not found: ID does not exist" Jan 28 16:48:13 crc kubenswrapper[4811]: I0128 16:48:13.772331 4811 scope.go:117] "RemoveContainer" containerID="6524722183c5793a7580cb6d732f708b0d2926ab502c19e6d3595980d4ea85ba" Jan 28 16:48:13 crc kubenswrapper[4811]: E0128 16:48:13.772719 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6524722183c5793a7580cb6d732f708b0d2926ab502c19e6d3595980d4ea85ba\": container with ID starting with 6524722183c5793a7580cb6d732f708b0d2926ab502c19e6d3595980d4ea85ba not found: ID does not exist" containerID="6524722183c5793a7580cb6d732f708b0d2926ab502c19e6d3595980d4ea85ba" Jan 28 16:48:13 crc kubenswrapper[4811]: I0128 16:48:13.772739 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6524722183c5793a7580cb6d732f708b0d2926ab502c19e6d3595980d4ea85ba"} err="failed to get container status \"6524722183c5793a7580cb6d732f708b0d2926ab502c19e6d3595980d4ea85ba\": rpc error: code = NotFound desc = could not find container \"6524722183c5793a7580cb6d732f708b0d2926ab502c19e6d3595980d4ea85ba\": container with ID starting with 6524722183c5793a7580cb6d732f708b0d2926ab502c19e6d3595980d4ea85ba not found: ID does not exist" Jan 28 16:48:14 crc kubenswrapper[4811]: I0128 16:48:14.347565 4811 scope.go:117] "RemoveContainer" containerID="c45eec3aa18190b78e9326894343a9c07c27b2874b1233774091df8571bf91d8" Jan 28 16:48:14 crc kubenswrapper[4811]: E0128 16:48:14.349270 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:48:14 crc kubenswrapper[4811]: I0128 16:48:14.378882 4811 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02d0dda0-7114-4dc5-9a8d-1ce910c9b21a" path="/var/lib/kubelet/pods/02d0dda0-7114-4dc5-9a8d-1ce910c9b21a/volumes" Jan 28 16:48:28 crc kubenswrapper[4811]: I0128 16:48:28.347294 4811 scope.go:117] "RemoveContainer" containerID="c45eec3aa18190b78e9326894343a9c07c27b2874b1233774091df8571bf91d8" Jan 28 16:48:28 crc kubenswrapper[4811]: E0128 16:48:28.348367 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:48:43 crc kubenswrapper[4811]: I0128 16:48:43.339521 4811 scope.go:117] "RemoveContainer" containerID="c45eec3aa18190b78e9326894343a9c07c27b2874b1233774091df8571bf91d8" Jan 28 16:48:43 crc kubenswrapper[4811]: E0128 16:48:43.340965 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:48:56 crc kubenswrapper[4811]: I0128 16:48:56.339849 4811 scope.go:117] "RemoveContainer" containerID="c45eec3aa18190b78e9326894343a9c07c27b2874b1233774091df8571bf91d8" Jan 28 16:48:56 crc kubenswrapper[4811]: E0128 16:48:56.340630 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:49:08 crc kubenswrapper[4811]: I0128 16:49:08.343451 4811 scope.go:117] "RemoveContainer" containerID="c45eec3aa18190b78e9326894343a9c07c27b2874b1233774091df8571bf91d8" Jan 28 16:49:08 crc kubenswrapper[4811]: E0128 16:49:08.345000 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:49:20 crc kubenswrapper[4811]: I0128 16:49:20.340513 4811 scope.go:117] "RemoveContainer" containerID="c45eec3aa18190b78e9326894343a9c07c27b2874b1233774091df8571bf91d8" Jan 28 16:49:20 crc kubenswrapper[4811]: E0128 16:49:20.342653 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 
Jan 28 16:49:33 crc kubenswrapper[4811]: I0128 16:49:33.339864 4811 scope.go:117] "RemoveContainer" containerID="c45eec3aa18190b78e9326894343a9c07c27b2874b1233774091df8571bf91d8"
Jan 28 16:49:34 crc kubenswrapper[4811]: I0128 16:49:34.390177 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"6ffc6181c2df9e8122892d36e64d8a53ec62a9dcaf506090ad12c923723f0bda"}
Jan 28 16:51:33 crc kubenswrapper[4811]: I0128 16:51:33.087462 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 16:51:33 crc kubenswrapper[4811]: I0128 16:51:33.088042 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 16:52:03 crc kubenswrapper[4811]: I0128 16:52:03.088042 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 16:52:03 crc kubenswrapper[4811]: I0128 16:52:03.088876 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 16:52:33 crc kubenswrapper[4811]: I0128 16:52:33.087008 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 16:52:33 crc kubenswrapper[4811]: I0128 16:52:33.087644 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 16:52:33 crc kubenswrapper[4811]: I0128 16:52:33.087694 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6"
Jan 28 16:52:33 crc kubenswrapper[4811]: I0128 16:52:33.088342 4811 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6ffc6181c2df9e8122892d36e64d8a53ec62a9dcaf506090ad12c923723f0bda"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 28 16:52:33 crc kubenswrapper[4811]: I0128 16:52:33.088402 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://6ffc6181c2df9e8122892d36e64d8a53ec62a9dcaf506090ad12c923723f0bda" gracePeriod=600
Jan 28 16:52:33 crc kubenswrapper[4811]: I0128 16:52:33.641675 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="6ffc6181c2df9e8122892d36e64d8a53ec62a9dcaf506090ad12c923723f0bda" exitCode=0
Jan 28 16:52:33 crc kubenswrapper[4811]: I0128 16:52:33.641770 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"6ffc6181c2df9e8122892d36e64d8a53ec62a9dcaf506090ad12c923723f0bda"}
Jan 28 16:52:33 crc kubenswrapper[4811]: I0128 16:52:33.642056 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"2f832fdc03f2c74b172c98339268d3805e889021c7e9a228d350776a1560d0b8"}
Jan 28 16:52:33 crc kubenswrapper[4811]: I0128 16:52:33.642079 4811 scope.go:117] "RemoveContainer" containerID="c45eec3aa18190b78e9326894343a9c07c27b2874b1233774091df8571bf91d8"
Jan 28 16:54:33 crc kubenswrapper[4811]: I0128 16:54:33.087645 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 16:54:33 crc kubenswrapper[4811]: I0128 16:54:33.088162 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 16:54:38 crc kubenswrapper[4811]: I0128 16:54:38.438418 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-j5blb"]
Jan 28 16:54:38 crc kubenswrapper[4811]: E0128 16:54:38.439222 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02d0dda0-7114-4dc5-9a8d-1ce910c9b21a" containerName="extract-content"
Jan 28 16:54:38 crc kubenswrapper[4811]: I0128 16:54:38.439235 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="02d0dda0-7114-4dc5-9a8d-1ce910c9b21a" containerName="extract-content"
Jan 28 16:54:38 crc kubenswrapper[4811]: E0128 16:54:38.439249 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02d0dda0-7114-4dc5-9a8d-1ce910c9b21a" containerName="extract-utilities"
Jan 28 16:54:38 crc kubenswrapper[4811]: I0128 16:54:38.439283 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="02d0dda0-7114-4dc5-9a8d-1ce910c9b21a" containerName="extract-utilities"
Jan 28 16:54:38 crc kubenswrapper[4811]: E0128 16:54:38.439294 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3e4cdc3-cbdc-45e1-b837-ba30d1976829" containerName="extract-utilities"
Jan 28 16:54:38 crc kubenswrapper[4811]: I0128 16:54:38.439300 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3e4cdc3-cbdc-45e1-b837-ba30d1976829" containerName="extract-utilities"
Jan 28 16:54:38 crc kubenswrapper[4811]: E0128 16:54:38.439314 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3e4cdc3-cbdc-45e1-b837-ba30d1976829" containerName="extract-content"
Jan 28 16:54:38 crc kubenswrapper[4811]: I0128 16:54:38.439319 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3e4cdc3-cbdc-45e1-b837-ba30d1976829" containerName="extract-content"
Jan 28 16:54:38 crc kubenswrapper[4811]: E0128 16:54:38.439332 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02d0dda0-7114-4dc5-9a8d-1ce910c9b21a" containerName="registry-server"
Jan 28 16:54:38 crc kubenswrapper[4811]: I0128 16:54:38.439340 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="02d0dda0-7114-4dc5-9a8d-1ce910c9b21a" containerName="registry-server"
Jan 28 16:54:38 crc kubenswrapper[4811]: E0128 16:54:38.439349 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3e4cdc3-cbdc-45e1-b837-ba30d1976829" containerName="registry-server"
Jan 28 16:54:38 crc kubenswrapper[4811]: I0128 16:54:38.439355 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3e4cdc3-cbdc-45e1-b837-ba30d1976829" containerName="registry-server"
Jan 28 16:54:38 crc kubenswrapper[4811]: I0128 16:54:38.439508 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3e4cdc3-cbdc-45e1-b837-ba30d1976829" containerName="registry-server"
Jan 28 16:54:38 crc kubenswrapper[4811]: I0128 16:54:38.439536 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="02d0dda0-7114-4dc5-9a8d-1ce910c9b21a" containerName="registry-server"
Jan 28 16:54:38 crc kubenswrapper[4811]: I0128 16:54:38.440623 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-j5blb"
Jan 28 16:54:38 crc kubenswrapper[4811]: I0128 16:54:38.452887 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-j5blb"]
Jan 28 16:54:38 crc kubenswrapper[4811]: I0128 16:54:38.588649 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2178b105-4528-4a3c-be83-c0c4280e40b5-catalog-content\") pod \"community-operators-j5blb\" (UID: \"2178b105-4528-4a3c-be83-c0c4280e40b5\") " pod="openshift-marketplace/community-operators-j5blb"
Jan 28 16:54:38 crc kubenswrapper[4811]: I0128 16:54:38.588773 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2178b105-4528-4a3c-be83-c0c4280e40b5-utilities\") pod \"community-operators-j5blb\" (UID: \"2178b105-4528-4a3c-be83-c0c4280e40b5\") " pod="openshift-marketplace/community-operators-j5blb"
Jan 28 16:54:38 crc kubenswrapper[4811]: I0128 16:54:38.588850 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pk8tg\" (UniqueName: \"kubernetes.io/projected/2178b105-4528-4a3c-be83-c0c4280e40b5-kube-api-access-pk8tg\") pod \"community-operators-j5blb\" (UID: \"2178b105-4528-4a3c-be83-c0c4280e40b5\") " pod="openshift-marketplace/community-operators-j5blb"
Jan 28 16:54:38 crc kubenswrapper[4811]: I0128 16:54:38.690576 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2178b105-4528-4a3c-be83-c0c4280e40b5-utilities\") pod \"community-operators-j5blb\" (UID: \"2178b105-4528-4a3c-be83-c0c4280e40b5\") " pod="openshift-marketplace/community-operators-j5blb"
Jan 28 16:54:38 crc kubenswrapper[4811]: I0128 16:54:38.691104 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2178b105-4528-4a3c-be83-c0c4280e40b5-utilities\") pod \"community-operators-j5blb\" (UID: \"2178b105-4528-4a3c-be83-c0c4280e40b5\") " pod="openshift-marketplace/community-operators-j5blb"
Jan 28 16:54:38 crc kubenswrapper[4811]: I0128 16:54:38.691258 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pk8tg\" (UniqueName: \"kubernetes.io/projected/2178b105-4528-4a3c-be83-c0c4280e40b5-kube-api-access-pk8tg\") pod \"community-operators-j5blb\" (UID: \"2178b105-4528-4a3c-be83-c0c4280e40b5\") " pod="openshift-marketplace/community-operators-j5blb"
Jan 28 16:54:38 crc kubenswrapper[4811]: I0128 16:54:38.691672 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2178b105-4528-4a3c-be83-c0c4280e40b5-catalog-content\") pod \"community-operators-j5blb\" (UID: \"2178b105-4528-4a3c-be83-c0c4280e40b5\") " pod="openshift-marketplace/community-operators-j5blb"
Jan 28 16:54:38 crc kubenswrapper[4811]: I0128 16:54:38.691972 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2178b105-4528-4a3c-be83-c0c4280e40b5-catalog-content\") pod \"community-operators-j5blb\" (UID: \"2178b105-4528-4a3c-be83-c0c4280e40b5\") " pod="openshift-marketplace/community-operators-j5blb"
Jan 28 16:54:38 crc kubenswrapper[4811]: I0128 16:54:38.714207 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pk8tg\" (UniqueName: \"kubernetes.io/projected/2178b105-4528-4a3c-be83-c0c4280e40b5-kube-api-access-pk8tg\") pod \"community-operators-j5blb\" (UID: \"2178b105-4528-4a3c-be83-c0c4280e40b5\") " pod="openshift-marketplace/community-operators-j5blb"
Jan 28 16:54:38 crc kubenswrapper[4811]: I0128 16:54:38.758618 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-j5blb"
Jan 28 16:54:39 crc kubenswrapper[4811]: I0128 16:54:39.031597 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-j5blb"]
Jan 28 16:54:39 crc kubenswrapper[4811]: I0128 16:54:39.188295 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j5blb" event={"ID":"2178b105-4528-4a3c-be83-c0c4280e40b5","Type":"ContainerStarted","Data":"07365b6ff136fa2c8f1149e71e50499266a9f62913c49f011cf09408e84effe0"}
Jan 28 16:54:40 crc kubenswrapper[4811]: I0128 16:54:40.196568 4811 generic.go:334] "Generic (PLEG): container finished" podID="2178b105-4528-4a3c-be83-c0c4280e40b5" containerID="97ee7c1747e4b2ad3f80fdacd74169e73e00a29c6a600b20635f9fbc15b96092" exitCode=0
Jan 28 16:54:40 crc kubenswrapper[4811]: I0128 16:54:40.196611 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j5blb" event={"ID":"2178b105-4528-4a3c-be83-c0c4280e40b5","Type":"ContainerDied","Data":"97ee7c1747e4b2ad3f80fdacd74169e73e00a29c6a600b20635f9fbc15b96092"}
Jan 28 16:54:40 crc kubenswrapper[4811]: I0128 16:54:40.199185 4811 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 28 16:54:45 crc kubenswrapper[4811]: I0128 16:54:45.236847 4811 generic.go:334] "Generic (PLEG): container finished" podID="2178b105-4528-4a3c-be83-c0c4280e40b5" containerID="f7c6ae3901e1d246a07d368a425f8680d262211717e170d80cd0657bacf621b0" exitCode=0
Jan 28 16:54:45 crc kubenswrapper[4811]: I0128 16:54:45.237290 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j5blb" event={"ID":"2178b105-4528-4a3c-be83-c0c4280e40b5","Type":"ContainerDied","Data":"f7c6ae3901e1d246a07d368a425f8680d262211717e170d80cd0657bacf621b0"}
Jan 28 16:54:46 crc kubenswrapper[4811]: I0128 16:54:46.244926 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j5blb" event={"ID":"2178b105-4528-4a3c-be83-c0c4280e40b5","Type":"ContainerStarted","Data":"8544410b22139b8422837506ce3f85d0e855893dd94d7de451d14e7b44fc0464"}
Jan 28 16:54:46 crc kubenswrapper[4811]: I0128 16:54:46.264810 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-j5blb" podStartSLOduration=2.848987566 podStartE2EDuration="8.264774354s" podCreationTimestamp="2026-01-28 16:54:38 +0000 UTC" firstStartedPulling="2026-01-28 16:54:40.198904554 +0000 UTC m=+4172.953268147" lastFinishedPulling="2026-01-28 16:54:45.614691362 +0000 UTC m=+4178.369054935" observedRunningTime="2026-01-28 16:54:46.263630912 +0000 UTC m=+4179.017994495" watchObservedRunningTime="2026-01-28 16:54:46.264774354 +0000 UTC m=+4179.019137937"
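[editor's note] The pod_startup_latency_tracker entry above carries its own arithmetic: podStartE2EDuration is observedRunningTime minus podCreationTimestamp, and podStartSLOduration is that figure minus the image-pull window, measured on the kubelet's monotonic clock (the m=+... values). Recomputing from the logged numbers:

    # Recomputing the community-operators-j5blb startup figures from the entry
    # above, using the monotonic m=+... timestamps it reports.
    first_pull = 4172.953268147   # firstStartedPulling  m=+...
    last_pull  = 4178.369054935   # lastFinishedPulling  m=+...
    e2e        = 8.264774354      # podStartE2EDuration in seconds

    pull_window = last_pull - first_pull   # 5.415786788s spent pulling the image
    slo = e2e - pull_window                # 2.848987566s -> matches podStartSLOduration
    print(f"pull={pull_window:.9f}s slo={slo:.9f}s")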
status="ready" pod="openshift-marketplace/community-operators-j5blb" Jan 28 16:54:58 crc kubenswrapper[4811]: I0128 16:54:58.971351 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-j5blb"] Jan 28 16:54:59 crc kubenswrapper[4811]: I0128 16:54:59.006800 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-n9g6x"] Jan 28 16:54:59 crc kubenswrapper[4811]: I0128 16:54:59.007035 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-n9g6x" podUID="30afaad7-f370-49c2-8813-9f66d50ad6c5" containerName="registry-server" containerID="cri-o://22b17862f1b05427d66dd3a614214138c961a643739ced7f74604e99733cc1dd" gracePeriod=2 Jan 28 16:54:59 crc kubenswrapper[4811]: I0128 16:54:59.358653 4811 generic.go:334] "Generic (PLEG): container finished" podID="30afaad7-f370-49c2-8813-9f66d50ad6c5" containerID="22b17862f1b05427d66dd3a614214138c961a643739ced7f74604e99733cc1dd" exitCode=0 Jan 28 16:54:59 crc kubenswrapper[4811]: I0128 16:54:59.359014 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9g6x" event={"ID":"30afaad7-f370-49c2-8813-9f66d50ad6c5","Type":"ContainerDied","Data":"22b17862f1b05427d66dd3a614214138c961a643739ced7f74604e99733cc1dd"} Jan 28 16:54:59 crc kubenswrapper[4811]: I0128 16:54:59.474756 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n9g6x" Jan 28 16:54:59 crc kubenswrapper[4811]: I0128 16:54:59.651234 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30afaad7-f370-49c2-8813-9f66d50ad6c5-utilities\") pod \"30afaad7-f370-49c2-8813-9f66d50ad6c5\" (UID: \"30afaad7-f370-49c2-8813-9f66d50ad6c5\") " Jan 28 16:54:59 crc kubenswrapper[4811]: I0128 16:54:59.651288 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30afaad7-f370-49c2-8813-9f66d50ad6c5-catalog-content\") pod \"30afaad7-f370-49c2-8813-9f66d50ad6c5\" (UID: \"30afaad7-f370-49c2-8813-9f66d50ad6c5\") " Jan 28 16:54:59 crc kubenswrapper[4811]: I0128 16:54:59.651313 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gv2vr\" (UniqueName: \"kubernetes.io/projected/30afaad7-f370-49c2-8813-9f66d50ad6c5-kube-api-access-gv2vr\") pod \"30afaad7-f370-49c2-8813-9f66d50ad6c5\" (UID: \"30afaad7-f370-49c2-8813-9f66d50ad6c5\") " Jan 28 16:54:59 crc kubenswrapper[4811]: I0128 16:54:59.651863 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30afaad7-f370-49c2-8813-9f66d50ad6c5-utilities" (OuterVolumeSpecName: "utilities") pod "30afaad7-f370-49c2-8813-9f66d50ad6c5" (UID: "30afaad7-f370-49c2-8813-9f66d50ad6c5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:54:59 crc kubenswrapper[4811]: I0128 16:54:59.660614 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30afaad7-f370-49c2-8813-9f66d50ad6c5-kube-api-access-gv2vr" (OuterVolumeSpecName: "kube-api-access-gv2vr") pod "30afaad7-f370-49c2-8813-9f66d50ad6c5" (UID: "30afaad7-f370-49c2-8813-9f66d50ad6c5"). InnerVolumeSpecName "kube-api-access-gv2vr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:54:59 crc kubenswrapper[4811]: I0128 16:54:59.752689 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30afaad7-f370-49c2-8813-9f66d50ad6c5-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 16:54:59 crc kubenswrapper[4811]: I0128 16:54:59.752732 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gv2vr\" (UniqueName: \"kubernetes.io/projected/30afaad7-f370-49c2-8813-9f66d50ad6c5-kube-api-access-gv2vr\") on node \"crc\" DevicePath \"\"" Jan 28 16:54:59 crc kubenswrapper[4811]: I0128 16:54:59.763274 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30afaad7-f370-49c2-8813-9f66d50ad6c5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "30afaad7-f370-49c2-8813-9f66d50ad6c5" (UID: "30afaad7-f370-49c2-8813-9f66d50ad6c5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:54:59 crc kubenswrapper[4811]: I0128 16:54:59.853664 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30afaad7-f370-49c2-8813-9f66d50ad6c5-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 16:55:00 crc kubenswrapper[4811]: I0128 16:55:00.369393 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9g6x" event={"ID":"30afaad7-f370-49c2-8813-9f66d50ad6c5","Type":"ContainerDied","Data":"a390f24cdb05655642296e0c9485262864b8a2a349c7325a6f9de91e3ea1296d"} Jan 28 16:55:00 crc kubenswrapper[4811]: I0128 16:55:00.369464 4811 scope.go:117] "RemoveContainer" containerID="22b17862f1b05427d66dd3a614214138c961a643739ced7f74604e99733cc1dd" Jan 28 16:55:00 crc kubenswrapper[4811]: I0128 16:55:00.369608 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-n9g6x" Jan 28 16:55:00 crc kubenswrapper[4811]: I0128 16:55:00.397942 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-n9g6x"] Jan 28 16:55:00 crc kubenswrapper[4811]: I0128 16:55:00.407238 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-n9g6x"] Jan 28 16:55:00 crc kubenswrapper[4811]: I0128 16:55:00.408376 4811 scope.go:117] "RemoveContainer" containerID="c1cde5fe3cce8d09a8669e2c80aadb1b7acded66018bb8391861829e74a4a111" Jan 28 16:55:00 crc kubenswrapper[4811]: I0128 16:55:00.436725 4811 scope.go:117] "RemoveContainer" containerID="1d0912e720a10b86c8e52b41115c372758961b7dcc9e7260098c41d3fca7a015" Jan 28 16:55:02 crc kubenswrapper[4811]: I0128 16:55:02.348669 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30afaad7-f370-49c2-8813-9f66d50ad6c5" path="/var/lib/kubelet/pods/30afaad7-f370-49c2-8813-9f66d50ad6c5/volumes" Jan 28 16:55:03 crc kubenswrapper[4811]: I0128 16:55:03.087481 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 16:55:03 crc kubenswrapper[4811]: I0128 16:55:03.087558 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 16:55:33 crc kubenswrapper[4811]: I0128 16:55:33.087675 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 16:55:33 crc kubenswrapper[4811]: I0128 16:55:33.088329 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 16:55:33 crc kubenswrapper[4811]: I0128 16:55:33.088385 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" Jan 28 16:55:33 crc kubenswrapper[4811]: I0128 16:55:33.089124 4811 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2f832fdc03f2c74b172c98339268d3805e889021c7e9a228d350776a1560d0b8"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 28 16:55:33 crc kubenswrapper[4811]: I0128 16:55:33.089193 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://2f832fdc03f2c74b172c98339268d3805e889021c7e9a228d350776a1560d0b8" 
Jan 28 16:55:33 crc kubenswrapper[4811]: I0128 16:55:33.089193 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://2f832fdc03f2c74b172c98339268d3805e889021c7e9a228d350776a1560d0b8" gracePeriod=600
Jan 28 16:55:33 crc kubenswrapper[4811]: E0128 16:55:33.226647 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 16:55:33 crc kubenswrapper[4811]: I0128 16:55:33.594764 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="2f832fdc03f2c74b172c98339268d3805e889021c7e9a228d350776a1560d0b8" exitCode=0
Jan 28 16:55:33 crc kubenswrapper[4811]: I0128 16:55:33.594811 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"2f832fdc03f2c74b172c98339268d3805e889021c7e9a228d350776a1560d0b8"}
Jan 28 16:55:33 crc kubenswrapper[4811]: I0128 16:55:33.594854 4811 scope.go:117] "RemoveContainer" containerID="6ffc6181c2df9e8122892d36e64d8a53ec62a9dcaf506090ad12c923723f0bda"
Jan 28 16:55:33 crc kubenswrapper[4811]: I0128 16:55:33.595421 4811 scope.go:117] "RemoveContainer" containerID="2f832fdc03f2c74b172c98339268d3805e889021c7e9a228d350776a1560d0b8"
Jan 28 16:55:33 crc kubenswrapper[4811]: E0128 16:55:33.595770 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 16:55:47 crc kubenswrapper[4811]: I0128 16:55:47.339968 4811 scope.go:117] "RemoveContainer" containerID="2f832fdc03f2c74b172c98339268d3805e889021c7e9a228d350776a1560d0b8"
Jan 28 16:55:47 crc kubenswrapper[4811]: E0128 16:55:47.341085 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 16:56:01 crc kubenswrapper[4811]: I0128 16:56:01.339238 4811 scope.go:117] "RemoveContainer" containerID="2f832fdc03f2c74b172c98339268d3805e889021c7e9a228d350776a1560d0b8"
Jan 28 16:56:01 crc kubenswrapper[4811]: E0128 16:56:01.340026 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 16:56:12 crc kubenswrapper[4811]: I0128 16:56:12.339206 4811 scope.go:117] "RemoveContainer" containerID="2f832fdc03f2c74b172c98339268d3805e889021c7e9a228d350776a1560d0b8"
Jan 28 16:56:12 crc kubenswrapper[4811]: E0128 16:56:12.339918 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 16:56:24 crc kubenswrapper[4811]: I0128 16:56:24.339977 4811 scope.go:117] "RemoveContainer" containerID="2f832fdc03f2c74b172c98339268d3805e889021c7e9a228d350776a1560d0b8"
Jan 28 16:56:24 crc kubenswrapper[4811]: E0128 16:56:24.340845 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 16:56:35 crc kubenswrapper[4811]: I0128 16:56:35.339119 4811 scope.go:117] "RemoveContainer" containerID="2f832fdc03f2c74b172c98339268d3805e889021c7e9a228d350776a1560d0b8"
Jan 28 16:56:35 crc kubenswrapper[4811]: E0128 16:56:35.339891 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 16:56:46 crc kubenswrapper[4811]: I0128 16:56:46.339906 4811 scope.go:117] "RemoveContainer" containerID="2f832fdc03f2c74b172c98339268d3805e889021c7e9a228d350776a1560d0b8"
Jan 28 16:56:46 crc kubenswrapper[4811]: E0128 16:56:46.340578 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 16:56:59 crc kubenswrapper[4811]: I0128 16:56:59.340024 4811 scope.go:117] "RemoveContainer" containerID="2f832fdc03f2c74b172c98339268d3805e889021c7e9a228d350776a1560d0b8"
Jan 28 16:56:59 crc kubenswrapper[4811]: E0128 16:56:59.340918 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 16:57:14 crc kubenswrapper[4811]: I0128 16:57:14.339032 4811 scope.go:117] "RemoveContainer" containerID="2f832fdc03f2c74b172c98339268d3805e889021c7e9a228d350776a1560d0b8"
Jan 28 16:57:14 crc kubenswrapper[4811]: E0128 16:57:14.339847 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 16:57:28 crc kubenswrapper[4811]: I0128 16:57:28.343336 4811 scope.go:117] "RemoveContainer" containerID="2f832fdc03f2c74b172c98339268d3805e889021c7e9a228d350776a1560d0b8"
Jan 28 16:57:28 crc kubenswrapper[4811]: E0128 16:57:28.344037 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 16:57:40 crc kubenswrapper[4811]: I0128 16:57:40.339674 4811 scope.go:117] "RemoveContainer" containerID="2f832fdc03f2c74b172c98339268d3805e889021c7e9a228d350776a1560d0b8"
Jan 28 16:57:40 crc kubenswrapper[4811]: E0128 16:57:40.340495 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 16:57:53 crc kubenswrapper[4811]: I0128 16:57:53.339349 4811 scope.go:117] "RemoveContainer" containerID="2f832fdc03f2c74b172c98339268d3805e889021c7e9a228d350776a1560d0b8"
Jan 28 16:57:53 crc kubenswrapper[4811]: E0128 16:57:53.340271 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 16:57:58 crc kubenswrapper[4811]: I0128 16:57:58.847723 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5mlhl"]
Jan 28 16:57:58 crc kubenswrapper[4811]: E0128 16:57:58.848711 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30afaad7-f370-49c2-8813-9f66d50ad6c5" containerName="extract-utilities"
Jan 28 16:57:58 crc kubenswrapper[4811]: I0128 16:57:58.848727 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="30afaad7-f370-49c2-8813-9f66d50ad6c5" containerName="extract-utilities"
Jan 28 16:57:58 crc kubenswrapper[4811]: E0128 16:57:58.848742 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30afaad7-f370-49c2-8813-9f66d50ad6c5" containerName="registry-server"
Jan 28 16:57:58 crc kubenswrapper[4811]: I0128 16:57:58.848750 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="30afaad7-f370-49c2-8813-9f66d50ad6c5" containerName="registry-server"
Jan 28 16:57:58 crc kubenswrapper[4811]: E0128 16:57:58.848799 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30afaad7-f370-49c2-8813-9f66d50ad6c5" containerName="extract-content"
podUID="30afaad7-f370-49c2-8813-9f66d50ad6c5" containerName="extract-content" Jan 28 16:57:58 crc kubenswrapper[4811]: I0128 16:57:58.849023 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="30afaad7-f370-49c2-8813-9f66d50ad6c5" containerName="registry-server" Jan 28 16:57:58 crc kubenswrapper[4811]: I0128 16:57:58.850419 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5mlhl" Jan 28 16:57:58 crc kubenswrapper[4811]: I0128 16:57:58.869511 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5mlhl"] Jan 28 16:57:58 crc kubenswrapper[4811]: I0128 16:57:58.969047 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84167b2c-2f65-4466-a855-c54c0a71c16a-utilities\") pod \"redhat-operators-5mlhl\" (UID: \"84167b2c-2f65-4466-a855-c54c0a71c16a\") " pod="openshift-marketplace/redhat-operators-5mlhl" Jan 28 16:57:58 crc kubenswrapper[4811]: I0128 16:57:58.969255 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5p82b\" (UniqueName: \"kubernetes.io/projected/84167b2c-2f65-4466-a855-c54c0a71c16a-kube-api-access-5p82b\") pod \"redhat-operators-5mlhl\" (UID: \"84167b2c-2f65-4466-a855-c54c0a71c16a\") " pod="openshift-marketplace/redhat-operators-5mlhl" Jan 28 16:57:58 crc kubenswrapper[4811]: I0128 16:57:58.969396 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84167b2c-2f65-4466-a855-c54c0a71c16a-catalog-content\") pod \"redhat-operators-5mlhl\" (UID: \"84167b2c-2f65-4466-a855-c54c0a71c16a\") " pod="openshift-marketplace/redhat-operators-5mlhl" Jan 28 16:57:59 crc kubenswrapper[4811]: I0128 16:57:59.071285 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5p82b\" (UniqueName: \"kubernetes.io/projected/84167b2c-2f65-4466-a855-c54c0a71c16a-kube-api-access-5p82b\") pod \"redhat-operators-5mlhl\" (UID: \"84167b2c-2f65-4466-a855-c54c0a71c16a\") " pod="openshift-marketplace/redhat-operators-5mlhl" Jan 28 16:57:59 crc kubenswrapper[4811]: I0128 16:57:59.071415 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84167b2c-2f65-4466-a855-c54c0a71c16a-catalog-content\") pod \"redhat-operators-5mlhl\" (UID: \"84167b2c-2f65-4466-a855-c54c0a71c16a\") " pod="openshift-marketplace/redhat-operators-5mlhl" Jan 28 16:57:59 crc kubenswrapper[4811]: I0128 16:57:59.071488 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84167b2c-2f65-4466-a855-c54c0a71c16a-utilities\") pod \"redhat-operators-5mlhl\" (UID: \"84167b2c-2f65-4466-a855-c54c0a71c16a\") " pod="openshift-marketplace/redhat-operators-5mlhl" Jan 28 16:57:59 crc kubenswrapper[4811]: I0128 16:57:59.072054 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84167b2c-2f65-4466-a855-c54c0a71c16a-catalog-content\") pod \"redhat-operators-5mlhl\" (UID: \"84167b2c-2f65-4466-a855-c54c0a71c16a\") " pod="openshift-marketplace/redhat-operators-5mlhl" Jan 28 16:57:59 crc kubenswrapper[4811]: I0128 16:57:59.072118 4811 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84167b2c-2f65-4466-a855-c54c0a71c16a-utilities\") pod \"redhat-operators-5mlhl\" (UID: \"84167b2c-2f65-4466-a855-c54c0a71c16a\") " pod="openshift-marketplace/redhat-operators-5mlhl" Jan 28 16:57:59 crc kubenswrapper[4811]: I0128 16:57:59.096273 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5p82b\" (UniqueName: \"kubernetes.io/projected/84167b2c-2f65-4466-a855-c54c0a71c16a-kube-api-access-5p82b\") pod \"redhat-operators-5mlhl\" (UID: \"84167b2c-2f65-4466-a855-c54c0a71c16a\") " pod="openshift-marketplace/redhat-operators-5mlhl" Jan 28 16:57:59 crc kubenswrapper[4811]: I0128 16:57:59.170611 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5mlhl" Jan 28 16:57:59 crc kubenswrapper[4811]: I0128 16:57:59.607277 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5mlhl"] Jan 28 16:57:59 crc kubenswrapper[4811]: I0128 16:57:59.644506 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5mlhl" event={"ID":"84167b2c-2f65-4466-a855-c54c0a71c16a","Type":"ContainerStarted","Data":"d0994a256307000c9e53b00503d0ce129823efc2203ebd82bca4b478d8d2ecc6"} Jan 28 16:58:00 crc kubenswrapper[4811]: I0128 16:58:00.651324 4811 generic.go:334] "Generic (PLEG): container finished" podID="84167b2c-2f65-4466-a855-c54c0a71c16a" containerID="c2dc7a73a7d2c9af84ab2e5667e20136fab51c11a2147f5b15a73c7fecf011f4" exitCode=0 Jan 28 16:58:00 crc kubenswrapper[4811]: I0128 16:58:00.651384 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5mlhl" event={"ID":"84167b2c-2f65-4466-a855-c54c0a71c16a","Type":"ContainerDied","Data":"c2dc7a73a7d2c9af84ab2e5667e20136fab51c11a2147f5b15a73c7fecf011f4"} Jan 28 16:58:02 crc kubenswrapper[4811]: I0128 16:58:02.673235 4811 generic.go:334] "Generic (PLEG): container finished" podID="84167b2c-2f65-4466-a855-c54c0a71c16a" containerID="6f733eff338c4ca285a838e263a6cd22fa5075d83eed990b141c8b761ad1b9b4" exitCode=0 Jan 28 16:58:02 crc kubenswrapper[4811]: I0128 16:58:02.673280 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5mlhl" event={"ID":"84167b2c-2f65-4466-a855-c54c0a71c16a","Type":"ContainerDied","Data":"6f733eff338c4ca285a838e263a6cd22fa5075d83eed990b141c8b761ad1b9b4"} Jan 28 16:58:06 crc kubenswrapper[4811]: I0128 16:58:06.339289 4811 scope.go:117] "RemoveContainer" containerID="2f832fdc03f2c74b172c98339268d3805e889021c7e9a228d350776a1560d0b8" Jan 28 16:58:06 crc kubenswrapper[4811]: E0128 16:58:06.340072 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:58:13 crc kubenswrapper[4811]: I0128 16:58:13.758849 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5mlhl" event={"ID":"84167b2c-2f65-4466-a855-c54c0a71c16a","Type":"ContainerStarted","Data":"019af9d9960f03b5d0858cdc9dea06573b0e768633858c7e3e7515f84b6e595a"} Jan 28 16:58:13 crc kubenswrapper[4811]: I0128 16:58:13.780923 4811 
Jan 28 16:58:13 crc kubenswrapper[4811]: I0128 16:58:13.780923 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5mlhl" podStartSLOduration=3.009348702 podStartE2EDuration="15.780903215s" podCreationTimestamp="2026-01-28 16:57:58 +0000 UTC" firstStartedPulling="2026-01-28 16:58:00.652834682 +0000 UTC m=+4373.407198265" lastFinishedPulling="2026-01-28 16:58:13.424389195 +0000 UTC m=+4386.178752778" observedRunningTime="2026-01-28 16:58:13.775352584 +0000 UTC m=+4386.529716187" watchObservedRunningTime="2026-01-28 16:58:13.780903215 +0000 UTC m=+4386.535266798"
Jan 28 16:58:19 crc kubenswrapper[4811]: I0128 16:58:19.170749 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5mlhl"
Jan 28 16:58:19 crc kubenswrapper[4811]: I0128 16:58:19.171110 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5mlhl"
Jan 28 16:58:19 crc kubenswrapper[4811]: I0128 16:58:19.212146 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5mlhl"
Jan 28 16:58:19 crc kubenswrapper[4811]: I0128 16:58:19.340101 4811 scope.go:117] "RemoveContainer" containerID="2f832fdc03f2c74b172c98339268d3805e889021c7e9a228d350776a1560d0b8"
Jan 28 16:58:19 crc kubenswrapper[4811]: E0128 16:58:19.340425 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 16:58:19 crc kubenswrapper[4811]: I0128 16:58:19.835297 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5mlhl"
Jan 28 16:58:19 crc kubenswrapper[4811]: I0128 16:58:19.884849 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5mlhl"]
Jan 28 16:58:21 crc kubenswrapper[4811]: I0128 16:58:21.809926 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5mlhl" podUID="84167b2c-2f65-4466-a855-c54c0a71c16a" containerName="registry-server" containerID="cri-o://019af9d9960f03b5d0858cdc9dea06573b0e768633858c7e3e7515f84b6e595a" gracePeriod=2
Jan 28 16:58:22 crc kubenswrapper[4811]: I0128 16:58:22.819686 4811 generic.go:334] "Generic (PLEG): container finished" podID="84167b2c-2f65-4466-a855-c54c0a71c16a" containerID="019af9d9960f03b5d0858cdc9dea06573b0e768633858c7e3e7515f84b6e595a" exitCode=0
Jan 28 16:58:22 crc kubenswrapper[4811]: I0128 16:58:22.819792 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5mlhl" event={"ID":"84167b2c-2f65-4466-a855-c54c0a71c16a","Type":"ContainerDied","Data":"019af9d9960f03b5d0858cdc9dea06573b0e768633858c7e3e7515f84b6e595a"}
Jan 28 16:58:23 crc kubenswrapper[4811]: I0128 16:58:23.299538 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5mlhl"
Jan 28 16:58:23 crc kubenswrapper[4811]: I0128 16:58:23.333156 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5p82b\" (UniqueName: \"kubernetes.io/projected/84167b2c-2f65-4466-a855-c54c0a71c16a-kube-api-access-5p82b\") pod \"84167b2c-2f65-4466-a855-c54c0a71c16a\" (UID: \"84167b2c-2f65-4466-a855-c54c0a71c16a\") "
Jan 28 16:58:23 crc kubenswrapper[4811]: I0128 16:58:23.333274 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84167b2c-2f65-4466-a855-c54c0a71c16a-catalog-content\") pod \"84167b2c-2f65-4466-a855-c54c0a71c16a\" (UID: \"84167b2c-2f65-4466-a855-c54c0a71c16a\") "
Jan 28 16:58:23 crc kubenswrapper[4811]: I0128 16:58:23.333355 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84167b2c-2f65-4466-a855-c54c0a71c16a-utilities\") pod \"84167b2c-2f65-4466-a855-c54c0a71c16a\" (UID: \"84167b2c-2f65-4466-a855-c54c0a71c16a\") "
Jan 28 16:58:23 crc kubenswrapper[4811]: I0128 16:58:23.334216 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84167b2c-2f65-4466-a855-c54c0a71c16a-utilities" (OuterVolumeSpecName: "utilities") pod "84167b2c-2f65-4466-a855-c54c0a71c16a" (UID: "84167b2c-2f65-4466-a855-c54c0a71c16a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 16:58:23 crc kubenswrapper[4811]: I0128 16:58:23.338751 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84167b2c-2f65-4466-a855-c54c0a71c16a-kube-api-access-5p82b" (OuterVolumeSpecName: "kube-api-access-5p82b") pod "84167b2c-2f65-4466-a855-c54c0a71c16a" (UID: "84167b2c-2f65-4466-a855-c54c0a71c16a"). InnerVolumeSpecName "kube-api-access-5p82b". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 16:58:23 crc kubenswrapper[4811]: I0128 16:58:23.435825 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5p82b\" (UniqueName: \"kubernetes.io/projected/84167b2c-2f65-4466-a855-c54c0a71c16a-kube-api-access-5p82b\") on node \"crc\" DevicePath \"\""
Jan 28 16:58:23 crc kubenswrapper[4811]: I0128 16:58:23.435854 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84167b2c-2f65-4466-a855-c54c0a71c16a-utilities\") on node \"crc\" DevicePath \"\""
Jan 28 16:58:23 crc kubenswrapper[4811]: I0128 16:58:23.448848 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84167b2c-2f65-4466-a855-c54c0a71c16a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "84167b2c-2f65-4466-a855-c54c0a71c16a" (UID: "84167b2c-2f65-4466-a855-c54c0a71c16a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 16:58:23 crc kubenswrapper[4811]: I0128 16:58:23.537387 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84167b2c-2f65-4466-a855-c54c0a71c16a-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 28 16:58:23 crc kubenswrapper[4811]: I0128 16:58:23.832251 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5mlhl" event={"ID":"84167b2c-2f65-4466-a855-c54c0a71c16a","Type":"ContainerDied","Data":"d0994a256307000c9e53b00503d0ce129823efc2203ebd82bca4b478d8d2ecc6"}
Jan 28 16:58:23 crc kubenswrapper[4811]: I0128 16:58:23.832320 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5mlhl"
Jan 28 16:58:23 crc kubenswrapper[4811]: I0128 16:58:23.832334 4811 scope.go:117] "RemoveContainer" containerID="019af9d9960f03b5d0858cdc9dea06573b0e768633858c7e3e7515f84b6e595a"
Jan 28 16:58:23 crc kubenswrapper[4811]: I0128 16:58:23.864117 4811 scope.go:117] "RemoveContainer" containerID="6f733eff338c4ca285a838e263a6cd22fa5075d83eed990b141c8b761ad1b9b4"
Jan 28 16:58:23 crc kubenswrapper[4811]: I0128 16:58:23.865782 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5mlhl"]
Jan 28 16:58:23 crc kubenswrapper[4811]: I0128 16:58:23.876042 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5mlhl"]
Jan 28 16:58:23 crc kubenswrapper[4811]: I0128 16:58:23.884633 4811 scope.go:117] "RemoveContainer" containerID="c2dc7a73a7d2c9af84ab2e5667e20136fab51c11a2147f5b15a73c7fecf011f4"
Jan 28 16:58:24 crc kubenswrapper[4811]: I0128 16:58:24.059686 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-dzrdr"]
Jan 28 16:58:24 crc kubenswrapper[4811]: E0128 16:58:24.060481 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84167b2c-2f65-4466-a855-c54c0a71c16a" containerName="extract-content"
Jan 28 16:58:24 crc kubenswrapper[4811]: I0128 16:58:24.060508 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="84167b2c-2f65-4466-a855-c54c0a71c16a" containerName="extract-content"
Jan 28 16:58:24 crc kubenswrapper[4811]: E0128 16:58:24.060535 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84167b2c-2f65-4466-a855-c54c0a71c16a" containerName="registry-server"
Jan 28 16:58:24 crc kubenswrapper[4811]: I0128 16:58:24.060543 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="84167b2c-2f65-4466-a855-c54c0a71c16a" containerName="registry-server"
Jan 28 16:58:24 crc kubenswrapper[4811]: E0128 16:58:24.060577 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84167b2c-2f65-4466-a855-c54c0a71c16a" containerName="extract-utilities"
Jan 28 16:58:24 crc kubenswrapper[4811]: I0128 16:58:24.060587 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="84167b2c-2f65-4466-a855-c54c0a71c16a" containerName="extract-utilities"
Jan 28 16:58:24 crc kubenswrapper[4811]: I0128 16:58:24.060763 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="84167b2c-2f65-4466-a855-c54c0a71c16a" containerName="registry-server"
Jan 28 16:58:24 crc kubenswrapper[4811]: I0128 16:58:24.062191 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dzrdr"
Jan 28 16:58:24 crc kubenswrapper[4811]: I0128 16:58:24.066862 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dzrdr"]
Jan 28 16:58:24 crc kubenswrapper[4811]: I0128 16:58:24.147555 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ccbf5af-b62f-4082-823d-6ee73c92897a-utilities\") pod \"certified-operators-dzrdr\" (UID: \"5ccbf5af-b62f-4082-823d-6ee73c92897a\") " pod="openshift-marketplace/certified-operators-dzrdr"
Jan 28 16:58:24 crc kubenswrapper[4811]: I0128 16:58:24.147615 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ccbf5af-b62f-4082-823d-6ee73c92897a-catalog-content\") pod \"certified-operators-dzrdr\" (UID: \"5ccbf5af-b62f-4082-823d-6ee73c92897a\") " pod="openshift-marketplace/certified-operators-dzrdr"
Jan 28 16:58:24 crc kubenswrapper[4811]: I0128 16:58:24.147678 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jc78k\" (UniqueName: \"kubernetes.io/projected/5ccbf5af-b62f-4082-823d-6ee73c92897a-kube-api-access-jc78k\") pod \"certified-operators-dzrdr\" (UID: \"5ccbf5af-b62f-4082-823d-6ee73c92897a\") " pod="openshift-marketplace/certified-operators-dzrdr"
Jan 28 16:58:24 crc kubenswrapper[4811]: I0128 16:58:24.248610 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ccbf5af-b62f-4082-823d-6ee73c92897a-utilities\") pod \"certified-operators-dzrdr\" (UID: \"5ccbf5af-b62f-4082-823d-6ee73c92897a\") " pod="openshift-marketplace/certified-operators-dzrdr"
Jan 28 16:58:24 crc kubenswrapper[4811]: I0128 16:58:24.248681 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ccbf5af-b62f-4082-823d-6ee73c92897a-catalog-content\") pod \"certified-operators-dzrdr\" (UID: \"5ccbf5af-b62f-4082-823d-6ee73c92897a\") " pod="openshift-marketplace/certified-operators-dzrdr"
Jan 28 16:58:24 crc kubenswrapper[4811]: I0128 16:58:24.248741 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jc78k\" (UniqueName: \"kubernetes.io/projected/5ccbf5af-b62f-4082-823d-6ee73c92897a-kube-api-access-jc78k\") pod \"certified-operators-dzrdr\" (UID: \"5ccbf5af-b62f-4082-823d-6ee73c92897a\") " pod="openshift-marketplace/certified-operators-dzrdr"
Jan 28 16:58:24 crc kubenswrapper[4811]: I0128 16:58:24.249634 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ccbf5af-b62f-4082-823d-6ee73c92897a-utilities\") pod \"certified-operators-dzrdr\" (UID: \"5ccbf5af-b62f-4082-823d-6ee73c92897a\") " pod="openshift-marketplace/certified-operators-dzrdr"
Jan 28 16:58:24 crc kubenswrapper[4811]: I0128 16:58:24.249899 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ccbf5af-b62f-4082-823d-6ee73c92897a-catalog-content\") pod \"certified-operators-dzrdr\" (UID: \"5ccbf5af-b62f-4082-823d-6ee73c92897a\") " pod="openshift-marketplace/certified-operators-dzrdr"
Jan 28 16:58:24 crc kubenswrapper[4811]: I0128 16:58:24.268839 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jc78k\" (UniqueName: \"kubernetes.io/projected/5ccbf5af-b62f-4082-823d-6ee73c92897a-kube-api-access-jc78k\") pod \"certified-operators-dzrdr\" (UID: \"5ccbf5af-b62f-4082-823d-6ee73c92897a\") " pod="openshift-marketplace/certified-operators-dzrdr"
Jan 28 16:58:24 crc kubenswrapper[4811]: I0128 16:58:24.362852 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84167b2c-2f65-4466-a855-c54c0a71c16a" path="/var/lib/kubelet/pods/84167b2c-2f65-4466-a855-c54c0a71c16a/volumes"
Jan 28 16:58:24 crc kubenswrapper[4811]: I0128 16:58:24.395285 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dzrdr"
Jan 28 16:58:24 crc kubenswrapper[4811]: I0128 16:58:24.703214 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dzrdr"]
Jan 28 16:58:24 crc kubenswrapper[4811]: I0128 16:58:24.839712 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dzrdr" event={"ID":"5ccbf5af-b62f-4082-823d-6ee73c92897a","Type":"ContainerStarted","Data":"a1c697def7d186efcfb33ba4673606c37cb23b72671de615c9276609cd0747b7"}
Jan 28 16:58:25 crc kubenswrapper[4811]: I0128 16:58:25.851849 4811 generic.go:334] "Generic (PLEG): container finished" podID="5ccbf5af-b62f-4082-823d-6ee73c92897a" containerID="3c22afdb78781e935e96c68e674d373f5d584813c0b7dd9bd64a645edf82241c" exitCode=0
Jan 28 16:58:25 crc kubenswrapper[4811]: I0128 16:58:25.851931 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dzrdr" event={"ID":"5ccbf5af-b62f-4082-823d-6ee73c92897a","Type":"ContainerDied","Data":"3c22afdb78781e935e96c68e674d373f5d584813c0b7dd9bd64a645edf82241c"}
Jan 28 16:58:27 crc kubenswrapper[4811]: I0128 16:58:27.059299 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qd5j2"]
Jan 28 16:58:27 crc kubenswrapper[4811]: I0128 16:58:27.061121 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qd5j2"
Jan 28 16:58:27 crc kubenswrapper[4811]: I0128 16:58:27.067402 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qd5j2"]
Jan 28 16:58:27 crc kubenswrapper[4811]: I0128 16:58:27.190150 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7-catalog-content\") pod \"redhat-marketplace-qd5j2\" (UID: \"dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7\") " pod="openshift-marketplace/redhat-marketplace-qd5j2"
Jan 28 16:58:27 crc kubenswrapper[4811]: I0128 16:58:27.190210 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7-utilities\") pod \"redhat-marketplace-qd5j2\" (UID: \"dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7\") " pod="openshift-marketplace/redhat-marketplace-qd5j2"
Jan 28 16:58:27 crc kubenswrapper[4811]: I0128 16:58:27.190264 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dvtpd\" (UniqueName: \"kubernetes.io/projected/dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7-kube-api-access-dvtpd\") pod \"redhat-marketplace-qd5j2\" (UID: \"dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7\") " pod="openshift-marketplace/redhat-marketplace-qd5j2"
Jan 28 16:58:27 crc kubenswrapper[4811]: I0128 16:58:27.292114 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dvtpd\" (UniqueName: \"kubernetes.io/projected/dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7-kube-api-access-dvtpd\") pod \"redhat-marketplace-qd5j2\" (UID: \"dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7\") " pod="openshift-marketplace/redhat-marketplace-qd5j2"
Jan 28 16:58:27 crc kubenswrapper[4811]: I0128 16:58:27.292639 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7-catalog-content\") pod \"redhat-marketplace-qd5j2\" (UID: \"dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7\") " pod="openshift-marketplace/redhat-marketplace-qd5j2"
Jan 28 16:58:27 crc kubenswrapper[4811]: I0128 16:58:27.292686 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7-utilities\") pod \"redhat-marketplace-qd5j2\" (UID: \"dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7\") " pod="openshift-marketplace/redhat-marketplace-qd5j2"
Jan 28 16:58:27 crc kubenswrapper[4811]: I0128 16:58:27.293493 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7-catalog-content\") pod \"redhat-marketplace-qd5j2\" (UID: \"dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7\") " pod="openshift-marketplace/redhat-marketplace-qd5j2"
Jan 28 16:58:27 crc kubenswrapper[4811]: I0128 16:58:27.293526 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7-utilities\") pod \"redhat-marketplace-qd5j2\" (UID: \"dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7\") " pod="openshift-marketplace/redhat-marketplace-qd5j2"
Jan 28 16:58:27 crc kubenswrapper[4811]: I0128 16:58:27.315967 4811 operation_generator.go:637] "MountVolume.SetUp
succeeded for volume \"kube-api-access-dvtpd\" (UniqueName: \"kubernetes.io/projected/dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7-kube-api-access-dvtpd\") pod \"redhat-marketplace-qd5j2\" (UID: \"dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7\") " pod="openshift-marketplace/redhat-marketplace-qd5j2" Jan 28 16:58:27 crc kubenswrapper[4811]: I0128 16:58:27.382454 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qd5j2" Jan 28 16:58:27 crc kubenswrapper[4811]: I0128 16:58:27.640011 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qd5j2"] Jan 28 16:58:27 crc kubenswrapper[4811]: W0128 16:58:27.680799 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddc33ce5e_9ed1_4b65_a8f6_58bff62e8de7.slice/crio-e17490dea39add5e0130bd044bb73a93002ec6bb5e2552b250fb87db9e5b2a94 WatchSource:0}: Error finding container e17490dea39add5e0130bd044bb73a93002ec6bb5e2552b250fb87db9e5b2a94: Status 404 returned error can't find the container with id e17490dea39add5e0130bd044bb73a93002ec6bb5e2552b250fb87db9e5b2a94 Jan 28 16:58:27 crc kubenswrapper[4811]: I0128 16:58:27.867040 4811 generic.go:334] "Generic (PLEG): container finished" podID="dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7" containerID="cff2be396cf5a5c008de6466ab1163b5a958d15302ed887a93d773703e2a4088" exitCode=0 Jan 28 16:58:27 crc kubenswrapper[4811]: I0128 16:58:27.867532 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qd5j2" event={"ID":"dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7","Type":"ContainerDied","Data":"cff2be396cf5a5c008de6466ab1163b5a958d15302ed887a93d773703e2a4088"} Jan 28 16:58:27 crc kubenswrapper[4811]: I0128 16:58:27.867562 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qd5j2" event={"ID":"dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7","Type":"ContainerStarted","Data":"e17490dea39add5e0130bd044bb73a93002ec6bb5e2552b250fb87db9e5b2a94"} Jan 28 16:58:27 crc kubenswrapper[4811]: I0128 16:58:27.873539 4811 generic.go:334] "Generic (PLEG): container finished" podID="5ccbf5af-b62f-4082-823d-6ee73c92897a" containerID="e6342f91b3ff882d646db0e942b079174ac3d806079c9aa9f6672d846807b6df" exitCode=0 Jan 28 16:58:27 crc kubenswrapper[4811]: I0128 16:58:27.873583 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dzrdr" event={"ID":"5ccbf5af-b62f-4082-823d-6ee73c92897a","Type":"ContainerDied","Data":"e6342f91b3ff882d646db0e942b079174ac3d806079c9aa9f6672d846807b6df"} Jan 28 16:58:28 crc kubenswrapper[4811]: I0128 16:58:28.882315 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dzrdr" event={"ID":"5ccbf5af-b62f-4082-823d-6ee73c92897a","Type":"ContainerStarted","Data":"cb9267f166a0ec60690fc734eaf19eef57ceeeeb30c3088ca449d5af1bf43ddd"} Jan 28 16:58:28 crc kubenswrapper[4811]: I0128 16:58:28.903963 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-dzrdr" podStartSLOduration=2.441077335 podStartE2EDuration="4.903942921s" podCreationTimestamp="2026-01-28 16:58:24 +0000 UTC" firstStartedPulling="2026-01-28 16:58:25.854085778 +0000 UTC m=+4398.608449361" lastFinishedPulling="2026-01-28 16:58:28.316951364 +0000 UTC m=+4401.071314947" observedRunningTime="2026-01-28 16:58:28.901693989 +0000 UTC m=+4401.656057602" 
watchObservedRunningTime="2026-01-28 16:58:28.903942921 +0000 UTC m=+4401.658306504" Jan 28 16:58:29 crc kubenswrapper[4811]: I0128 16:58:29.891187 4811 generic.go:334] "Generic (PLEG): container finished" podID="dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7" containerID="b81136bb75cc8738a6e32124c4176a848881e4e1ba8b7464f93d728f15e7c05b" exitCode=0 Jan 28 16:58:29 crc kubenswrapper[4811]: I0128 16:58:29.891292 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qd5j2" event={"ID":"dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7","Type":"ContainerDied","Data":"b81136bb75cc8738a6e32124c4176a848881e4e1ba8b7464f93d728f15e7c05b"} Jan 28 16:58:30 crc kubenswrapper[4811]: I0128 16:58:30.899760 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qd5j2" event={"ID":"dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7","Type":"ContainerStarted","Data":"c76481dbaa10774463e12bb7717fd820c462f82666c799d84add1b8300806ea3"} Jan 28 16:58:30 crc kubenswrapper[4811]: I0128 16:58:30.923657 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qd5j2" podStartSLOduration=1.11704874 podStartE2EDuration="3.923638217s" podCreationTimestamp="2026-01-28 16:58:27 +0000 UTC" firstStartedPulling="2026-01-28 16:58:27.869073346 +0000 UTC m=+4400.623436929" lastFinishedPulling="2026-01-28 16:58:30.675662823 +0000 UTC m=+4403.430026406" observedRunningTime="2026-01-28 16:58:30.919548255 +0000 UTC m=+4403.673911838" watchObservedRunningTime="2026-01-28 16:58:30.923638217 +0000 UTC m=+4403.678001800" Jan 28 16:58:31 crc kubenswrapper[4811]: I0128 16:58:31.340302 4811 scope.go:117] "RemoveContainer" containerID="2f832fdc03f2c74b172c98339268d3805e889021c7e9a228d350776a1560d0b8" Jan 28 16:58:31 crc kubenswrapper[4811]: E0128 16:58:31.340665 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:58:34 crc kubenswrapper[4811]: I0128 16:58:34.395498 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-dzrdr" Jan 28 16:58:34 crc kubenswrapper[4811]: I0128 16:58:34.395910 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-dzrdr" Jan 28 16:58:34 crc kubenswrapper[4811]: I0128 16:58:34.633627 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-dzrdr" Jan 28 16:58:34 crc kubenswrapper[4811]: I0128 16:58:34.965263 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-dzrdr" Jan 28 16:58:36 crc kubenswrapper[4811]: I0128 16:58:36.041503 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dzrdr"] Jan 28 16:58:36 crc kubenswrapper[4811]: I0128 16:58:36.939981 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-dzrdr" podUID="5ccbf5af-b62f-4082-823d-6ee73c92897a" containerName="registry-server" 
containerID="cri-o://cb9267f166a0ec60690fc734eaf19eef57ceeeeb30c3088ca449d5af1bf43ddd" gracePeriod=2 Jan 28 16:58:37 crc kubenswrapper[4811]: I0128 16:58:37.383380 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qd5j2" Jan 28 16:58:37 crc kubenswrapper[4811]: I0128 16:58:37.383456 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qd5j2" Jan 28 16:58:37 crc kubenswrapper[4811]: I0128 16:58:37.450906 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qd5j2" Jan 28 16:58:37 crc kubenswrapper[4811]: I0128 16:58:37.949360 4811 generic.go:334] "Generic (PLEG): container finished" podID="5ccbf5af-b62f-4082-823d-6ee73c92897a" containerID="cb9267f166a0ec60690fc734eaf19eef57ceeeeb30c3088ca449d5af1bf43ddd" exitCode=0 Jan 28 16:58:37 crc kubenswrapper[4811]: I0128 16:58:37.949464 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dzrdr" event={"ID":"5ccbf5af-b62f-4082-823d-6ee73c92897a","Type":"ContainerDied","Data":"cb9267f166a0ec60690fc734eaf19eef57ceeeeb30c3088ca449d5af1bf43ddd"} Jan 28 16:58:37 crc kubenswrapper[4811]: I0128 16:58:37.996049 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qd5j2" Jan 28 16:58:39 crc kubenswrapper[4811]: I0128 16:58:39.642316 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qd5j2"] Jan 28 16:58:39 crc kubenswrapper[4811]: I0128 16:58:39.964005 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qd5j2" podUID="dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7" containerName="registry-server" containerID="cri-o://c76481dbaa10774463e12bb7717fd820c462f82666c799d84add1b8300806ea3" gracePeriod=2 Jan 28 16:58:40 crc kubenswrapper[4811]: I0128 16:58:40.282064 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dzrdr" Jan 28 16:58:40 crc kubenswrapper[4811]: I0128 16:58:40.395945 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ccbf5af-b62f-4082-823d-6ee73c92897a-catalog-content\") pod \"5ccbf5af-b62f-4082-823d-6ee73c92897a\" (UID: \"5ccbf5af-b62f-4082-823d-6ee73c92897a\") " Jan 28 16:58:40 crc kubenswrapper[4811]: I0128 16:58:40.396109 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ccbf5af-b62f-4082-823d-6ee73c92897a-utilities\") pod \"5ccbf5af-b62f-4082-823d-6ee73c92897a\" (UID: \"5ccbf5af-b62f-4082-823d-6ee73c92897a\") " Jan 28 16:58:40 crc kubenswrapper[4811]: I0128 16:58:40.396177 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jc78k\" (UniqueName: \"kubernetes.io/projected/5ccbf5af-b62f-4082-823d-6ee73c92897a-kube-api-access-jc78k\") pod \"5ccbf5af-b62f-4082-823d-6ee73c92897a\" (UID: \"5ccbf5af-b62f-4082-823d-6ee73c92897a\") " Jan 28 16:58:40 crc kubenswrapper[4811]: I0128 16:58:40.396959 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ccbf5af-b62f-4082-823d-6ee73c92897a-utilities" (OuterVolumeSpecName: "utilities") pod "5ccbf5af-b62f-4082-823d-6ee73c92897a" (UID: "5ccbf5af-b62f-4082-823d-6ee73c92897a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:58:40 crc kubenswrapper[4811]: I0128 16:58:40.397587 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ccbf5af-b62f-4082-823d-6ee73c92897a-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 16:58:40 crc kubenswrapper[4811]: I0128 16:58:40.401789 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ccbf5af-b62f-4082-823d-6ee73c92897a-kube-api-access-jc78k" (OuterVolumeSpecName: "kube-api-access-jc78k") pod "5ccbf5af-b62f-4082-823d-6ee73c92897a" (UID: "5ccbf5af-b62f-4082-823d-6ee73c92897a"). InnerVolumeSpecName "kube-api-access-jc78k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:58:40 crc kubenswrapper[4811]: I0128 16:58:40.499406 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jc78k\" (UniqueName: \"kubernetes.io/projected/5ccbf5af-b62f-4082-823d-6ee73c92897a-kube-api-access-jc78k\") on node \"crc\" DevicePath \"\"" Jan 28 16:58:40 crc kubenswrapper[4811]: I0128 16:58:40.705009 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ccbf5af-b62f-4082-823d-6ee73c92897a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5ccbf5af-b62f-4082-823d-6ee73c92897a" (UID: "5ccbf5af-b62f-4082-823d-6ee73c92897a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:58:40 crc kubenswrapper[4811]: I0128 16:58:40.803793 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ccbf5af-b62f-4082-823d-6ee73c92897a-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 16:58:40 crc kubenswrapper[4811]: I0128 16:58:40.973168 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dzrdr" event={"ID":"5ccbf5af-b62f-4082-823d-6ee73c92897a","Type":"ContainerDied","Data":"a1c697def7d186efcfb33ba4673606c37cb23b72671de615c9276609cd0747b7"} Jan 28 16:58:40 crc kubenswrapper[4811]: I0128 16:58:40.973182 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dzrdr" Jan 28 16:58:40 crc kubenswrapper[4811]: I0128 16:58:40.973238 4811 scope.go:117] "RemoveContainer" containerID="cb9267f166a0ec60690fc734eaf19eef57ceeeeb30c3088ca449d5af1bf43ddd" Jan 28 16:58:40 crc kubenswrapper[4811]: I0128 16:58:40.980322 4811 generic.go:334] "Generic (PLEG): container finished" podID="dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7" containerID="c76481dbaa10774463e12bb7717fd820c462f82666c799d84add1b8300806ea3" exitCode=0 Jan 28 16:58:40 crc kubenswrapper[4811]: I0128 16:58:40.980361 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qd5j2" event={"ID":"dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7","Type":"ContainerDied","Data":"c76481dbaa10774463e12bb7717fd820c462f82666c799d84add1b8300806ea3"} Jan 28 16:58:40 crc kubenswrapper[4811]: I0128 16:58:40.996403 4811 scope.go:117] "RemoveContainer" containerID="e6342f91b3ff882d646db0e942b079174ac3d806079c9aa9f6672d846807b6df" Jan 28 16:58:41 crc kubenswrapper[4811]: I0128 16:58:41.022294 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dzrdr"] Jan 28 16:58:41 crc kubenswrapper[4811]: I0128 16:58:41.023386 4811 scope.go:117] "RemoveContainer" containerID="3c22afdb78781e935e96c68e674d373f5d584813c0b7dd9bd64a645edf82241c" Jan 28 16:58:41 crc kubenswrapper[4811]: I0128 16:58:41.028935 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-dzrdr"] Jan 28 16:58:41 crc kubenswrapper[4811]: I0128 16:58:41.211714 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qd5j2" Jan 28 16:58:41 crc kubenswrapper[4811]: I0128 16:58:41.311553 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7-catalog-content\") pod \"dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7\" (UID: \"dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7\") " Jan 28 16:58:41 crc kubenswrapper[4811]: I0128 16:58:41.311952 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7-utilities\") pod \"dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7\" (UID: \"dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7\") " Jan 28 16:58:41 crc kubenswrapper[4811]: I0128 16:58:41.312120 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dvtpd\" (UniqueName: \"kubernetes.io/projected/dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7-kube-api-access-dvtpd\") pod \"dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7\" (UID: \"dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7\") " Jan 28 16:58:41 crc kubenswrapper[4811]: I0128 16:58:41.314082 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7-utilities" (OuterVolumeSpecName: "utilities") pod "dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7" (UID: "dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:58:41 crc kubenswrapper[4811]: I0128 16:58:41.314364 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 16:58:41 crc kubenswrapper[4811]: I0128 16:58:41.315519 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7-kube-api-access-dvtpd" (OuterVolumeSpecName: "kube-api-access-dvtpd") pod "dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7" (UID: "dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7"). InnerVolumeSpecName "kube-api-access-dvtpd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 16:58:41 crc kubenswrapper[4811]: I0128 16:58:41.337233 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7" (UID: "dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 16:58:41 crc kubenswrapper[4811]: I0128 16:58:41.415675 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dvtpd\" (UniqueName: \"kubernetes.io/projected/dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7-kube-api-access-dvtpd\") on node \"crc\" DevicePath \"\"" Jan 28 16:58:41 crc kubenswrapper[4811]: I0128 16:58:41.415707 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 16:58:41 crc kubenswrapper[4811]: I0128 16:58:41.988788 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qd5j2" event={"ID":"dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7","Type":"ContainerDied","Data":"e17490dea39add5e0130bd044bb73a93002ec6bb5e2552b250fb87db9e5b2a94"} Jan 28 16:58:41 crc kubenswrapper[4811]: I0128 16:58:41.988852 4811 scope.go:117] "RemoveContainer" containerID="c76481dbaa10774463e12bb7717fd820c462f82666c799d84add1b8300806ea3" Jan 28 16:58:41 crc kubenswrapper[4811]: I0128 16:58:41.988848 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qd5j2" Jan 28 16:58:42 crc kubenswrapper[4811]: I0128 16:58:42.008729 4811 scope.go:117] "RemoveContainer" containerID="b81136bb75cc8738a6e32124c4176a848881e4e1ba8b7464f93d728f15e7c05b" Jan 28 16:58:42 crc kubenswrapper[4811]: I0128 16:58:42.028666 4811 scope.go:117] "RemoveContainer" containerID="cff2be396cf5a5c008de6466ab1163b5a958d15302ed887a93d773703e2a4088" Jan 28 16:58:42 crc kubenswrapper[4811]: I0128 16:58:42.036769 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qd5j2"] Jan 28 16:58:42 crc kubenswrapper[4811]: I0128 16:58:42.042667 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qd5j2"] Jan 28 16:58:42 crc kubenswrapper[4811]: I0128 16:58:42.349569 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ccbf5af-b62f-4082-823d-6ee73c92897a" path="/var/lib/kubelet/pods/5ccbf5af-b62f-4082-823d-6ee73c92897a/volumes" Jan 28 16:58:42 crc kubenswrapper[4811]: I0128 16:58:42.350151 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7" path="/var/lib/kubelet/pods/dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7/volumes" Jan 28 16:58:44 crc kubenswrapper[4811]: I0128 16:58:44.339445 4811 scope.go:117] "RemoveContainer" containerID="2f832fdc03f2c74b172c98339268d3805e889021c7e9a228d350776a1560d0b8" Jan 28 16:58:44 crc kubenswrapper[4811]: E0128 16:58:44.339843 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:58:55 crc kubenswrapper[4811]: I0128 16:58:55.340067 4811 scope.go:117] "RemoveContainer" containerID="2f832fdc03f2c74b172c98339268d3805e889021c7e9a228d350776a1560d0b8" Jan 28 16:58:55 crc kubenswrapper[4811]: E0128 16:58:55.341593 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:59:10 crc kubenswrapper[4811]: I0128 16:59:10.340215 4811 scope.go:117] "RemoveContainer" containerID="2f832fdc03f2c74b172c98339268d3805e889021c7e9a228d350776a1560d0b8" Jan 28 16:59:10 crc kubenswrapper[4811]: E0128 16:59:10.340946 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:59:24 crc kubenswrapper[4811]: I0128 16:59:24.339657 4811 scope.go:117] "RemoveContainer" containerID="2f832fdc03f2c74b172c98339268d3805e889021c7e9a228d350776a1560d0b8" Jan 28 16:59:24 crc kubenswrapper[4811]: E0128 16:59:24.340366 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:59:39 crc kubenswrapper[4811]: I0128 16:59:39.339398 4811 scope.go:117] "RemoveContainer" containerID="2f832fdc03f2c74b172c98339268d3805e889021c7e9a228d350776a1560d0b8" Jan 28 16:59:39 crc kubenswrapper[4811]: E0128 16:59:39.341450 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 16:59:53 crc kubenswrapper[4811]: I0128 16:59:53.339794 4811 scope.go:117] "RemoveContainer" containerID="2f832fdc03f2c74b172c98339268d3805e889021c7e9a228d350776a1560d0b8" Jan 28 16:59:53 crc kubenswrapper[4811]: E0128 16:59:53.340740 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:00:00 crc kubenswrapper[4811]: I0128 17:00:00.179849 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493660-fq594"] Jan 28 17:00:00 crc kubenswrapper[4811]: E0128 17:00:00.180789 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ccbf5af-b62f-4082-823d-6ee73c92897a" containerName="registry-server" Jan 28 17:00:00 crc kubenswrapper[4811]: I0128 17:00:00.180804 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ccbf5af-b62f-4082-823d-6ee73c92897a" 
containerName="registry-server" Jan 28 17:00:00 crc kubenswrapper[4811]: E0128 17:00:00.180821 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7" containerName="registry-server" Jan 28 17:00:00 crc kubenswrapper[4811]: I0128 17:00:00.180830 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7" containerName="registry-server" Jan 28 17:00:00 crc kubenswrapper[4811]: E0128 17:00:00.180847 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ccbf5af-b62f-4082-823d-6ee73c92897a" containerName="extract-content" Jan 28 17:00:00 crc kubenswrapper[4811]: I0128 17:00:00.180855 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ccbf5af-b62f-4082-823d-6ee73c92897a" containerName="extract-content" Jan 28 17:00:00 crc kubenswrapper[4811]: E0128 17:00:00.180871 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7" containerName="extract-utilities" Jan 28 17:00:00 crc kubenswrapper[4811]: I0128 17:00:00.180877 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7" containerName="extract-utilities" Jan 28 17:00:00 crc kubenswrapper[4811]: E0128 17:00:00.180899 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7" containerName="extract-content" Jan 28 17:00:00 crc kubenswrapper[4811]: I0128 17:00:00.180906 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7" containerName="extract-content" Jan 28 17:00:00 crc kubenswrapper[4811]: E0128 17:00:00.180920 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ccbf5af-b62f-4082-823d-6ee73c92897a" containerName="extract-utilities" Jan 28 17:00:00 crc kubenswrapper[4811]: I0128 17:00:00.180929 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ccbf5af-b62f-4082-823d-6ee73c92897a" containerName="extract-utilities" Jan 28 17:00:00 crc kubenswrapper[4811]: I0128 17:00:00.181073 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc33ce5e-9ed1-4b65-a8f6-58bff62e8de7" containerName="registry-server" Jan 28 17:00:00 crc kubenswrapper[4811]: I0128 17:00:00.181092 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ccbf5af-b62f-4082-823d-6ee73c92897a" containerName="registry-server" Jan 28 17:00:00 crc kubenswrapper[4811]: I0128 17:00:00.181594 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493660-fq594" Jan 28 17:00:00 crc kubenswrapper[4811]: I0128 17:00:00.185327 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 28 17:00:00 crc kubenswrapper[4811]: I0128 17:00:00.185684 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 28 17:00:00 crc kubenswrapper[4811]: I0128 17:00:00.189323 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493660-fq594"] Jan 28 17:00:00 crc kubenswrapper[4811]: I0128 17:00:00.230455 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgv27\" (UniqueName: \"kubernetes.io/projected/25e9aa33-6d76-454b-8bec-5bdebdc5affc-kube-api-access-pgv27\") pod \"collect-profiles-29493660-fq594\" (UID: \"25e9aa33-6d76-454b-8bec-5bdebdc5affc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493660-fq594" Jan 28 17:00:00 crc kubenswrapper[4811]: I0128 17:00:00.230592 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/25e9aa33-6d76-454b-8bec-5bdebdc5affc-config-volume\") pod \"collect-profiles-29493660-fq594\" (UID: \"25e9aa33-6d76-454b-8bec-5bdebdc5affc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493660-fq594" Jan 28 17:00:00 crc kubenswrapper[4811]: I0128 17:00:00.230727 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/25e9aa33-6d76-454b-8bec-5bdebdc5affc-secret-volume\") pod \"collect-profiles-29493660-fq594\" (UID: \"25e9aa33-6d76-454b-8bec-5bdebdc5affc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493660-fq594" Jan 28 17:00:00 crc kubenswrapper[4811]: I0128 17:00:00.332152 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/25e9aa33-6d76-454b-8bec-5bdebdc5affc-config-volume\") pod \"collect-profiles-29493660-fq594\" (UID: \"25e9aa33-6d76-454b-8bec-5bdebdc5affc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493660-fq594" Jan 28 17:00:00 crc kubenswrapper[4811]: I0128 17:00:00.332203 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/25e9aa33-6d76-454b-8bec-5bdebdc5affc-secret-volume\") pod \"collect-profiles-29493660-fq594\" (UID: \"25e9aa33-6d76-454b-8bec-5bdebdc5affc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493660-fq594" Jan 28 17:00:00 crc kubenswrapper[4811]: I0128 17:00:00.332281 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgv27\" (UniqueName: \"kubernetes.io/projected/25e9aa33-6d76-454b-8bec-5bdebdc5affc-kube-api-access-pgv27\") pod \"collect-profiles-29493660-fq594\" (UID: \"25e9aa33-6d76-454b-8bec-5bdebdc5affc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493660-fq594" Jan 28 17:00:00 crc kubenswrapper[4811]: I0128 17:00:00.333097 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/25e9aa33-6d76-454b-8bec-5bdebdc5affc-config-volume\") pod 
\"collect-profiles-29493660-fq594\" (UID: \"25e9aa33-6d76-454b-8bec-5bdebdc5affc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493660-fq594" Jan 28 17:00:00 crc kubenswrapper[4811]: I0128 17:00:00.338597 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/25e9aa33-6d76-454b-8bec-5bdebdc5affc-secret-volume\") pod \"collect-profiles-29493660-fq594\" (UID: \"25e9aa33-6d76-454b-8bec-5bdebdc5affc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493660-fq594" Jan 28 17:00:00 crc kubenswrapper[4811]: I0128 17:00:00.349138 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgv27\" (UniqueName: \"kubernetes.io/projected/25e9aa33-6d76-454b-8bec-5bdebdc5affc-kube-api-access-pgv27\") pod \"collect-profiles-29493660-fq594\" (UID: \"25e9aa33-6d76-454b-8bec-5bdebdc5affc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493660-fq594" Jan 28 17:00:00 crc kubenswrapper[4811]: I0128 17:00:00.509599 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493660-fq594" Jan 28 17:00:00 crc kubenswrapper[4811]: I0128 17:00:00.911332 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493660-fq594"] Jan 28 17:00:00 crc kubenswrapper[4811]: W0128 17:00:00.923712 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod25e9aa33_6d76_454b_8bec_5bdebdc5affc.slice/crio-933f89a944fd7eb408bbf79c9db83ab2df2948c120a6301be56d56976032c912 WatchSource:0}: Error finding container 933f89a944fd7eb408bbf79c9db83ab2df2948c120a6301be56d56976032c912: Status 404 returned error can't find the container with id 933f89a944fd7eb408bbf79c9db83ab2df2948c120a6301be56d56976032c912 Jan 28 17:00:01 crc kubenswrapper[4811]: I0128 17:00:01.538176 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493660-fq594" event={"ID":"25e9aa33-6d76-454b-8bec-5bdebdc5affc","Type":"ContainerStarted","Data":"9ada8ef4070520e747e5ea970a70a63de5b86abd9776f11fd0ddf6608bb52b39"} Jan 28 17:00:01 crc kubenswrapper[4811]: I0128 17:00:01.538229 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493660-fq594" event={"ID":"25e9aa33-6d76-454b-8bec-5bdebdc5affc","Type":"ContainerStarted","Data":"933f89a944fd7eb408bbf79c9db83ab2df2948c120a6301be56d56976032c912"} Jan 28 17:00:01 crc kubenswrapper[4811]: I0128 17:00:01.557013 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29493660-fq594" podStartSLOduration=1.556989405 podStartE2EDuration="1.556989405s" podCreationTimestamp="2026-01-28 17:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:00:01.549904492 +0000 UTC m=+4494.304268075" watchObservedRunningTime="2026-01-28 17:00:01.556989405 +0000 UTC m=+4494.311352988" Jan 28 17:00:02 crc kubenswrapper[4811]: I0128 17:00:02.546442 4811 generic.go:334] "Generic (PLEG): container finished" podID="25e9aa33-6d76-454b-8bec-5bdebdc5affc" containerID="9ada8ef4070520e747e5ea970a70a63de5b86abd9776f11fd0ddf6608bb52b39" exitCode=0 Jan 28 17:00:02 crc kubenswrapper[4811]: I0128 17:00:02.546568 
4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493660-fq594" event={"ID":"25e9aa33-6d76-454b-8bec-5bdebdc5affc","Type":"ContainerDied","Data":"9ada8ef4070520e747e5ea970a70a63de5b86abd9776f11fd0ddf6608bb52b39"} Jan 28 17:00:03 crc kubenswrapper[4811]: I0128 17:00:03.817589 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493660-fq594" Jan 28 17:00:03 crc kubenswrapper[4811]: I0128 17:00:03.882707 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/25e9aa33-6d76-454b-8bec-5bdebdc5affc-config-volume\") pod \"25e9aa33-6d76-454b-8bec-5bdebdc5affc\" (UID: \"25e9aa33-6d76-454b-8bec-5bdebdc5affc\") " Jan 28 17:00:03 crc kubenswrapper[4811]: I0128 17:00:03.882815 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/25e9aa33-6d76-454b-8bec-5bdebdc5affc-secret-volume\") pod \"25e9aa33-6d76-454b-8bec-5bdebdc5affc\" (UID: \"25e9aa33-6d76-454b-8bec-5bdebdc5affc\") " Jan 28 17:00:03 crc kubenswrapper[4811]: I0128 17:00:03.882844 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pgv27\" (UniqueName: \"kubernetes.io/projected/25e9aa33-6d76-454b-8bec-5bdebdc5affc-kube-api-access-pgv27\") pod \"25e9aa33-6d76-454b-8bec-5bdebdc5affc\" (UID: \"25e9aa33-6d76-454b-8bec-5bdebdc5affc\") " Jan 28 17:00:03 crc kubenswrapper[4811]: I0128 17:00:03.883408 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e9aa33-6d76-454b-8bec-5bdebdc5affc-config-volume" (OuterVolumeSpecName: "config-volume") pod "25e9aa33-6d76-454b-8bec-5bdebdc5affc" (UID: "25e9aa33-6d76-454b-8bec-5bdebdc5affc"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:00:03 crc kubenswrapper[4811]: I0128 17:00:03.889322 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e9aa33-6d76-454b-8bec-5bdebdc5affc-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "25e9aa33-6d76-454b-8bec-5bdebdc5affc" (UID: "25e9aa33-6d76-454b-8bec-5bdebdc5affc"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:00:03 crc kubenswrapper[4811]: I0128 17:00:03.889317 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e9aa33-6d76-454b-8bec-5bdebdc5affc-kube-api-access-pgv27" (OuterVolumeSpecName: "kube-api-access-pgv27") pod "25e9aa33-6d76-454b-8bec-5bdebdc5affc" (UID: "25e9aa33-6d76-454b-8bec-5bdebdc5affc"). InnerVolumeSpecName "kube-api-access-pgv27". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:00:03 crc kubenswrapper[4811]: I0128 17:00:03.984394 4811 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/25e9aa33-6d76-454b-8bec-5bdebdc5affc-config-volume\") on node \"crc\" DevicePath \"\"" Jan 28 17:00:03 crc kubenswrapper[4811]: I0128 17:00:03.984456 4811 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/25e9aa33-6d76-454b-8bec-5bdebdc5affc-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 28 17:00:03 crc kubenswrapper[4811]: I0128 17:00:03.984474 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pgv27\" (UniqueName: \"kubernetes.io/projected/25e9aa33-6d76-454b-8bec-5bdebdc5affc-kube-api-access-pgv27\") on node \"crc\" DevicePath \"\"" Jan 28 17:00:04 crc kubenswrapper[4811]: I0128 17:00:04.560191 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493660-fq594" event={"ID":"25e9aa33-6d76-454b-8bec-5bdebdc5affc","Type":"ContainerDied","Data":"933f89a944fd7eb408bbf79c9db83ab2df2948c120a6301be56d56976032c912"} Jan 28 17:00:04 crc kubenswrapper[4811]: I0128 17:00:04.560530 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="933f89a944fd7eb408bbf79c9db83ab2df2948c120a6301be56d56976032c912" Jan 28 17:00:04 crc kubenswrapper[4811]: I0128 17:00:04.560652 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493660-fq594" Jan 28 17:00:04 crc kubenswrapper[4811]: I0128 17:00:04.617852 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493615-cdnh6"] Jan 28 17:00:04 crc kubenswrapper[4811]: I0128 17:00:04.635800 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493615-cdnh6"] Jan 28 17:00:05 crc kubenswrapper[4811]: I0128 17:00:05.339338 4811 scope.go:117] "RemoveContainer" containerID="2f832fdc03f2c74b172c98339268d3805e889021c7e9a228d350776a1560d0b8" Jan 28 17:00:05 crc kubenswrapper[4811]: E0128 17:00:05.339615 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:00:06 crc kubenswrapper[4811]: I0128 17:00:06.350410 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27e7b42c-f027-4446-8fcb-1f9cb9ca6691" path="/var/lib/kubelet/pods/27e7b42c-f027-4446-8fcb-1f9cb9ca6691/volumes" Jan 28 17:00:19 crc kubenswrapper[4811]: I0128 17:00:19.339479 4811 scope.go:117] "RemoveContainer" containerID="2f832fdc03f2c74b172c98339268d3805e889021c7e9a228d350776a1560d0b8" Jan 28 17:00:19 crc kubenswrapper[4811]: E0128 17:00:19.340303 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:00:29 crc kubenswrapper[4811]: I0128 17:00:29.841283 4811 scope.go:117] "RemoveContainer" containerID="4b60a62e58b3b608ad45cb758e4b9e7bae51b82799ee4ad49da0e0f6e7a25fbd" Jan 28 17:00:31 crc kubenswrapper[4811]: I0128 17:00:31.339471 4811 scope.go:117] "RemoveContainer" containerID="2f832fdc03f2c74b172c98339268d3805e889021c7e9a228d350776a1560d0b8" Jan 28 17:00:31 crc kubenswrapper[4811]: E0128 17:00:31.339830 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:00:42 crc kubenswrapper[4811]: I0128 17:00:42.339476 4811 scope.go:117] "RemoveContainer" containerID="2f832fdc03f2c74b172c98339268d3805e889021c7e9a228d350776a1560d0b8" Jan 28 17:00:42 crc kubenswrapper[4811]: I0128 17:00:42.822740 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"5bb331d50da91e956ddfd0ead12512bba7568cb388527ac7ffc8cf3f494a2d49"} Jan 28 17:02:37 crc kubenswrapper[4811]: I0128 17:02:37.375118 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-tx9bl"] Jan 28 17:02:37 crc kubenswrapper[4811]: I0128 17:02:37.382371 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-tx9bl"] Jan 28 17:02:37 crc kubenswrapper[4811]: I0128 17:02:37.503810 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-m4xkj"] Jan 28 17:02:37 crc kubenswrapper[4811]: E0128 17:02:37.504156 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25e9aa33-6d76-454b-8bec-5bdebdc5affc" containerName="collect-profiles" Jan 28 17:02:37 crc kubenswrapper[4811]: I0128 17:02:37.504174 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="25e9aa33-6d76-454b-8bec-5bdebdc5affc" containerName="collect-profiles" Jan 28 17:02:37 crc kubenswrapper[4811]: I0128 17:02:37.504307 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="25e9aa33-6d76-454b-8bec-5bdebdc5affc" containerName="collect-profiles" Jan 28 17:02:37 crc kubenswrapper[4811]: I0128 17:02:37.504875 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-m4xkj" Jan 28 17:02:37 crc kubenswrapper[4811]: I0128 17:02:37.507879 4811 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-jbh6j" Jan 28 17:02:37 crc kubenswrapper[4811]: I0128 17:02:37.507927 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 28 17:02:37 crc kubenswrapper[4811]: I0128 17:02:37.508041 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 28 17:02:37 crc kubenswrapper[4811]: I0128 17:02:37.508779 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 28 17:02:37 crc kubenswrapper[4811]: I0128 17:02:37.517235 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-m4xkj"] Jan 28 17:02:37 crc kubenswrapper[4811]: I0128 17:02:37.674570 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdz9f\" (UniqueName: \"kubernetes.io/projected/7b773b0e-5010-4132-aadf-0de81f89ef9c-kube-api-access-bdz9f\") pod \"crc-storage-crc-m4xkj\" (UID: \"7b773b0e-5010-4132-aadf-0de81f89ef9c\") " pod="crc-storage/crc-storage-crc-m4xkj" Jan 28 17:02:37 crc kubenswrapper[4811]: I0128 17:02:37.674670 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/7b773b0e-5010-4132-aadf-0de81f89ef9c-node-mnt\") pod \"crc-storage-crc-m4xkj\" (UID: \"7b773b0e-5010-4132-aadf-0de81f89ef9c\") " pod="crc-storage/crc-storage-crc-m4xkj" Jan 28 17:02:37 crc kubenswrapper[4811]: I0128 17:02:37.674720 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/7b773b0e-5010-4132-aadf-0de81f89ef9c-crc-storage\") pod \"crc-storage-crc-m4xkj\" (UID: \"7b773b0e-5010-4132-aadf-0de81f89ef9c\") " pod="crc-storage/crc-storage-crc-m4xkj" Jan 28 17:02:37 crc kubenswrapper[4811]: I0128 17:02:37.776032 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdz9f\" (UniqueName: \"kubernetes.io/projected/7b773b0e-5010-4132-aadf-0de81f89ef9c-kube-api-access-bdz9f\") pod \"crc-storage-crc-m4xkj\" (UID: \"7b773b0e-5010-4132-aadf-0de81f89ef9c\") " pod="crc-storage/crc-storage-crc-m4xkj" Jan 28 17:02:37 crc kubenswrapper[4811]: I0128 17:02:37.776126 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/7b773b0e-5010-4132-aadf-0de81f89ef9c-node-mnt\") pod \"crc-storage-crc-m4xkj\" (UID: \"7b773b0e-5010-4132-aadf-0de81f89ef9c\") " pod="crc-storage/crc-storage-crc-m4xkj" Jan 28 17:02:37 crc kubenswrapper[4811]: I0128 17:02:37.776177 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/7b773b0e-5010-4132-aadf-0de81f89ef9c-crc-storage\") pod \"crc-storage-crc-m4xkj\" (UID: \"7b773b0e-5010-4132-aadf-0de81f89ef9c\") " pod="crc-storage/crc-storage-crc-m4xkj" Jan 28 17:02:37 crc kubenswrapper[4811]: I0128 17:02:37.776618 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/7b773b0e-5010-4132-aadf-0de81f89ef9c-node-mnt\") pod \"crc-storage-crc-m4xkj\" (UID: \"7b773b0e-5010-4132-aadf-0de81f89ef9c\") " 
pod="crc-storage/crc-storage-crc-m4xkj" Jan 28 17:02:37 crc kubenswrapper[4811]: I0128 17:02:37.777115 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/7b773b0e-5010-4132-aadf-0de81f89ef9c-crc-storage\") pod \"crc-storage-crc-m4xkj\" (UID: \"7b773b0e-5010-4132-aadf-0de81f89ef9c\") " pod="crc-storage/crc-storage-crc-m4xkj" Jan 28 17:02:37 crc kubenswrapper[4811]: I0128 17:02:37.807143 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdz9f\" (UniqueName: \"kubernetes.io/projected/7b773b0e-5010-4132-aadf-0de81f89ef9c-kube-api-access-bdz9f\") pod \"crc-storage-crc-m4xkj\" (UID: \"7b773b0e-5010-4132-aadf-0de81f89ef9c\") " pod="crc-storage/crc-storage-crc-m4xkj" Jan 28 17:02:37 crc kubenswrapper[4811]: I0128 17:02:37.826568 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-m4xkj" Jan 28 17:02:38 crc kubenswrapper[4811]: I0128 17:02:38.302368 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-m4xkj"] Jan 28 17:02:38 crc kubenswrapper[4811]: I0128 17:02:38.305368 4811 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 28 17:02:38 crc kubenswrapper[4811]: I0128 17:02:38.371068 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="325e4634-8f82-469a-a1d6-0b9f0364f1dc" path="/var/lib/kubelet/pods/325e4634-8f82-469a-a1d6-0b9f0364f1dc/volumes" Jan 28 17:02:38 crc kubenswrapper[4811]: I0128 17:02:38.562935 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-m4xkj" event={"ID":"7b773b0e-5010-4132-aadf-0de81f89ef9c","Type":"ContainerStarted","Data":"c011dee0724badf696348b6cf2dc8daf982aae64c79157ef9eac0284ce090122"} Jan 28 17:02:39 crc kubenswrapper[4811]: I0128 17:02:39.571139 4811 generic.go:334] "Generic (PLEG): container finished" podID="7b773b0e-5010-4132-aadf-0de81f89ef9c" containerID="331128c1b82f83cc760c7816874ada2dba6116550501e6e7e88f5a9b2777c088" exitCode=0 Jan 28 17:02:39 crc kubenswrapper[4811]: I0128 17:02:39.571229 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-m4xkj" event={"ID":"7b773b0e-5010-4132-aadf-0de81f89ef9c","Type":"ContainerDied","Data":"331128c1b82f83cc760c7816874ada2dba6116550501e6e7e88f5a9b2777c088"} Jan 28 17:02:40 crc kubenswrapper[4811]: I0128 17:02:40.828792 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-m4xkj" Jan 28 17:02:40 crc kubenswrapper[4811]: I0128 17:02:40.935289 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/7b773b0e-5010-4132-aadf-0de81f89ef9c-node-mnt\") pod \"7b773b0e-5010-4132-aadf-0de81f89ef9c\" (UID: \"7b773b0e-5010-4132-aadf-0de81f89ef9c\") " Jan 28 17:02:40 crc kubenswrapper[4811]: I0128 17:02:40.935371 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/7b773b0e-5010-4132-aadf-0de81f89ef9c-crc-storage\") pod \"7b773b0e-5010-4132-aadf-0de81f89ef9c\" (UID: \"7b773b0e-5010-4132-aadf-0de81f89ef9c\") " Jan 28 17:02:40 crc kubenswrapper[4811]: I0128 17:02:40.935506 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bdz9f\" (UniqueName: \"kubernetes.io/projected/7b773b0e-5010-4132-aadf-0de81f89ef9c-kube-api-access-bdz9f\") pod \"7b773b0e-5010-4132-aadf-0de81f89ef9c\" (UID: \"7b773b0e-5010-4132-aadf-0de81f89ef9c\") " Jan 28 17:02:40 crc kubenswrapper[4811]: I0128 17:02:40.935611 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7b773b0e-5010-4132-aadf-0de81f89ef9c-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "7b773b0e-5010-4132-aadf-0de81f89ef9c" (UID: "7b773b0e-5010-4132-aadf-0de81f89ef9c"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 17:02:40 crc kubenswrapper[4811]: I0128 17:02:40.935840 4811 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/7b773b0e-5010-4132-aadf-0de81f89ef9c-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 28 17:02:40 crc kubenswrapper[4811]: I0128 17:02:40.941808 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b773b0e-5010-4132-aadf-0de81f89ef9c-kube-api-access-bdz9f" (OuterVolumeSpecName: "kube-api-access-bdz9f") pod "7b773b0e-5010-4132-aadf-0de81f89ef9c" (UID: "7b773b0e-5010-4132-aadf-0de81f89ef9c"). InnerVolumeSpecName "kube-api-access-bdz9f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:02:40 crc kubenswrapper[4811]: I0128 17:02:40.955053 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b773b0e-5010-4132-aadf-0de81f89ef9c-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "7b773b0e-5010-4132-aadf-0de81f89ef9c" (UID: "7b773b0e-5010-4132-aadf-0de81f89ef9c"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:02:41 crc kubenswrapper[4811]: I0128 17:02:41.037612 4811 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/7b773b0e-5010-4132-aadf-0de81f89ef9c-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 28 17:02:41 crc kubenswrapper[4811]: I0128 17:02:41.037653 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bdz9f\" (UniqueName: \"kubernetes.io/projected/7b773b0e-5010-4132-aadf-0de81f89ef9c-kube-api-access-bdz9f\") on node \"crc\" DevicePath \"\"" Jan 28 17:02:41 crc kubenswrapper[4811]: I0128 17:02:41.588389 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-m4xkj" event={"ID":"7b773b0e-5010-4132-aadf-0de81f89ef9c","Type":"ContainerDied","Data":"c011dee0724badf696348b6cf2dc8daf982aae64c79157ef9eac0284ce090122"} Jan 28 17:02:41 crc kubenswrapper[4811]: I0128 17:02:41.588681 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c011dee0724badf696348b6cf2dc8daf982aae64c79157ef9eac0284ce090122" Jan 28 17:02:41 crc kubenswrapper[4811]: I0128 17:02:41.588472 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-m4xkj" Jan 28 17:02:42 crc kubenswrapper[4811]: I0128 17:02:42.935971 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-m4xkj"] Jan 28 17:02:42 crc kubenswrapper[4811]: I0128 17:02:42.943009 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-m4xkj"] Jan 28 17:02:43 crc kubenswrapper[4811]: I0128 17:02:43.074234 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-xxq8z"] Jan 28 17:02:43 crc kubenswrapper[4811]: E0128 17:02:43.074651 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b773b0e-5010-4132-aadf-0de81f89ef9c" containerName="storage" Jan 28 17:02:43 crc kubenswrapper[4811]: I0128 17:02:43.074677 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b773b0e-5010-4132-aadf-0de81f89ef9c" containerName="storage" Jan 28 17:02:43 crc kubenswrapper[4811]: I0128 17:02:43.074878 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b773b0e-5010-4132-aadf-0de81f89ef9c" containerName="storage" Jan 28 17:02:43 crc kubenswrapper[4811]: I0128 17:02:43.075485 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-xxq8z" Jan 28 17:02:43 crc kubenswrapper[4811]: I0128 17:02:43.077388 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 28 17:02:43 crc kubenswrapper[4811]: I0128 17:02:43.081754 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 28 17:02:43 crc kubenswrapper[4811]: I0128 17:02:43.081923 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 28 17:02:43 crc kubenswrapper[4811]: I0128 17:02:43.084200 4811 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-jbh6j" Jan 28 17:02:43 crc kubenswrapper[4811]: I0128 17:02:43.089349 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-xxq8z"] Jan 28 17:02:43 crc kubenswrapper[4811]: I0128 17:02:43.168838 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d35c4311-a51a-42bb-a7b5-a2b34828cb36-crc-storage\") pod \"crc-storage-crc-xxq8z\" (UID: \"d35c4311-a51a-42bb-a7b5-a2b34828cb36\") " pod="crc-storage/crc-storage-crc-xxq8z" Jan 28 17:02:43 crc kubenswrapper[4811]: I0128 17:02:43.168888 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2d59q\" (UniqueName: \"kubernetes.io/projected/d35c4311-a51a-42bb-a7b5-a2b34828cb36-kube-api-access-2d59q\") pod \"crc-storage-crc-xxq8z\" (UID: \"d35c4311-a51a-42bb-a7b5-a2b34828cb36\") " pod="crc-storage/crc-storage-crc-xxq8z" Jan 28 17:02:43 crc kubenswrapper[4811]: I0128 17:02:43.168912 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d35c4311-a51a-42bb-a7b5-a2b34828cb36-node-mnt\") pod \"crc-storage-crc-xxq8z\" (UID: \"d35c4311-a51a-42bb-a7b5-a2b34828cb36\") " pod="crc-storage/crc-storage-crc-xxq8z" Jan 28 17:02:43 crc kubenswrapper[4811]: I0128 17:02:43.269975 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d35c4311-a51a-42bb-a7b5-a2b34828cb36-crc-storage\") pod \"crc-storage-crc-xxq8z\" (UID: \"d35c4311-a51a-42bb-a7b5-a2b34828cb36\") " pod="crc-storage/crc-storage-crc-xxq8z" Jan 28 17:02:43 crc kubenswrapper[4811]: I0128 17:02:43.270030 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2d59q\" (UniqueName: \"kubernetes.io/projected/d35c4311-a51a-42bb-a7b5-a2b34828cb36-kube-api-access-2d59q\") pod \"crc-storage-crc-xxq8z\" (UID: \"d35c4311-a51a-42bb-a7b5-a2b34828cb36\") " pod="crc-storage/crc-storage-crc-xxq8z" Jan 28 17:02:43 crc kubenswrapper[4811]: I0128 17:02:43.270056 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d35c4311-a51a-42bb-a7b5-a2b34828cb36-node-mnt\") pod \"crc-storage-crc-xxq8z\" (UID: \"d35c4311-a51a-42bb-a7b5-a2b34828cb36\") " pod="crc-storage/crc-storage-crc-xxq8z" Jan 28 17:02:43 crc kubenswrapper[4811]: I0128 17:02:43.270457 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d35c4311-a51a-42bb-a7b5-a2b34828cb36-node-mnt\") pod \"crc-storage-crc-xxq8z\" (UID: \"d35c4311-a51a-42bb-a7b5-a2b34828cb36\") " 
pod="crc-storage/crc-storage-crc-xxq8z" Jan 28 17:02:43 crc kubenswrapper[4811]: I0128 17:02:43.270730 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d35c4311-a51a-42bb-a7b5-a2b34828cb36-crc-storage\") pod \"crc-storage-crc-xxq8z\" (UID: \"d35c4311-a51a-42bb-a7b5-a2b34828cb36\") " pod="crc-storage/crc-storage-crc-xxq8z" Jan 28 17:02:43 crc kubenswrapper[4811]: I0128 17:02:43.395066 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2d59q\" (UniqueName: \"kubernetes.io/projected/d35c4311-a51a-42bb-a7b5-a2b34828cb36-kube-api-access-2d59q\") pod \"crc-storage-crc-xxq8z\" (UID: \"d35c4311-a51a-42bb-a7b5-a2b34828cb36\") " pod="crc-storage/crc-storage-crc-xxq8z" Jan 28 17:02:43 crc kubenswrapper[4811]: I0128 17:02:43.691468 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-xxq8z" Jan 28 17:02:44 crc kubenswrapper[4811]: I0128 17:02:44.229358 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-xxq8z"] Jan 28 17:02:44 crc kubenswrapper[4811]: I0128 17:02:44.350141 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b773b0e-5010-4132-aadf-0de81f89ef9c" path="/var/lib/kubelet/pods/7b773b0e-5010-4132-aadf-0de81f89ef9c/volumes" Jan 28 17:02:44 crc kubenswrapper[4811]: I0128 17:02:44.608371 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-xxq8z" event={"ID":"d35c4311-a51a-42bb-a7b5-a2b34828cb36","Type":"ContainerStarted","Data":"673e46fd06b74edb14cb484ddf6c70aa8d7dca9ac27d1b0d82e231233ca4c954"} Jan 28 17:02:45 crc kubenswrapper[4811]: I0128 17:02:45.617861 4811 generic.go:334] "Generic (PLEG): container finished" podID="d35c4311-a51a-42bb-a7b5-a2b34828cb36" containerID="04b8dcdab3b244b67f8f27ade2d4d76dd5a651edd13590ff7f52a53254db02a1" exitCode=0 Jan 28 17:02:45 crc kubenswrapper[4811]: I0128 17:02:45.617936 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-xxq8z" event={"ID":"d35c4311-a51a-42bb-a7b5-a2b34828cb36","Type":"ContainerDied","Data":"04b8dcdab3b244b67f8f27ade2d4d76dd5a651edd13590ff7f52a53254db02a1"} Jan 28 17:02:46 crc kubenswrapper[4811]: I0128 17:02:46.862212 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-xxq8z" Jan 28 17:02:47 crc kubenswrapper[4811]: I0128 17:02:47.024485 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d59q\" (UniqueName: \"kubernetes.io/projected/d35c4311-a51a-42bb-a7b5-a2b34828cb36-kube-api-access-2d59q\") pod \"d35c4311-a51a-42bb-a7b5-a2b34828cb36\" (UID: \"d35c4311-a51a-42bb-a7b5-a2b34828cb36\") " Jan 28 17:02:47 crc kubenswrapper[4811]: I0128 17:02:47.024542 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d35c4311-a51a-42bb-a7b5-a2b34828cb36-node-mnt\") pod \"d35c4311-a51a-42bb-a7b5-a2b34828cb36\" (UID: \"d35c4311-a51a-42bb-a7b5-a2b34828cb36\") " Jan 28 17:02:47 crc kubenswrapper[4811]: I0128 17:02:47.024589 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d35c4311-a51a-42bb-a7b5-a2b34828cb36-crc-storage\") pod \"d35c4311-a51a-42bb-a7b5-a2b34828cb36\" (UID: \"d35c4311-a51a-42bb-a7b5-a2b34828cb36\") " Jan 28 17:02:47 crc kubenswrapper[4811]: I0128 17:02:47.025881 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d35c4311-a51a-42bb-a7b5-a2b34828cb36-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "d35c4311-a51a-42bb-a7b5-a2b34828cb36" (UID: "d35c4311-a51a-42bb-a7b5-a2b34828cb36"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 17:02:47 crc kubenswrapper[4811]: I0128 17:02:47.031413 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d35c4311-a51a-42bb-a7b5-a2b34828cb36-kube-api-access-2d59q" (OuterVolumeSpecName: "kube-api-access-2d59q") pod "d35c4311-a51a-42bb-a7b5-a2b34828cb36" (UID: "d35c4311-a51a-42bb-a7b5-a2b34828cb36"). InnerVolumeSpecName "kube-api-access-2d59q". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:02:47 crc kubenswrapper[4811]: I0128 17:02:47.076885 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d35c4311-a51a-42bb-a7b5-a2b34828cb36-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "d35c4311-a51a-42bb-a7b5-a2b34828cb36" (UID: "d35c4311-a51a-42bb-a7b5-a2b34828cb36"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:02:47 crc kubenswrapper[4811]: I0128 17:02:47.126122 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d59q\" (UniqueName: \"kubernetes.io/projected/d35c4311-a51a-42bb-a7b5-a2b34828cb36-kube-api-access-2d59q\") on node \"crc\" DevicePath \"\"" Jan 28 17:02:47 crc kubenswrapper[4811]: I0128 17:02:47.126174 4811 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d35c4311-a51a-42bb-a7b5-a2b34828cb36-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 28 17:02:47 crc kubenswrapper[4811]: I0128 17:02:47.126188 4811 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d35c4311-a51a-42bb-a7b5-a2b34828cb36-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 28 17:02:47 crc kubenswrapper[4811]: I0128 17:02:47.633449 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-xxq8z" event={"ID":"d35c4311-a51a-42bb-a7b5-a2b34828cb36","Type":"ContainerDied","Data":"673e46fd06b74edb14cb484ddf6c70aa8d7dca9ac27d1b0d82e231233ca4c954"} Jan 28 17:02:47 crc kubenswrapper[4811]: I0128 17:02:47.633694 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="673e46fd06b74edb14cb484ddf6c70aa8d7dca9ac27d1b0d82e231233ca4c954" Jan 28 17:02:47 crc kubenswrapper[4811]: I0128 17:02:47.633597 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-xxq8z" Jan 28 17:03:03 crc kubenswrapper[4811]: I0128 17:03:03.087925 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 17:03:03 crc kubenswrapper[4811]: I0128 17:03:03.090568 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 17:03:29 crc kubenswrapper[4811]: I0128 17:03:29.919801 4811 scope.go:117] "RemoveContainer" containerID="1baae8a95ec118fb48ee83d472f5a4ced1ac9a74692d4755eb2f8907a281a9ac" Jan 28 17:03:33 crc kubenswrapper[4811]: I0128 17:03:33.087136 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 17:03:33 crc kubenswrapper[4811]: I0128 17:03:33.087692 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 17:04:03 crc kubenswrapper[4811]: I0128 17:04:03.087162 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: 
connect: connection refused" start-of-body= Jan 28 17:04:03 crc kubenswrapper[4811]: I0128 17:04:03.087791 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 17:04:03 crc kubenswrapper[4811]: I0128 17:04:03.087846 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" Jan 28 17:04:03 crc kubenswrapper[4811]: I0128 17:04:03.088466 4811 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5bb331d50da91e956ddfd0ead12512bba7568cb388527ac7ffc8cf3f494a2d49"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 28 17:04:03 crc kubenswrapper[4811]: I0128 17:04:03.088523 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://5bb331d50da91e956ddfd0ead12512bba7568cb388527ac7ffc8cf3f494a2d49" gracePeriod=600 Jan 28 17:04:04 crc kubenswrapper[4811]: I0128 17:04:04.146331 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="5bb331d50da91e956ddfd0ead12512bba7568cb388527ac7ffc8cf3f494a2d49" exitCode=0 Jan 28 17:04:04 crc kubenswrapper[4811]: I0128 17:04:04.146395 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"5bb331d50da91e956ddfd0ead12512bba7568cb388527ac7ffc8cf3f494a2d49"} Jan 28 17:04:04 crc kubenswrapper[4811]: I0128 17:04:04.146953 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"81a84787cf417a50710a20c50547d9e6d7f6adc8c98070a304ddbd77fa4c5ebc"} Jan 28 17:04:04 crc kubenswrapper[4811]: I0128 17:04:04.146981 4811 scope.go:117] "RemoveContainer" containerID="2f832fdc03f2c74b172c98339268d3805e889021c7e9a228d350776a1560d0b8" Jan 28 17:04:50 crc kubenswrapper[4811]: I0128 17:04:50.878580 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-l524t"] Jan 28 17:04:50 crc kubenswrapper[4811]: E0128 17:04:50.879869 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d35c4311-a51a-42bb-a7b5-a2b34828cb36" containerName="storage" Jan 28 17:04:50 crc kubenswrapper[4811]: I0128 17:04:50.879896 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d35c4311-a51a-42bb-a7b5-a2b34828cb36" containerName="storage" Jan 28 17:04:50 crc kubenswrapper[4811]: I0128 17:04:50.880188 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d35c4311-a51a-42bb-a7b5-a2b34828cb36" containerName="storage" Jan 28 17:04:50 crc kubenswrapper[4811]: I0128 17:04:50.881833 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-l524t" Jan 28 17:04:50 crc kubenswrapper[4811]: I0128 17:04:50.887867 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-l524t"] Jan 28 17:04:50 crc kubenswrapper[4811]: I0128 17:04:50.952107 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff-catalog-content\") pod \"community-operators-l524t\" (UID: \"9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff\") " pod="openshift-marketplace/community-operators-l524t" Jan 28 17:04:50 crc kubenswrapper[4811]: I0128 17:04:50.952395 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff-utilities\") pod \"community-operators-l524t\" (UID: \"9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff\") " pod="openshift-marketplace/community-operators-l524t" Jan 28 17:04:50 crc kubenswrapper[4811]: I0128 17:04:50.952491 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hh9bs\" (UniqueName: \"kubernetes.io/projected/9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff-kube-api-access-hh9bs\") pod \"community-operators-l524t\" (UID: \"9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff\") " pod="openshift-marketplace/community-operators-l524t" Jan 28 17:04:51 crc kubenswrapper[4811]: I0128 17:04:51.054367 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff-utilities\") pod \"community-operators-l524t\" (UID: \"9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff\") " pod="openshift-marketplace/community-operators-l524t" Jan 28 17:04:51 crc kubenswrapper[4811]: I0128 17:04:51.054451 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hh9bs\" (UniqueName: \"kubernetes.io/projected/9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff-kube-api-access-hh9bs\") pod \"community-operators-l524t\" (UID: \"9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff\") " pod="openshift-marketplace/community-operators-l524t" Jan 28 17:04:51 crc kubenswrapper[4811]: I0128 17:04:51.054494 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff-catalog-content\") pod \"community-operators-l524t\" (UID: \"9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff\") " pod="openshift-marketplace/community-operators-l524t" Jan 28 17:04:51 crc kubenswrapper[4811]: I0128 17:04:51.055061 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff-utilities\") pod \"community-operators-l524t\" (UID: \"9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff\") " pod="openshift-marketplace/community-operators-l524t" Jan 28 17:04:51 crc kubenswrapper[4811]: I0128 17:04:51.055117 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff-catalog-content\") pod \"community-operators-l524t\" (UID: \"9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff\") " pod="openshift-marketplace/community-operators-l524t" Jan 28 17:04:51 crc kubenswrapper[4811]: I0128 17:04:51.082553 4811 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-hh9bs\" (UniqueName: \"kubernetes.io/projected/9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff-kube-api-access-hh9bs\") pod \"community-operators-l524t\" (UID: \"9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff\") " pod="openshift-marketplace/community-operators-l524t" Jan 28 17:04:51 crc kubenswrapper[4811]: I0128 17:04:51.220781 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l524t" Jan 28 17:04:51 crc kubenswrapper[4811]: I0128 17:04:51.715975 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-l524t"] Jan 28 17:04:52 crc kubenswrapper[4811]: I0128 17:04:52.511227 4811 generic.go:334] "Generic (PLEG): container finished" podID="9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff" containerID="f4f955087e3c5fb648118bfc17e0edf117d88a252fb9e8100df05afceee9983c" exitCode=0 Jan 28 17:04:52 crc kubenswrapper[4811]: I0128 17:04:52.511273 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l524t" event={"ID":"9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff","Type":"ContainerDied","Data":"f4f955087e3c5fb648118bfc17e0edf117d88a252fb9e8100df05afceee9983c"} Jan 28 17:04:52 crc kubenswrapper[4811]: I0128 17:04:52.511836 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l524t" event={"ID":"9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff","Type":"ContainerStarted","Data":"9521d6e701d32bc006a62960cd9b7b4e8bf0fa3ff704aaf47ced8fd0f1db789d"} Jan 28 17:04:55 crc kubenswrapper[4811]: I0128 17:04:55.534114 4811 generic.go:334] "Generic (PLEG): container finished" podID="9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff" containerID="3453067d884ade505bc77eada87ec00df69bac9a9cc6a4920bc315082b84aebd" exitCode=0 Jan 28 17:04:55 crc kubenswrapper[4811]: I0128 17:04:55.534198 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l524t" event={"ID":"9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff","Type":"ContainerDied","Data":"3453067d884ade505bc77eada87ec00df69bac9a9cc6a4920bc315082b84aebd"} Jan 28 17:04:57 crc kubenswrapper[4811]: I0128 17:04:57.551987 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l524t" event={"ID":"9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff","Type":"ContainerStarted","Data":"36ab1ab7530761af6a7b7a4c816f73657c78cc2f3f43f48fdb34547a8e2d0d58"} Jan 28 17:04:57 crc kubenswrapper[4811]: I0128 17:04:57.576176 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-l524t" podStartSLOduration=3.273580454 podStartE2EDuration="7.576157796s" podCreationTimestamp="2026-01-28 17:04:50 +0000 UTC" firstStartedPulling="2026-01-28 17:04:52.513151924 +0000 UTC m=+4785.267515507" lastFinishedPulling="2026-01-28 17:04:56.815729266 +0000 UTC m=+4789.570092849" observedRunningTime="2026-01-28 17:04:57.570331518 +0000 UTC m=+4790.324695121" watchObservedRunningTime="2026-01-28 17:04:57.576157796 +0000 UTC m=+4790.330521379" Jan 28 17:05:01 crc kubenswrapper[4811]: I0128 17:05:01.221315 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-l524t" Jan 28 17:05:01 crc kubenswrapper[4811]: I0128 17:05:01.221604 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-l524t" Jan 28 17:05:01 crc kubenswrapper[4811]: I0128 17:05:01.265077 4811 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-l524t" Jan 28 17:05:11 crc kubenswrapper[4811]: I0128 17:05:11.262004 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-l524t" Jan 28 17:05:11 crc kubenswrapper[4811]: I0128 17:05:11.307648 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-l524t"] Jan 28 17:05:11 crc kubenswrapper[4811]: I0128 17:05:11.648561 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-l524t" podUID="9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff" containerName="registry-server" containerID="cri-o://36ab1ab7530761af6a7b7a4c816f73657c78cc2f3f43f48fdb34547a8e2d0d58" gracePeriod=2 Jan 28 17:05:12 crc kubenswrapper[4811]: I0128 17:05:12.644176 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l524t" Jan 28 17:05:12 crc kubenswrapper[4811]: I0128 17:05:12.667235 4811 generic.go:334] "Generic (PLEG): container finished" podID="9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff" containerID="36ab1ab7530761af6a7b7a4c816f73657c78cc2f3f43f48fdb34547a8e2d0d58" exitCode=0 Jan 28 17:05:12 crc kubenswrapper[4811]: I0128 17:05:12.667268 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l524t" event={"ID":"9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff","Type":"ContainerDied","Data":"36ab1ab7530761af6a7b7a4c816f73657c78cc2f3f43f48fdb34547a8e2d0d58"} Jan 28 17:05:12 crc kubenswrapper[4811]: I0128 17:05:12.667297 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l524t" event={"ID":"9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff","Type":"ContainerDied","Data":"9521d6e701d32bc006a62960cd9b7b4e8bf0fa3ff704aaf47ced8fd0f1db789d"} Jan 28 17:05:12 crc kubenswrapper[4811]: I0128 17:05:12.667354 4811 scope.go:117] "RemoveContainer" containerID="36ab1ab7530761af6a7b7a4c816f73657c78cc2f3f43f48fdb34547a8e2d0d58" Jan 28 17:05:12 crc kubenswrapper[4811]: I0128 17:05:12.667549 4811 util.go:48] "No ready sandbox for pod can be found. 
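
The pod_startup_latency_tracker entry above encodes a relationship worth making explicit: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration subtracts the image-pull window (lastFinishedPulling minus firstStartedPulling) from that. Recomputing from the logged timestamps reproduces both figures; a quick arithmetic check, not the tracker's actual code:

    package main

    import (
        "fmt"
        "time"
    )

    // layout for timestamps as they appear in the log entry above.
    const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

    func mustParse(s string) time.Time {
        t, err := time.Parse(layout, s)
        if err != nil {
            panic(err)
        }
        return t
    }

    func main() {
        created := mustParse("2026-01-28 17:04:50 +0000 UTC")
        firstPull := mustParse("2026-01-28 17:04:52.513151924 +0000 UTC")
        lastPull := mustParse("2026-01-28 17:04:56.815729266 +0000 UTC")
        running := mustParse("2026-01-28 17:04:57.576157796 +0000 UTC") // watchObservedRunningTime

        e2e := running.Sub(created)
        slo := e2e - lastPull.Sub(firstPull)
        fmt.Println("podStartE2EDuration:", e2e) // 7.576157796s, as logged
        fmt.Println("podStartSLOduration:", slo) // 3.273580454s, as logged
    }

Both printed values match the log line exactly, so the 4.3 seconds spent pulling the catalog image is excluded from the SLO figure by construction.
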
Need to start a new one" pod="openshift-marketplace/community-operators-l524t" Jan 28 17:05:12 crc kubenswrapper[4811]: I0128 17:05:12.684047 4811 scope.go:117] "RemoveContainer" containerID="3453067d884ade505bc77eada87ec00df69bac9a9cc6a4920bc315082b84aebd" Jan 28 17:05:12 crc kubenswrapper[4811]: I0128 17:05:12.720399 4811 scope.go:117] "RemoveContainer" containerID="f4f955087e3c5fb648118bfc17e0edf117d88a252fb9e8100df05afceee9983c" Jan 28 17:05:12 crc kubenswrapper[4811]: I0128 17:05:12.747624 4811 scope.go:117] "RemoveContainer" containerID="36ab1ab7530761af6a7b7a4c816f73657c78cc2f3f43f48fdb34547a8e2d0d58" Jan 28 17:05:12 crc kubenswrapper[4811]: E0128 17:05:12.750818 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36ab1ab7530761af6a7b7a4c816f73657c78cc2f3f43f48fdb34547a8e2d0d58\": container with ID starting with 36ab1ab7530761af6a7b7a4c816f73657c78cc2f3f43f48fdb34547a8e2d0d58 not found: ID does not exist" containerID="36ab1ab7530761af6a7b7a4c816f73657c78cc2f3f43f48fdb34547a8e2d0d58" Jan 28 17:05:12 crc kubenswrapper[4811]: I0128 17:05:12.750870 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36ab1ab7530761af6a7b7a4c816f73657c78cc2f3f43f48fdb34547a8e2d0d58"} err="failed to get container status \"36ab1ab7530761af6a7b7a4c816f73657c78cc2f3f43f48fdb34547a8e2d0d58\": rpc error: code = NotFound desc = could not find container \"36ab1ab7530761af6a7b7a4c816f73657c78cc2f3f43f48fdb34547a8e2d0d58\": container with ID starting with 36ab1ab7530761af6a7b7a4c816f73657c78cc2f3f43f48fdb34547a8e2d0d58 not found: ID does not exist" Jan 28 17:05:12 crc kubenswrapper[4811]: I0128 17:05:12.750904 4811 scope.go:117] "RemoveContainer" containerID="3453067d884ade505bc77eada87ec00df69bac9a9cc6a4920bc315082b84aebd" Jan 28 17:05:12 crc kubenswrapper[4811]: E0128 17:05:12.751303 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3453067d884ade505bc77eada87ec00df69bac9a9cc6a4920bc315082b84aebd\": container with ID starting with 3453067d884ade505bc77eada87ec00df69bac9a9cc6a4920bc315082b84aebd not found: ID does not exist" containerID="3453067d884ade505bc77eada87ec00df69bac9a9cc6a4920bc315082b84aebd" Jan 28 17:05:12 crc kubenswrapper[4811]: I0128 17:05:12.751336 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3453067d884ade505bc77eada87ec00df69bac9a9cc6a4920bc315082b84aebd"} err="failed to get container status \"3453067d884ade505bc77eada87ec00df69bac9a9cc6a4920bc315082b84aebd\": rpc error: code = NotFound desc = could not find container \"3453067d884ade505bc77eada87ec00df69bac9a9cc6a4920bc315082b84aebd\": container with ID starting with 3453067d884ade505bc77eada87ec00df69bac9a9cc6a4920bc315082b84aebd not found: ID does not exist" Jan 28 17:05:12 crc kubenswrapper[4811]: I0128 17:05:12.751358 4811 scope.go:117] "RemoveContainer" containerID="f4f955087e3c5fb648118bfc17e0edf117d88a252fb9e8100df05afceee9983c" Jan 28 17:05:12 crc kubenswrapper[4811]: E0128 17:05:12.751608 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4f955087e3c5fb648118bfc17e0edf117d88a252fb9e8100df05afceee9983c\": container with ID starting with f4f955087e3c5fb648118bfc17e0edf117d88a252fb9e8100df05afceee9983c not found: ID does not exist" containerID="f4f955087e3c5fb648118bfc17e0edf117d88a252fb9e8100df05afceee9983c" 
Jan 28 17:05:12 crc kubenswrapper[4811]: I0128 17:05:12.751634 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4f955087e3c5fb648118bfc17e0edf117d88a252fb9e8100df05afceee9983c"} err="failed to get container status \"f4f955087e3c5fb648118bfc17e0edf117d88a252fb9e8100df05afceee9983c\": rpc error: code = NotFound desc = could not find container \"f4f955087e3c5fb648118bfc17e0edf117d88a252fb9e8100df05afceee9983c\": container with ID starting with f4f955087e3c5fb648118bfc17e0edf117d88a252fb9e8100df05afceee9983c not found: ID does not exist" Jan 28 17:05:12 crc kubenswrapper[4811]: I0128 17:05:12.763692 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff-utilities\") pod \"9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff\" (UID: \"9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff\") " Jan 28 17:05:12 crc kubenswrapper[4811]: I0128 17:05:12.763775 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff-catalog-content\") pod \"9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff\" (UID: \"9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff\") " Jan 28 17:05:12 crc kubenswrapper[4811]: I0128 17:05:12.763829 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hh9bs\" (UniqueName: \"kubernetes.io/projected/9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff-kube-api-access-hh9bs\") pod \"9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff\" (UID: \"9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff\") " Jan 28 17:05:12 crc kubenswrapper[4811]: I0128 17:05:12.765235 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff-utilities" (OuterVolumeSpecName: "utilities") pod "9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff" (UID: "9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:05:12 crc kubenswrapper[4811]: I0128 17:05:12.769197 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff-kube-api-access-hh9bs" (OuterVolumeSpecName: "kube-api-access-hh9bs") pod "9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff" (UID: "9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff"). InnerVolumeSpecName "kube-api-access-hh9bs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:05:12 crc kubenswrapper[4811]: I0128 17:05:12.820693 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff" (UID: "9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff"). InnerVolumeSpecName "catalog-content". 
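
With the last empty-dir torn down here, and the orphaned-volumes sweep logged just after ("Cleaned up orphaned pod volumes dir"), the pod's directory under /var/lib/kubelet/pods can finally go. A simplified version of that sweep, assuming a hypothetical helper and keeping the real code's caution of never force-deleting a volumes tree that might still hold mounts:

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    // cleanupOrphanedPodDirs removes <root>/<podUID>/volumes for pods the
    // kubelet no longer knows, but only once the tree is empty.
    func cleanupOrphanedPodDirs(root string, known map[string]bool) {
        entries, err := os.ReadDir(root)
        if err != nil {
            return
        }
        for _, e := range entries {
            if !e.IsDir() || known[e.Name()] {
                continue
            }
            volDir := filepath.Join(root, e.Name(), "volumes")
            sub, err := os.ReadDir(volDir)
            if err != nil || len(sub) > 0 {
                continue // unreadable or still populated: leave it alone
            }
            if os.Remove(volDir) == nil {
                fmt.Printf("Cleaned up orphaned pod volumes dir path=%q\n", volDir)
            }
        }
    }

    func main() {
        cleanupOrphanedPodDirs("/var/lib/kubelet/pods", map[string]bool{})
    }
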
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:05:12 crc kubenswrapper[4811]: I0128 17:05:12.865879 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 17:05:12 crc kubenswrapper[4811]: I0128 17:05:12.865931 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 17:05:12 crc kubenswrapper[4811]: I0128 17:05:12.865946 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hh9bs\" (UniqueName: \"kubernetes.io/projected/9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff-kube-api-access-hh9bs\") on node \"crc\" DevicePath \"\"" Jan 28 17:05:13 crc kubenswrapper[4811]: I0128 17:05:13.009856 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-l524t"] Jan 28 17:05:13 crc kubenswrapper[4811]: I0128 17:05:13.015872 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-l524t"] Jan 28 17:05:14 crc kubenswrapper[4811]: I0128 17:05:14.349170 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff" path="/var/lib/kubelet/pods/9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff/volumes" Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.263405 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-95587bc99-2zrdl"] Jan 28 17:05:52 crc kubenswrapper[4811]: E0128 17:05:52.264546 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff" containerName="extract-content" Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.264564 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff" containerName="extract-content" Jan 28 17:05:52 crc kubenswrapper[4811]: E0128 17:05:52.264619 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff" containerName="extract-utilities" Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.264629 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff" containerName="extract-utilities" Jan 28 17:05:52 crc kubenswrapper[4811]: E0128 17:05:52.264673 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff" containerName="registry-server" Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.264684 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff" containerName="registry-server" Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.264944 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ad3d9b5-9cd4-4b2a-8ac9-c3075f795fff" containerName="registry-server" Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.266348 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-95587bc99-2zrdl" Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.270912 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.270932 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-97jvn" Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.270951 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.271137 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.271398 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.284945 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-95587bc99-2zrdl"] Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.350598 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48d869be-3f1e-4fe1-93ec-fdd211c9045f-config\") pod \"dnsmasq-dns-95587bc99-2zrdl\" (UID: \"48d869be-3f1e-4fe1-93ec-fdd211c9045f\") " pod="openstack/dnsmasq-dns-95587bc99-2zrdl" Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.350650 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rm76n\" (UniqueName: \"kubernetes.io/projected/48d869be-3f1e-4fe1-93ec-fdd211c9045f-kube-api-access-rm76n\") pod \"dnsmasq-dns-95587bc99-2zrdl\" (UID: \"48d869be-3f1e-4fe1-93ec-fdd211c9045f\") " pod="openstack/dnsmasq-dns-95587bc99-2zrdl" Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.350687 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/48d869be-3f1e-4fe1-93ec-fdd211c9045f-dns-svc\") pod \"dnsmasq-dns-95587bc99-2zrdl\" (UID: \"48d869be-3f1e-4fe1-93ec-fdd211c9045f\") " pod="openstack/dnsmasq-dns-95587bc99-2zrdl" Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.452171 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48d869be-3f1e-4fe1-93ec-fdd211c9045f-config\") pod \"dnsmasq-dns-95587bc99-2zrdl\" (UID: \"48d869be-3f1e-4fe1-93ec-fdd211c9045f\") " pod="openstack/dnsmasq-dns-95587bc99-2zrdl" Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.452240 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rm76n\" (UniqueName: \"kubernetes.io/projected/48d869be-3f1e-4fe1-93ec-fdd211c9045f-kube-api-access-rm76n\") pod \"dnsmasq-dns-95587bc99-2zrdl\" (UID: \"48d869be-3f1e-4fe1-93ec-fdd211c9045f\") " pod="openstack/dnsmasq-dns-95587bc99-2zrdl" Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.452282 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/48d869be-3f1e-4fe1-93ec-fdd211c9045f-dns-svc\") pod \"dnsmasq-dns-95587bc99-2zrdl\" (UID: \"48d869be-3f1e-4fe1-93ec-fdd211c9045f\") " pod="openstack/dnsmasq-dns-95587bc99-2zrdl" Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.453063 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/48d869be-3f1e-4fe1-93ec-fdd211c9045f-config\") pod \"dnsmasq-dns-95587bc99-2zrdl\" (UID: \"48d869be-3f1e-4fe1-93ec-fdd211c9045f\") " pod="openstack/dnsmasq-dns-95587bc99-2zrdl" Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.453334 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/48d869be-3f1e-4fe1-93ec-fdd211c9045f-dns-svc\") pod \"dnsmasq-dns-95587bc99-2zrdl\" (UID: \"48d869be-3f1e-4fe1-93ec-fdd211c9045f\") " pod="openstack/dnsmasq-dns-95587bc99-2zrdl" Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.482149 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rm76n\" (UniqueName: \"kubernetes.io/projected/48d869be-3f1e-4fe1-93ec-fdd211c9045f-kube-api-access-rm76n\") pod \"dnsmasq-dns-95587bc99-2zrdl\" (UID: \"48d869be-3f1e-4fe1-93ec-fdd211c9045f\") " pod="openstack/dnsmasq-dns-95587bc99-2zrdl" Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.496533 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5d79f765b5-qpvvf"] Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.497886 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d79f765b5-qpvvf" Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.506027 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d79f765b5-qpvvf"] Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.609548 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95587bc99-2zrdl" Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.657503 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdv4h\" (UniqueName: \"kubernetes.io/projected/d7f58280-b425-496c-865a-f2f9bdc31e49-kube-api-access-vdv4h\") pod \"dnsmasq-dns-5d79f765b5-qpvvf\" (UID: \"d7f58280-b425-496c-865a-f2f9bdc31e49\") " pod="openstack/dnsmasq-dns-5d79f765b5-qpvvf" Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.657745 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d7f58280-b425-496c-865a-f2f9bdc31e49-config\") pod \"dnsmasq-dns-5d79f765b5-qpvvf\" (UID: \"d7f58280-b425-496c-865a-f2f9bdc31e49\") " pod="openstack/dnsmasq-dns-5d79f765b5-qpvvf" Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.657986 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d7f58280-b425-496c-865a-f2f9bdc31e49-dns-svc\") pod \"dnsmasq-dns-5d79f765b5-qpvvf\" (UID: \"d7f58280-b425-496c-865a-f2f9bdc31e49\") " pod="openstack/dnsmasq-dns-5d79f765b5-qpvvf" Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.765257 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdv4h\" (UniqueName: \"kubernetes.io/projected/d7f58280-b425-496c-865a-f2f9bdc31e49-kube-api-access-vdv4h\") pod \"dnsmasq-dns-5d79f765b5-qpvvf\" (UID: \"d7f58280-b425-496c-865a-f2f9bdc31e49\") " pod="openstack/dnsmasq-dns-5d79f765b5-qpvvf" Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.765585 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d7f58280-b425-496c-865a-f2f9bdc31e49-config\") pod \"dnsmasq-dns-5d79f765b5-qpvvf\" (UID: 
\"d7f58280-b425-496c-865a-f2f9bdc31e49\") " pod="openstack/dnsmasq-dns-5d79f765b5-qpvvf" Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.765713 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d7f58280-b425-496c-865a-f2f9bdc31e49-dns-svc\") pod \"dnsmasq-dns-5d79f765b5-qpvvf\" (UID: \"d7f58280-b425-496c-865a-f2f9bdc31e49\") " pod="openstack/dnsmasq-dns-5d79f765b5-qpvvf" Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.766833 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d7f58280-b425-496c-865a-f2f9bdc31e49-dns-svc\") pod \"dnsmasq-dns-5d79f765b5-qpvvf\" (UID: \"d7f58280-b425-496c-865a-f2f9bdc31e49\") " pod="openstack/dnsmasq-dns-5d79f765b5-qpvvf" Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.769282 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d7f58280-b425-496c-865a-f2f9bdc31e49-config\") pod \"dnsmasq-dns-5d79f765b5-qpvvf\" (UID: \"d7f58280-b425-496c-865a-f2f9bdc31e49\") " pod="openstack/dnsmasq-dns-5d79f765b5-qpvvf" Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.785527 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdv4h\" (UniqueName: \"kubernetes.io/projected/d7f58280-b425-496c-865a-f2f9bdc31e49-kube-api-access-vdv4h\") pod \"dnsmasq-dns-5d79f765b5-qpvvf\" (UID: \"d7f58280-b425-496c-865a-f2f9bdc31e49\") " pod="openstack/dnsmasq-dns-5d79f765b5-qpvvf" Jan 28 17:05:52 crc kubenswrapper[4811]: I0128 17:05:52.814878 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d79f765b5-qpvvf" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.080175 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-95587bc99-2zrdl"] Jan 28 17:05:53 crc kubenswrapper[4811]: W0128 17:05:53.275309 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd7f58280_b425_496c_865a_f2f9bdc31e49.slice/crio-e9a7f300644a963a5a5b77789618aa15a1fd024c91f229d7529305dfd56b9c82 WatchSource:0}: Error finding container e9a7f300644a963a5a5b77789618aa15a1fd024c91f229d7529305dfd56b9c82: Status 404 returned error can't find the container with id e9a7f300644a963a5a5b77789618aa15a1fd024c91f229d7529305dfd56b9c82 Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.275780 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d79f765b5-qpvvf"] Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.350694 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.352583 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.356588 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-k6phq" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.356878 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.356906 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.357001 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.357417 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.362500 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.479391 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6s98v\" (UniqueName: \"kubernetes.io/projected/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-kube-api-access-6s98v\") pod \"rabbitmq-server-0\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " pod="openstack/rabbitmq-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.479519 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " pod="openstack/rabbitmq-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.479545 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " pod="openstack/rabbitmq-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.479605 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " pod="openstack/rabbitmq-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.479632 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " pod="openstack/rabbitmq-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.479655 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-server-conf\") pod \"rabbitmq-server-0\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " pod="openstack/rabbitmq-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.479711 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"pvc-25e50ebd-76f9-4a17-b8a3-9d0fafd304a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-25e50ebd-76f9-4a17-b8a3-9d0fafd304a8\") pod \"rabbitmq-server-0\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " pod="openstack/rabbitmq-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.479739 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " pod="openstack/rabbitmq-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.479784 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-pod-info\") pod \"rabbitmq-server-0\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " pod="openstack/rabbitmq-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.581567 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " pod="openstack/rabbitmq-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.581626 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " pod="openstack/rabbitmq-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.581666 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " pod="openstack/rabbitmq-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.581721 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " pod="openstack/rabbitmq-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.581740 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-server-conf\") pod \"rabbitmq-server-0\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " pod="openstack/rabbitmq-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.581781 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-25e50ebd-76f9-4a17-b8a3-9d0fafd304a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-25e50ebd-76f9-4a17-b8a3-9d0fafd304a8\") pod \"rabbitmq-server-0\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " pod="openstack/rabbitmq-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.581802 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " pod="openstack/rabbitmq-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.581822 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-pod-info\") pod \"rabbitmq-server-0\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " pod="openstack/rabbitmq-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.581888 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6s98v\" (UniqueName: \"kubernetes.io/projected/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-kube-api-access-6s98v\") pod \"rabbitmq-server-0\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " pod="openstack/rabbitmq-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.583052 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " pod="openstack/rabbitmq-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.583321 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " pod="openstack/rabbitmq-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.583596 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " pod="openstack/rabbitmq-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.584555 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-server-conf\") pod \"rabbitmq-server-0\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " pod="openstack/rabbitmq-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.589791 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " pod="openstack/rabbitmq-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.589865 4811 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.589896 4811 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-25e50ebd-76f9-4a17-b8a3-9d0fafd304a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-25e50ebd-76f9-4a17-b8a3-9d0fafd304a8\") pod \"rabbitmq-server-0\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/9838b56633f794a95a8a47cbe108341f22ea29055f81181b011c7440ab2d0112/globalmount\"" pod="openstack/rabbitmq-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.591068 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-pod-info\") pod \"rabbitmq-server-0\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " pod="openstack/rabbitmq-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.597283 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " pod="openstack/rabbitmq-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.602593 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6s98v\" (UniqueName: \"kubernetes.io/projected/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-kube-api-access-6s98v\") pod \"rabbitmq-server-0\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " pod="openstack/rabbitmq-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.618840 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.620228 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.620305 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-25e50ebd-76f9-4a17-b8a3-9d0fafd304a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-25e50ebd-76f9-4a17-b8a3-9d0fafd304a8\") pod \"rabbitmq-server-0\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " pod="openstack/rabbitmq-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.626946 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.641259 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.641323 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.641487 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.641593 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-x7595" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.641676 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.688888 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.785857 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8c942249-251a-4607-9f2d-c14456dd5264-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.785910 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8c942249-251a-4607-9f2d-c14456dd5264-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.785957 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gzhz\" (UniqueName: \"kubernetes.io/projected/8c942249-251a-4607-9f2d-c14456dd5264-kube-api-access-9gzhz\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.786006 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8c942249-251a-4607-9f2d-c14456dd5264-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.786120 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8c942249-251a-4607-9f2d-c14456dd5264-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.786188 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8c942249-251a-4607-9f2d-c14456dd5264-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.786219 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8c942249-251a-4607-9f2d-c14456dd5264-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.786283 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-4ace5ac5-8e12-41aa-94ea-22acb0747f57\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4ace5ac5-8e12-41aa-94ea-22acb0747f57\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.786317 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: 
\"kubernetes.io/secret/8c942249-251a-4607-9f2d-c14456dd5264-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.888743 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8c942249-251a-4607-9f2d-c14456dd5264-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.888807 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8c942249-251a-4607-9f2d-c14456dd5264-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.888846 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8c942249-251a-4607-9f2d-c14456dd5264-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.888866 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8c942249-251a-4607-9f2d-c14456dd5264-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.888904 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-4ace5ac5-8e12-41aa-94ea-22acb0747f57\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4ace5ac5-8e12-41aa-94ea-22acb0747f57\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.888941 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8c942249-251a-4607-9f2d-c14456dd5264-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.889003 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8c942249-251a-4607-9f2d-c14456dd5264-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.889029 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8c942249-251a-4607-9f2d-c14456dd5264-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.889075 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9gzhz\" (UniqueName: 
\"kubernetes.io/projected/8c942249-251a-4607-9f2d-c14456dd5264-kube-api-access-9gzhz\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.890483 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8c942249-251a-4607-9f2d-c14456dd5264-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.893264 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8c942249-251a-4607-9f2d-c14456dd5264-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.895039 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8c942249-251a-4607-9f2d-c14456dd5264-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.896115 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8c942249-251a-4607-9f2d-c14456dd5264-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.898132 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8c942249-251a-4607-9f2d-c14456dd5264-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.899809 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8c942249-251a-4607-9f2d-c14456dd5264-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.900966 4811 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.900999 4811 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-4ace5ac5-8e12-41aa-94ea-22acb0747f57\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4ace5ac5-8e12-41aa-94ea-22acb0747f57\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/90152b39ca3535b1b2a5e3963e7e39507c984e6965f0e8098d13cacf3eafa19c/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.902221 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8c942249-251a-4607-9f2d-c14456dd5264-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.912588 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gzhz\" (UniqueName: \"kubernetes.io/projected/8c942249-251a-4607-9f2d-c14456dd5264-kube-api-access-9gzhz\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:05:53 crc kubenswrapper[4811]: I0128 17:05:53.966153 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-4ace5ac5-8e12-41aa-94ea-22acb0747f57\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4ace5ac5-8e12-41aa-94ea-22acb0747f57\") pod \"rabbitmq-cell1-server-0\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:05:54 crc kubenswrapper[4811]: I0128 17:05:54.002757 4811 generic.go:334] "Generic (PLEG): container finished" podID="d7f58280-b425-496c-865a-f2f9bdc31e49" containerID="49fe97597e35b4538564331129ca087ccf4b3c845483c790077617f978a96efa" exitCode=0 Jan 28 17:05:54 crc kubenswrapper[4811]: I0128 17:05:54.003924 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d79f765b5-qpvvf" event={"ID":"d7f58280-b425-496c-865a-f2f9bdc31e49","Type":"ContainerDied","Data":"49fe97597e35b4538564331129ca087ccf4b3c845483c790077617f978a96efa"} Jan 28 17:05:54 crc kubenswrapper[4811]: I0128 17:05:54.003965 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d79f765b5-qpvvf" event={"ID":"d7f58280-b425-496c-865a-f2f9bdc31e49","Type":"ContainerStarted","Data":"e9a7f300644a963a5a5b77789618aa15a1fd024c91f229d7529305dfd56b9c82"} Jan 28 17:05:54 crc kubenswrapper[4811]: I0128 17:05:54.015738 4811 generic.go:334] "Generic (PLEG): container finished" podID="48d869be-3f1e-4fe1-93ec-fdd211c9045f" containerID="756a62fe1d19e54304eed8ff436bae7ee193151fdecef503b38a1e123e892c72" exitCode=0 Jan 28 17:05:54 crc kubenswrapper[4811]: I0128 17:05:54.015792 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95587bc99-2zrdl" event={"ID":"48d869be-3f1e-4fe1-93ec-fdd211c9045f","Type":"ContainerDied","Data":"756a62fe1d19e54304eed8ff436bae7ee193151fdecef503b38a1e123e892c72"} Jan 28 17:05:54 crc kubenswrapper[4811]: I0128 17:05:54.015825 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95587bc99-2zrdl" event={"ID":"48d869be-3f1e-4fe1-93ec-fdd211c9045f","Type":"ContainerStarted","Data":"4385bafabc88cc3cbc14b2e86c982e05565026874378f760882f3fb41729358f"} Jan 28 17:05:54 crc 
kubenswrapper[4811]: I0128 17:05:54.095327 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:05:54 crc kubenswrapper[4811]: I0128 17:05:54.229685 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 28 17:05:54 crc kubenswrapper[4811]: I0128 17:05:54.645619 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.024851 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d79f765b5-qpvvf" event={"ID":"d7f58280-b425-496c-865a-f2f9bdc31e49","Type":"ContainerStarted","Data":"16c2a91803a179831de9db2472185e998f5c09d31ca55aaf07b6cc1c5f243b0e"} Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.025017 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5d79f765b5-qpvvf" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.025984 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc","Type":"ContainerStarted","Data":"f376ba5c96c8514c4f84a6f5710dbf577e742c765f2d998372eece9e4bda96ae"} Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.027649 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95587bc99-2zrdl" event={"ID":"48d869be-3f1e-4fe1-93ec-fdd211c9045f","Type":"ContainerStarted","Data":"4888d44d82390c4c2961477651a6164a249728702067c2317fb7fda465b4c21a"} Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.027828 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-95587bc99-2zrdl" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.029230 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8c942249-251a-4607-9f2d-c14456dd5264","Type":"ContainerStarted","Data":"9133e462bad21495db39404627d351d8069e4b8152cd8068e832a98b3d623195"} Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.034771 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.036310 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.038899 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.038917 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-vvz4m" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.038924 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.039282 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.052604 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.054825 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.058114 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5d79f765b5-qpvvf" podStartSLOduration=3.058099937 podStartE2EDuration="3.058099937s" podCreationTimestamp="2026-01-28 17:05:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:05:55.052058163 +0000 UTC m=+4847.806421746" watchObservedRunningTime="2026-01-28 17:05:55.058099937 +0000 UTC m=+4847.812463520" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.097309 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-95587bc99-2zrdl" podStartSLOduration=3.097290934 podStartE2EDuration="3.097290934s" podCreationTimestamp="2026-01-28 17:05:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:05:55.078418801 +0000 UTC m=+4847.832782384" watchObservedRunningTime="2026-01-28 17:05:55.097290934 +0000 UTC m=+4847.851654517" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.111956 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/53fb2b01-0aa4-4d3a-8395-e1f0399802d4-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"53fb2b01-0aa4-4d3a-8395-e1f0399802d4\") " pod="openstack/openstack-galera-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.112023 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-febfcddc-859a-421c-8010-20665d69528c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-febfcddc-859a-421c-8010-20665d69528c\") pod \"openstack-galera-0\" (UID: \"53fb2b01-0aa4-4d3a-8395-e1f0399802d4\") " pod="openstack/openstack-galera-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.112073 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/53fb2b01-0aa4-4d3a-8395-e1f0399802d4-config-data-generated\") pod \"openstack-galera-0\" (UID: \"53fb2b01-0aa4-4d3a-8395-e1f0399802d4\") " pod="openstack/openstack-galera-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.112102 4811 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/53fb2b01-0aa4-4d3a-8395-e1f0399802d4-config-data-default\") pod \"openstack-galera-0\" (UID: \"53fb2b01-0aa4-4d3a-8395-e1f0399802d4\") " pod="openstack/openstack-galera-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.112216 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53fb2b01-0aa4-4d3a-8395-e1f0399802d4-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"53fb2b01-0aa4-4d3a-8395-e1f0399802d4\") " pod="openstack/openstack-galera-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.112497 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/53fb2b01-0aa4-4d3a-8395-e1f0399802d4-operator-scripts\") pod \"openstack-galera-0\" (UID: \"53fb2b01-0aa4-4d3a-8395-e1f0399802d4\") " pod="openstack/openstack-galera-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.112802 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9xfl\" (UniqueName: \"kubernetes.io/projected/53fb2b01-0aa4-4d3a-8395-e1f0399802d4-kube-api-access-t9xfl\") pod \"openstack-galera-0\" (UID: \"53fb2b01-0aa4-4d3a-8395-e1f0399802d4\") " pod="openstack/openstack-galera-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.112976 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/53fb2b01-0aa4-4d3a-8395-e1f0399802d4-kolla-config\") pod \"openstack-galera-0\" (UID: \"53fb2b01-0aa4-4d3a-8395-e1f0399802d4\") " pod="openstack/openstack-galera-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.215196 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/53fb2b01-0aa4-4d3a-8395-e1f0399802d4-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"53fb2b01-0aa4-4d3a-8395-e1f0399802d4\") " pod="openstack/openstack-galera-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.215276 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-febfcddc-859a-421c-8010-20665d69528c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-febfcddc-859a-421c-8010-20665d69528c\") pod \"openstack-galera-0\" (UID: \"53fb2b01-0aa4-4d3a-8395-e1f0399802d4\") " pod="openstack/openstack-galera-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.215301 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/53fb2b01-0aa4-4d3a-8395-e1f0399802d4-config-data-generated\") pod \"openstack-galera-0\" (UID: \"53fb2b01-0aa4-4d3a-8395-e1f0399802d4\") " pod="openstack/openstack-galera-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.215333 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/53fb2b01-0aa4-4d3a-8395-e1f0399802d4-config-data-default\") pod \"openstack-galera-0\" (UID: \"53fb2b01-0aa4-4d3a-8395-e1f0399802d4\") " pod="openstack/openstack-galera-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.215383 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53fb2b01-0aa4-4d3a-8395-e1f0399802d4-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"53fb2b01-0aa4-4d3a-8395-e1f0399802d4\") " pod="openstack/openstack-galera-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.215507 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/53fb2b01-0aa4-4d3a-8395-e1f0399802d4-operator-scripts\") pod \"openstack-galera-0\" (UID: \"53fb2b01-0aa4-4d3a-8395-e1f0399802d4\") " pod="openstack/openstack-galera-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.215818 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/53fb2b01-0aa4-4d3a-8395-e1f0399802d4-config-data-generated\") pod \"openstack-galera-0\" (UID: \"53fb2b01-0aa4-4d3a-8395-e1f0399802d4\") " pod="openstack/openstack-galera-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.216399 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/53fb2b01-0aa4-4d3a-8395-e1f0399802d4-config-data-default\") pod \"openstack-galera-0\" (UID: \"53fb2b01-0aa4-4d3a-8395-e1f0399802d4\") " pod="openstack/openstack-galera-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.218465 4811 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.218509 4811 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-febfcddc-859a-421c-8010-20665d69528c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-febfcddc-859a-421c-8010-20665d69528c\") pod \"openstack-galera-0\" (UID: \"53fb2b01-0aa4-4d3a-8395-e1f0399802d4\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/eb2dd407a258cf6c81a9f99b64008923f64c80ff69da7bb9c60c7692f9060dce/globalmount\"" pod="openstack/openstack-galera-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.219003 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/53fb2b01-0aa4-4d3a-8395-e1f0399802d4-operator-scripts\") pod \"openstack-galera-0\" (UID: \"53fb2b01-0aa4-4d3a-8395-e1f0399802d4\") " pod="openstack/openstack-galera-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.219062 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9xfl\" (UniqueName: \"kubernetes.io/projected/53fb2b01-0aa4-4d3a-8395-e1f0399802d4-kube-api-access-t9xfl\") pod \"openstack-galera-0\" (UID: \"53fb2b01-0aa4-4d3a-8395-e1f0399802d4\") " pod="openstack/openstack-galera-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.219132 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/53fb2b01-0aa4-4d3a-8395-e1f0399802d4-kolla-config\") pod \"openstack-galera-0\" (UID: \"53fb2b01-0aa4-4d3a-8395-e1f0399802d4\") " pod="openstack/openstack-galera-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.219703 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/53fb2b01-0aa4-4d3a-8395-e1f0399802d4-galera-tls-certs\") pod \"openstack-galera-0\" (UID: 
\"53fb2b01-0aa4-4d3a-8395-e1f0399802d4\") " pod="openstack/openstack-galera-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.221286 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/53fb2b01-0aa4-4d3a-8395-e1f0399802d4-kolla-config\") pod \"openstack-galera-0\" (UID: \"53fb2b01-0aa4-4d3a-8395-e1f0399802d4\") " pod="openstack/openstack-galera-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.232582 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9xfl\" (UniqueName: \"kubernetes.io/projected/53fb2b01-0aa4-4d3a-8395-e1f0399802d4-kube-api-access-t9xfl\") pod \"openstack-galera-0\" (UID: \"53fb2b01-0aa4-4d3a-8395-e1f0399802d4\") " pod="openstack/openstack-galera-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.235060 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53fb2b01-0aa4-4d3a-8395-e1f0399802d4-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"53fb2b01-0aa4-4d3a-8395-e1f0399802d4\") " pod="openstack/openstack-galera-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.275465 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-febfcddc-859a-421c-8010-20665d69528c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-febfcddc-859a-421c-8010-20665d69528c\") pod \"openstack-galera-0\" (UID: \"53fb2b01-0aa4-4d3a-8395-e1f0399802d4\") " pod="openstack/openstack-galera-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.358361 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.430517 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.433750 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.438014 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-24qb9" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.438331 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.448420 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.523521 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-462dg\" (UniqueName: \"kubernetes.io/projected/891a977b-9794-4c64-aabc-90555d95f8b3-kube-api-access-462dg\") pod \"memcached-0\" (UID: \"891a977b-9794-4c64-aabc-90555d95f8b3\") " pod="openstack/memcached-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.524032 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/891a977b-9794-4c64-aabc-90555d95f8b3-config-data\") pod \"memcached-0\" (UID: \"891a977b-9794-4c64-aabc-90555d95f8b3\") " pod="openstack/memcached-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.524062 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/891a977b-9794-4c64-aabc-90555d95f8b3-kolla-config\") pod \"memcached-0\" (UID: \"891a977b-9794-4c64-aabc-90555d95f8b3\") " pod="openstack/memcached-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.624926 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/891a977b-9794-4c64-aabc-90555d95f8b3-config-data\") pod \"memcached-0\" (UID: \"891a977b-9794-4c64-aabc-90555d95f8b3\") " pod="openstack/memcached-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.624963 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/891a977b-9794-4c64-aabc-90555d95f8b3-kolla-config\") pod \"memcached-0\" (UID: \"891a977b-9794-4c64-aabc-90555d95f8b3\") " pod="openstack/memcached-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.624997 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-462dg\" (UniqueName: \"kubernetes.io/projected/891a977b-9794-4c64-aabc-90555d95f8b3-kube-api-access-462dg\") pod \"memcached-0\" (UID: \"891a977b-9794-4c64-aabc-90555d95f8b3\") " pod="openstack/memcached-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.625953 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/891a977b-9794-4c64-aabc-90555d95f8b3-config-data\") pod \"memcached-0\" (UID: \"891a977b-9794-4c64-aabc-90555d95f8b3\") " pod="openstack/memcached-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.626225 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/891a977b-9794-4c64-aabc-90555d95f8b3-kolla-config\") pod \"memcached-0\" (UID: \"891a977b-9794-4c64-aabc-90555d95f8b3\") " pod="openstack/memcached-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.694547 4811 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-462dg\" (UniqueName: \"kubernetes.io/projected/891a977b-9794-4c64-aabc-90555d95f8b3-kube-api-access-462dg\") pod \"memcached-0\" (UID: \"891a977b-9794-4c64-aabc-90555d95f8b3\") " pod="openstack/memcached-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.763972 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Jan 28 17:05:55 crc kubenswrapper[4811]: I0128 17:05:55.841671 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.037807 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"53fb2b01-0aa4-4d3a-8395-e1f0399802d4","Type":"ContainerStarted","Data":"03261ae029630abbe4b56932758d3700d158b3f58686a63426bb3a33d72eaf02"} Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.040370 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8c942249-251a-4607-9f2d-c14456dd5264","Type":"ContainerStarted","Data":"7436b4730c86c9b0907877d4dad1609f4c1fcd87b8da252553624cb3e2f5ef03"} Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.042471 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc","Type":"ContainerStarted","Data":"d76863b1859ce5b15b1796916f94637d4b80bb70dbfb2adad72dc93d373bf5c7"} Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.188250 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Jan 28 17:05:56 crc kubenswrapper[4811]: W0128 17:05:56.196664 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod891a977b_9794_4c64_aabc_90555d95f8b3.slice/crio-5213607fcfb689c722e6531dee4a799a8980715a176a3ab3577f681bb4cd5ecd WatchSource:0}: Error finding container 5213607fcfb689c722e6531dee4a799a8980715a176a3ab3577f681bb4cd5ecd: Status 404 returned error can't find the container with id 5213607fcfb689c722e6531dee4a799a8980715a176a3ab3577f681bb4cd5ecd Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.368570 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.370616 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.373021 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.373120 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-wv2hx" Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.373120 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.373122 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.381860 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.436378 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/36dc7233-7d69-4de6-ac18-3c7e87a7b21c-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"36dc7233-7d69-4de6-ac18-3c7e87a7b21c\") " pod="openstack/openstack-cell1-galera-0" Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.436464 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/36dc7233-7d69-4de6-ac18-3c7e87a7b21c-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"36dc7233-7d69-4de6-ac18-3c7e87a7b21c\") " pod="openstack/openstack-cell1-galera-0" Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.436561 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-68025001-7751-41ef-b358-30a4aec67e85\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-68025001-7751-41ef-b358-30a4aec67e85\") pod \"openstack-cell1-galera-0\" (UID: \"36dc7233-7d69-4de6-ac18-3c7e87a7b21c\") " pod="openstack/openstack-cell1-galera-0" Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.436703 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/36dc7233-7d69-4de6-ac18-3c7e87a7b21c-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"36dc7233-7d69-4de6-ac18-3c7e87a7b21c\") " pod="openstack/openstack-cell1-galera-0" Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.436730 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hs7t6\" (UniqueName: \"kubernetes.io/projected/36dc7233-7d69-4de6-ac18-3c7e87a7b21c-kube-api-access-hs7t6\") pod \"openstack-cell1-galera-0\" (UID: \"36dc7233-7d69-4de6-ac18-3c7e87a7b21c\") " pod="openstack/openstack-cell1-galera-0" Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.436779 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36dc7233-7d69-4de6-ac18-3c7e87a7b21c-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"36dc7233-7d69-4de6-ac18-3c7e87a7b21c\") " pod="openstack/openstack-cell1-galera-0" Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.436855 4811 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/36dc7233-7d69-4de6-ac18-3c7e87a7b21c-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"36dc7233-7d69-4de6-ac18-3c7e87a7b21c\") " pod="openstack/openstack-cell1-galera-0" Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.436886 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/36dc7233-7d69-4de6-ac18-3c7e87a7b21c-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"36dc7233-7d69-4de6-ac18-3c7e87a7b21c\") " pod="openstack/openstack-cell1-galera-0" Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.538515 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/36dc7233-7d69-4de6-ac18-3c7e87a7b21c-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"36dc7233-7d69-4de6-ac18-3c7e87a7b21c\") " pod="openstack/openstack-cell1-galera-0" Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.538848 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/36dc7233-7d69-4de6-ac18-3c7e87a7b21c-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"36dc7233-7d69-4de6-ac18-3c7e87a7b21c\") " pod="openstack/openstack-cell1-galera-0" Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.538883 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-68025001-7751-41ef-b358-30a4aec67e85\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-68025001-7751-41ef-b358-30a4aec67e85\") pod \"openstack-cell1-galera-0\" (UID: \"36dc7233-7d69-4de6-ac18-3c7e87a7b21c\") " pod="openstack/openstack-cell1-galera-0" Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.538924 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/36dc7233-7d69-4de6-ac18-3c7e87a7b21c-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"36dc7233-7d69-4de6-ac18-3c7e87a7b21c\") " pod="openstack/openstack-cell1-galera-0" Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.538950 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hs7t6\" (UniqueName: \"kubernetes.io/projected/36dc7233-7d69-4de6-ac18-3c7e87a7b21c-kube-api-access-hs7t6\") pod \"openstack-cell1-galera-0\" (UID: \"36dc7233-7d69-4de6-ac18-3c7e87a7b21c\") " pod="openstack/openstack-cell1-galera-0" Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.538979 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36dc7233-7d69-4de6-ac18-3c7e87a7b21c-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"36dc7233-7d69-4de6-ac18-3c7e87a7b21c\") " pod="openstack/openstack-cell1-galera-0" Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.539017 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/36dc7233-7d69-4de6-ac18-3c7e87a7b21c-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"36dc7233-7d69-4de6-ac18-3c7e87a7b21c\") " pod="openstack/openstack-cell1-galera-0" Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.539040 4811 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/36dc7233-7d69-4de6-ac18-3c7e87a7b21c-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"36dc7233-7d69-4de6-ac18-3c7e87a7b21c\") " pod="openstack/openstack-cell1-galera-0" Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.539516 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/36dc7233-7d69-4de6-ac18-3c7e87a7b21c-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"36dc7233-7d69-4de6-ac18-3c7e87a7b21c\") " pod="openstack/openstack-cell1-galera-0" Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.541287 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/36dc7233-7d69-4de6-ac18-3c7e87a7b21c-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"36dc7233-7d69-4de6-ac18-3c7e87a7b21c\") " pod="openstack/openstack-cell1-galera-0" Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.541378 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/36dc7233-7d69-4de6-ac18-3c7e87a7b21c-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"36dc7233-7d69-4de6-ac18-3c7e87a7b21c\") " pod="openstack/openstack-cell1-galera-0" Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.541943 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/36dc7233-7d69-4de6-ac18-3c7e87a7b21c-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"36dc7233-7d69-4de6-ac18-3c7e87a7b21c\") " pod="openstack/openstack-cell1-galera-0" Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.547398 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/36dc7233-7d69-4de6-ac18-3c7e87a7b21c-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"36dc7233-7d69-4de6-ac18-3c7e87a7b21c\") " pod="openstack/openstack-cell1-galera-0" Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.548847 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36dc7233-7d69-4de6-ac18-3c7e87a7b21c-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"36dc7233-7d69-4de6-ac18-3c7e87a7b21c\") " pod="openstack/openstack-cell1-galera-0" Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.549270 4811 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.549296 4811 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-68025001-7751-41ef-b358-30a4aec67e85\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-68025001-7751-41ef-b358-30a4aec67e85\") pod \"openstack-cell1-galera-0\" (UID: \"36dc7233-7d69-4de6-ac18-3c7e87a7b21c\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/8951c8b51ee43cecae16ac11ee91eb16f7a4fa457e0f6703f309d23f5113dbc4/globalmount\"" pod="openstack/openstack-cell1-galera-0" Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.569322 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hs7t6\" (UniqueName: \"kubernetes.io/projected/36dc7233-7d69-4de6-ac18-3c7e87a7b21c-kube-api-access-hs7t6\") pod \"openstack-cell1-galera-0\" (UID: \"36dc7233-7d69-4de6-ac18-3c7e87a7b21c\") " pod="openstack/openstack-cell1-galera-0" Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.596749 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-68025001-7751-41ef-b358-30a4aec67e85\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-68025001-7751-41ef-b358-30a4aec67e85\") pod \"openstack-cell1-galera-0\" (UID: \"36dc7233-7d69-4de6-ac18-3c7e87a7b21c\") " pod="openstack/openstack-cell1-galera-0" Jan 28 17:05:56 crc kubenswrapper[4811]: I0128 17:05:56.686900 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Jan 28 17:05:57 crc kubenswrapper[4811]: I0128 17:05:57.050713 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"53fb2b01-0aa4-4d3a-8395-e1f0399802d4","Type":"ContainerStarted","Data":"bcff6a2ab683bbd0d1cc06019536c8a957e87d88a600b60ba1f86c3e017ca3bf"} Jan 28 17:05:57 crc kubenswrapper[4811]: I0128 17:05:57.052053 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"891a977b-9794-4c64-aabc-90555d95f8b3","Type":"ContainerStarted","Data":"3879becedea67bace6a334005aee2e65e226e5d200c31e486b18169e692f5897"} Jan 28 17:05:57 crc kubenswrapper[4811]: I0128 17:05:57.052105 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"891a977b-9794-4c64-aabc-90555d95f8b3","Type":"ContainerStarted","Data":"5213607fcfb689c722e6531dee4a799a8980715a176a3ab3577f681bb4cd5ecd"} Jan 28 17:05:57 crc kubenswrapper[4811]: I0128 17:05:57.142090 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=2.142074105 podStartE2EDuration="2.142074105s" podCreationTimestamp="2026-01-28 17:05:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:05:57.140978865 +0000 UTC m=+4849.895342448" watchObservedRunningTime="2026-01-28 17:05:57.142074105 +0000 UTC m=+4849.896437688" Jan 28 17:05:57 crc kubenswrapper[4811]: I0128 17:05:57.171051 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 28 17:05:57 crc kubenswrapper[4811]: W0128 17:05:57.172069 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod36dc7233_7d69_4de6_ac18_3c7e87a7b21c.slice/crio-d873607db33923e19973750d3940411859c604c60451d25b9e54cdf69776116b WatchSource:0}: Error finding container 
d873607db33923e19973750d3940411859c604c60451d25b9e54cdf69776116b: Status 404 returned error can't find the container with id d873607db33923e19973750d3940411859c604c60451d25b9e54cdf69776116b Jan 28 17:05:58 crc kubenswrapper[4811]: I0128 17:05:58.062493 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"36dc7233-7d69-4de6-ac18-3c7e87a7b21c","Type":"ContainerStarted","Data":"9536edd9c28dbc4ca5e262c6432cf5c1173b21b687fe9ee091cd210db0bbbd47"} Jan 28 17:05:58 crc kubenswrapper[4811]: I0128 17:05:58.063317 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Jan 28 17:05:58 crc kubenswrapper[4811]: I0128 17:05:58.063334 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"36dc7233-7d69-4de6-ac18-3c7e87a7b21c","Type":"ContainerStarted","Data":"d873607db33923e19973750d3940411859c604c60451d25b9e54cdf69776116b"} Jan 28 17:06:02 crc kubenswrapper[4811]: I0128 17:06:02.611658 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-95587bc99-2zrdl" Jan 28 17:06:02 crc kubenswrapper[4811]: I0128 17:06:02.816906 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5d79f765b5-qpvvf" Jan 28 17:06:02 crc kubenswrapper[4811]: I0128 17:06:02.862834 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-95587bc99-2zrdl"] Jan 28 17:06:03 crc kubenswrapper[4811]: I0128 17:06:03.087071 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 17:06:03 crc kubenswrapper[4811]: I0128 17:06:03.087130 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 17:06:03 crc kubenswrapper[4811]: I0128 17:06:03.105538 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-95587bc99-2zrdl" podUID="48d869be-3f1e-4fe1-93ec-fdd211c9045f" containerName="dnsmasq-dns" containerID="cri-o://4888d44d82390c4c2961477651a6164a249728702067c2317fb7fda465b4c21a" gracePeriod=10 Jan 28 17:06:04 crc kubenswrapper[4811]: I0128 17:06:04.113959 4811 generic.go:334] "Generic (PLEG): container finished" podID="53fb2b01-0aa4-4d3a-8395-e1f0399802d4" containerID="bcff6a2ab683bbd0d1cc06019536c8a957e87d88a600b60ba1f86c3e017ca3bf" exitCode=0 Jan 28 17:06:04 crc kubenswrapper[4811]: I0128 17:06:04.114159 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"53fb2b01-0aa4-4d3a-8395-e1f0399802d4","Type":"ContainerDied","Data":"bcff6a2ab683bbd0d1cc06019536c8a957e87d88a600b60ba1f86c3e017ca3bf"} Jan 28 17:06:04 crc kubenswrapper[4811]: I0128 17:06:04.116401 4811 generic.go:334] "Generic (PLEG): container finished" podID="48d869be-3f1e-4fe1-93ec-fdd211c9045f" containerID="4888d44d82390c4c2961477651a6164a249728702067c2317fb7fda465b4c21a" exitCode=0 Jan 28 17:06:04 crc kubenswrapper[4811]: I0128 17:06:04.116448 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-95587bc99-2zrdl" event={"ID":"48d869be-3f1e-4fe1-93ec-fdd211c9045f","Type":"ContainerDied","Data":"4888d44d82390c4c2961477651a6164a249728702067c2317fb7fda465b4c21a"} Jan 28 17:06:04 crc kubenswrapper[4811]: I0128 17:06:04.116474 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95587bc99-2zrdl" event={"ID":"48d869be-3f1e-4fe1-93ec-fdd211c9045f","Type":"ContainerDied","Data":"4385bafabc88cc3cbc14b2e86c982e05565026874378f760882f3fb41729358f"} Jan 28 17:06:04 crc kubenswrapper[4811]: I0128 17:06:04.116485 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4385bafabc88cc3cbc14b2e86c982e05565026874378f760882f3fb41729358f" Jan 28 17:06:04 crc kubenswrapper[4811]: I0128 17:06:04.202167 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95587bc99-2zrdl" Jan 28 17:06:04 crc kubenswrapper[4811]: I0128 17:06:04.359246 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48d869be-3f1e-4fe1-93ec-fdd211c9045f-config\") pod \"48d869be-3f1e-4fe1-93ec-fdd211c9045f\" (UID: \"48d869be-3f1e-4fe1-93ec-fdd211c9045f\") " Jan 28 17:06:04 crc kubenswrapper[4811]: I0128 17:06:04.363861 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rm76n\" (UniqueName: \"kubernetes.io/projected/48d869be-3f1e-4fe1-93ec-fdd211c9045f-kube-api-access-rm76n\") pod \"48d869be-3f1e-4fe1-93ec-fdd211c9045f\" (UID: \"48d869be-3f1e-4fe1-93ec-fdd211c9045f\") " Jan 28 17:06:04 crc kubenswrapper[4811]: I0128 17:06:04.363957 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/48d869be-3f1e-4fe1-93ec-fdd211c9045f-dns-svc\") pod \"48d869be-3f1e-4fe1-93ec-fdd211c9045f\" (UID: \"48d869be-3f1e-4fe1-93ec-fdd211c9045f\") " Jan 28 17:06:04 crc kubenswrapper[4811]: I0128 17:06:04.372645 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48d869be-3f1e-4fe1-93ec-fdd211c9045f-kube-api-access-rm76n" (OuterVolumeSpecName: "kube-api-access-rm76n") pod "48d869be-3f1e-4fe1-93ec-fdd211c9045f" (UID: "48d869be-3f1e-4fe1-93ec-fdd211c9045f"). InnerVolumeSpecName "kube-api-access-rm76n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:06:04 crc kubenswrapper[4811]: I0128 17:06:04.398418 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48d869be-3f1e-4fe1-93ec-fdd211c9045f-config" (OuterVolumeSpecName: "config") pod "48d869be-3f1e-4fe1-93ec-fdd211c9045f" (UID: "48d869be-3f1e-4fe1-93ec-fdd211c9045f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:06:04 crc kubenswrapper[4811]: I0128 17:06:04.403545 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48d869be-3f1e-4fe1-93ec-fdd211c9045f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "48d869be-3f1e-4fe1-93ec-fdd211c9045f" (UID: "48d869be-3f1e-4fe1-93ec-fdd211c9045f"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:06:04 crc kubenswrapper[4811]: I0128 17:06:04.466177 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rm76n\" (UniqueName: \"kubernetes.io/projected/48d869be-3f1e-4fe1-93ec-fdd211c9045f-kube-api-access-rm76n\") on node \"crc\" DevicePath \"\"" Jan 28 17:06:04 crc kubenswrapper[4811]: I0128 17:06:04.467198 4811 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/48d869be-3f1e-4fe1-93ec-fdd211c9045f-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 28 17:06:04 crc kubenswrapper[4811]: I0128 17:06:04.467507 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48d869be-3f1e-4fe1-93ec-fdd211c9045f-config\") on node \"crc\" DevicePath \"\"" Jan 28 17:06:05 crc kubenswrapper[4811]: I0128 17:06:05.124307 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95587bc99-2zrdl" Jan 28 17:06:05 crc kubenswrapper[4811]: I0128 17:06:05.124301 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"53fb2b01-0aa4-4d3a-8395-e1f0399802d4","Type":"ContainerStarted","Data":"7b0f486ada906a11b742f80df278cdf723576c6adf5fc13b6fbec3e50c65f45b"} Jan 28 17:06:05 crc kubenswrapper[4811]: I0128 17:06:05.157361 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=11.15734457 podStartE2EDuration="11.15734457s" podCreationTimestamp="2026-01-28 17:05:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:06:05.149753734 +0000 UTC m=+4857.904117317" watchObservedRunningTime="2026-01-28 17:06:05.15734457 +0000 UTC m=+4857.911708153" Jan 28 17:06:05 crc kubenswrapper[4811]: I0128 17:06:05.169865 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-95587bc99-2zrdl"] Jan 28 17:06:05 crc kubenswrapper[4811]: I0128 17:06:05.177662 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-95587bc99-2zrdl"] Jan 28 17:06:05 crc kubenswrapper[4811]: I0128 17:06:05.359099 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Jan 28 17:06:05 crc kubenswrapper[4811]: I0128 17:06:05.359179 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Jan 28 17:06:05 crc kubenswrapper[4811]: I0128 17:06:05.766464 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Jan 28 17:06:06 crc kubenswrapper[4811]: I0128 17:06:06.135612 4811 generic.go:334] "Generic (PLEG): container finished" podID="36dc7233-7d69-4de6-ac18-3c7e87a7b21c" containerID="9536edd9c28dbc4ca5e262c6432cf5c1173b21b687fe9ee091cd210db0bbbd47" exitCode=0 Jan 28 17:06:06 crc kubenswrapper[4811]: I0128 17:06:06.135725 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"36dc7233-7d69-4de6-ac18-3c7e87a7b21c","Type":"ContainerDied","Data":"9536edd9c28dbc4ca5e262c6432cf5c1173b21b687fe9ee091cd210db0bbbd47"} Jan 28 17:06:06 crc kubenswrapper[4811]: I0128 17:06:06.351052 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48d869be-3f1e-4fe1-93ec-fdd211c9045f" path="/var/lib/kubelet/pods/48d869be-3f1e-4fe1-93ec-fdd211c9045f/volumes" Jan 28 
17:06:07 crc kubenswrapper[4811]: I0128 17:06:07.144295 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"36dc7233-7d69-4de6-ac18-3c7e87a7b21c","Type":"ContainerStarted","Data":"bd8a95e7278d493bb43d2d06bbd188dcde3207fd3a4ea5e5c7eb1a4cf0af46fc"} Jan 28 17:06:07 crc kubenswrapper[4811]: I0128 17:06:07.168769 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=12.168733211 podStartE2EDuration="12.168733211s" podCreationTimestamp="2026-01-28 17:05:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:06:07.164400683 +0000 UTC m=+4859.918764276" watchObservedRunningTime="2026-01-28 17:06:07.168733211 +0000 UTC m=+4859.923096794" Jan 28 17:06:09 crc kubenswrapper[4811]: I0128 17:06:09.443350 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Jan 28 17:06:09 crc kubenswrapper[4811]: I0128 17:06:09.514027 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Jan 28 17:06:13 crc kubenswrapper[4811]: I0128 17:06:13.986222 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-x2ljn"] Jan 28 17:06:13 crc kubenswrapper[4811]: E0128 17:06:13.986912 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48d869be-3f1e-4fe1-93ec-fdd211c9045f" containerName="init" Jan 28 17:06:13 crc kubenswrapper[4811]: I0128 17:06:13.986933 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="48d869be-3f1e-4fe1-93ec-fdd211c9045f" containerName="init" Jan 28 17:06:13 crc kubenswrapper[4811]: E0128 17:06:13.986955 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48d869be-3f1e-4fe1-93ec-fdd211c9045f" containerName="dnsmasq-dns" Jan 28 17:06:13 crc kubenswrapper[4811]: I0128 17:06:13.986963 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="48d869be-3f1e-4fe1-93ec-fdd211c9045f" containerName="dnsmasq-dns" Jan 28 17:06:13 crc kubenswrapper[4811]: I0128 17:06:13.987122 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="48d869be-3f1e-4fe1-93ec-fdd211c9045f" containerName="dnsmasq-dns" Jan 28 17:06:13 crc kubenswrapper[4811]: I0128 17:06:13.987732 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-x2ljn" Jan 28 17:06:13 crc kubenswrapper[4811]: I0128 17:06:13.990149 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret" Jan 28 17:06:13 crc kubenswrapper[4811]: I0128 17:06:13.994404 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-x2ljn"] Jan 28 17:06:14 crc kubenswrapper[4811]: I0128 17:06:14.116235 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhm2v\" (UniqueName: \"kubernetes.io/projected/4ebcd8f7-82b2-4827-add5-a4c34816e73e-kube-api-access-vhm2v\") pod \"root-account-create-update-x2ljn\" (UID: \"4ebcd8f7-82b2-4827-add5-a4c34816e73e\") " pod="openstack/root-account-create-update-x2ljn" Jan 28 17:06:14 crc kubenswrapper[4811]: I0128 17:06:14.116327 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ebcd8f7-82b2-4827-add5-a4c34816e73e-operator-scripts\") pod \"root-account-create-update-x2ljn\" (UID: \"4ebcd8f7-82b2-4827-add5-a4c34816e73e\") " pod="openstack/root-account-create-update-x2ljn" Jan 28 17:06:14 crc kubenswrapper[4811]: I0128 17:06:14.217565 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ebcd8f7-82b2-4827-add5-a4c34816e73e-operator-scripts\") pod \"root-account-create-update-x2ljn\" (UID: \"4ebcd8f7-82b2-4827-add5-a4c34816e73e\") " pod="openstack/root-account-create-update-x2ljn" Jan 28 17:06:14 crc kubenswrapper[4811]: I0128 17:06:14.217735 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhm2v\" (UniqueName: \"kubernetes.io/projected/4ebcd8f7-82b2-4827-add5-a4c34816e73e-kube-api-access-vhm2v\") pod \"root-account-create-update-x2ljn\" (UID: \"4ebcd8f7-82b2-4827-add5-a4c34816e73e\") " pod="openstack/root-account-create-update-x2ljn" Jan 28 17:06:14 crc kubenswrapper[4811]: I0128 17:06:14.218371 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ebcd8f7-82b2-4827-add5-a4c34816e73e-operator-scripts\") pod \"root-account-create-update-x2ljn\" (UID: \"4ebcd8f7-82b2-4827-add5-a4c34816e73e\") " pod="openstack/root-account-create-update-x2ljn" Jan 28 17:06:14 crc kubenswrapper[4811]: I0128 17:06:14.239112 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhm2v\" (UniqueName: \"kubernetes.io/projected/4ebcd8f7-82b2-4827-add5-a4c34816e73e-kube-api-access-vhm2v\") pod \"root-account-create-update-x2ljn\" (UID: \"4ebcd8f7-82b2-4827-add5-a4c34816e73e\") " pod="openstack/root-account-create-update-x2ljn" Jan 28 17:06:14 crc kubenswrapper[4811]: I0128 17:06:14.307226 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-x2ljn" Jan 28 17:06:14 crc kubenswrapper[4811]: I0128 17:06:14.726324 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-x2ljn"] Jan 28 17:06:14 crc kubenswrapper[4811]: W0128 17:06:14.732396 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4ebcd8f7_82b2_4827_add5_a4c34816e73e.slice/crio-503c6667e6134559884d6de62a05680dc5f9a013ba2af3a5bedbf8c2decd50ea WatchSource:0}: Error finding container 503c6667e6134559884d6de62a05680dc5f9a013ba2af3a5bedbf8c2decd50ea: Status 404 returned error can't find the container with id 503c6667e6134559884d6de62a05680dc5f9a013ba2af3a5bedbf8c2decd50ea Jan 28 17:06:15 crc kubenswrapper[4811]: I0128 17:06:15.201812 4811 generic.go:334] "Generic (PLEG): container finished" podID="4ebcd8f7-82b2-4827-add5-a4c34816e73e" containerID="7055b7934b4296a3bf57ec5ee52ce3b3a3a6cbfb576c0da2edbce6b46d7f4f51" exitCode=0 Jan 28 17:06:15 crc kubenswrapper[4811]: I0128 17:06:15.201974 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-x2ljn" event={"ID":"4ebcd8f7-82b2-4827-add5-a4c34816e73e","Type":"ContainerDied","Data":"7055b7934b4296a3bf57ec5ee52ce3b3a3a6cbfb576c0da2edbce6b46d7f4f51"} Jan 28 17:06:15 crc kubenswrapper[4811]: I0128 17:06:15.202238 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-x2ljn" event={"ID":"4ebcd8f7-82b2-4827-add5-a4c34816e73e","Type":"ContainerStarted","Data":"503c6667e6134559884d6de62a05680dc5f9a013ba2af3a5bedbf8c2decd50ea"} Jan 28 17:06:16 crc kubenswrapper[4811]: I0128 17:06:16.507237 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-x2ljn" Jan 28 17:06:16 crc kubenswrapper[4811]: I0128 17:06:16.660833 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ebcd8f7-82b2-4827-add5-a4c34816e73e-operator-scripts\") pod \"4ebcd8f7-82b2-4827-add5-a4c34816e73e\" (UID: \"4ebcd8f7-82b2-4827-add5-a4c34816e73e\") " Jan 28 17:06:16 crc kubenswrapper[4811]: I0128 17:06:16.661891 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ebcd8f7-82b2-4827-add5-a4c34816e73e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4ebcd8f7-82b2-4827-add5-a4c34816e73e" (UID: "4ebcd8f7-82b2-4827-add5-a4c34816e73e"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:06:16 crc kubenswrapper[4811]: I0128 17:06:16.662032 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vhm2v\" (UniqueName: \"kubernetes.io/projected/4ebcd8f7-82b2-4827-add5-a4c34816e73e-kube-api-access-vhm2v\") pod \"4ebcd8f7-82b2-4827-add5-a4c34816e73e\" (UID: \"4ebcd8f7-82b2-4827-add5-a4c34816e73e\") " Jan 28 17:06:16 crc kubenswrapper[4811]: I0128 17:06:16.662659 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ebcd8f7-82b2-4827-add5-a4c34816e73e-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:06:16 crc kubenswrapper[4811]: I0128 17:06:16.668393 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ebcd8f7-82b2-4827-add5-a4c34816e73e-kube-api-access-vhm2v" (OuterVolumeSpecName: "kube-api-access-vhm2v") pod "4ebcd8f7-82b2-4827-add5-a4c34816e73e" (UID: "4ebcd8f7-82b2-4827-add5-a4c34816e73e"). InnerVolumeSpecName "kube-api-access-vhm2v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:06:16 crc kubenswrapper[4811]: I0128 17:06:16.687408 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Jan 28 17:06:16 crc kubenswrapper[4811]: I0128 17:06:16.687474 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Jan 28 17:06:16 crc kubenswrapper[4811]: I0128 17:06:16.757659 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Jan 28 17:06:16 crc kubenswrapper[4811]: I0128 17:06:16.764321 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vhm2v\" (UniqueName: \"kubernetes.io/projected/4ebcd8f7-82b2-4827-add5-a4c34816e73e-kube-api-access-vhm2v\") on node \"crc\" DevicePath \"\"" Jan 28 17:06:17 crc kubenswrapper[4811]: I0128 17:06:17.215910 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-x2ljn" event={"ID":"4ebcd8f7-82b2-4827-add5-a4c34816e73e","Type":"ContainerDied","Data":"503c6667e6134559884d6de62a05680dc5f9a013ba2af3a5bedbf8c2decd50ea"} Jan 28 17:06:17 crc kubenswrapper[4811]: I0128 17:06:17.215956 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="503c6667e6134559884d6de62a05680dc5f9a013ba2af3a5bedbf8c2decd50ea" Jan 28 17:06:17 crc kubenswrapper[4811]: I0128 17:06:17.215924 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-x2ljn" Jan 28 17:06:17 crc kubenswrapper[4811]: I0128 17:06:17.279923 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Jan 28 17:06:25 crc kubenswrapper[4811]: I0128 17:06:25.351653 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-x2ljn"] Jan 28 17:06:25 crc kubenswrapper[4811]: I0128 17:06:25.363698 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-x2ljn"] Jan 28 17:06:25 crc kubenswrapper[4811]: I0128 17:06:25.427944 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-rr6sj"] Jan 28 17:06:25 crc kubenswrapper[4811]: E0128 17:06:25.428327 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ebcd8f7-82b2-4827-add5-a4c34816e73e" containerName="mariadb-account-create-update" Jan 28 17:06:25 crc kubenswrapper[4811]: I0128 17:06:25.428349 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ebcd8f7-82b2-4827-add5-a4c34816e73e" containerName="mariadb-account-create-update" Jan 28 17:06:25 crc kubenswrapper[4811]: I0128 17:06:25.428600 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ebcd8f7-82b2-4827-add5-a4c34816e73e" containerName="mariadb-account-create-update" Jan 28 17:06:25 crc kubenswrapper[4811]: I0128 17:06:25.429214 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-rr6sj" Jan 28 17:06:25 crc kubenswrapper[4811]: I0128 17:06:25.435332 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-mariadb-root-db-secret" Jan 28 17:06:25 crc kubenswrapper[4811]: I0128 17:06:25.440140 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-rr6sj"] Jan 28 17:06:25 crc kubenswrapper[4811]: I0128 17:06:25.487675 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56sm7\" (UniqueName: \"kubernetes.io/projected/4d95d45b-5089-4861-8542-d2a8e3839028-kube-api-access-56sm7\") pod \"root-account-create-update-rr6sj\" (UID: \"4d95d45b-5089-4861-8542-d2a8e3839028\") " pod="openstack/root-account-create-update-rr6sj" Jan 28 17:06:25 crc kubenswrapper[4811]: I0128 17:06:25.488680 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4d95d45b-5089-4861-8542-d2a8e3839028-operator-scripts\") pod \"root-account-create-update-rr6sj\" (UID: \"4d95d45b-5089-4861-8542-d2a8e3839028\") " pod="openstack/root-account-create-update-rr6sj" Jan 28 17:06:25 crc kubenswrapper[4811]: I0128 17:06:25.590902 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56sm7\" (UniqueName: \"kubernetes.io/projected/4d95d45b-5089-4861-8542-d2a8e3839028-kube-api-access-56sm7\") pod \"root-account-create-update-rr6sj\" (UID: \"4d95d45b-5089-4861-8542-d2a8e3839028\") " pod="openstack/root-account-create-update-rr6sj" Jan 28 17:06:25 crc kubenswrapper[4811]: I0128 17:06:25.590967 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4d95d45b-5089-4861-8542-d2a8e3839028-operator-scripts\") pod \"root-account-create-update-rr6sj\" (UID: \"4d95d45b-5089-4861-8542-d2a8e3839028\") " 
pod="openstack/root-account-create-update-rr6sj" Jan 28 17:06:25 crc kubenswrapper[4811]: I0128 17:06:25.591903 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4d95d45b-5089-4861-8542-d2a8e3839028-operator-scripts\") pod \"root-account-create-update-rr6sj\" (UID: \"4d95d45b-5089-4861-8542-d2a8e3839028\") " pod="openstack/root-account-create-update-rr6sj" Jan 28 17:06:25 crc kubenswrapper[4811]: I0128 17:06:25.616091 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56sm7\" (UniqueName: \"kubernetes.io/projected/4d95d45b-5089-4861-8542-d2a8e3839028-kube-api-access-56sm7\") pod \"root-account-create-update-rr6sj\" (UID: \"4d95d45b-5089-4861-8542-d2a8e3839028\") " pod="openstack/root-account-create-update-rr6sj" Jan 28 17:06:25 crc kubenswrapper[4811]: I0128 17:06:25.752414 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-rr6sj" Jan 28 17:06:26 crc kubenswrapper[4811]: W0128 17:06:26.174043 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4d95d45b_5089_4861_8542_d2a8e3839028.slice/crio-1088a778469d10193fcda2c714f799d2c59a3672e5317affcc1c0af2d1ac3af4 WatchSource:0}: Error finding container 1088a778469d10193fcda2c714f799d2c59a3672e5317affcc1c0af2d1ac3af4: Status 404 returned error can't find the container with id 1088a778469d10193fcda2c714f799d2c59a3672e5317affcc1c0af2d1ac3af4 Jan 28 17:06:26 crc kubenswrapper[4811]: I0128 17:06:26.174068 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-rr6sj"] Jan 28 17:06:26 crc kubenswrapper[4811]: I0128 17:06:26.274167 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-rr6sj" event={"ID":"4d95d45b-5089-4861-8542-d2a8e3839028","Type":"ContainerStarted","Data":"1088a778469d10193fcda2c714f799d2c59a3672e5317affcc1c0af2d1ac3af4"} Jan 28 17:06:26 crc kubenswrapper[4811]: I0128 17:06:26.351778 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ebcd8f7-82b2-4827-add5-a4c34816e73e" path="/var/lib/kubelet/pods/4ebcd8f7-82b2-4827-add5-a4c34816e73e/volumes" Jan 28 17:06:27 crc kubenswrapper[4811]: I0128 17:06:27.283889 4811 generic.go:334] "Generic (PLEG): container finished" podID="4d95d45b-5089-4861-8542-d2a8e3839028" containerID="579bb675436dc02c9317d78f33a666aa7bb9f251259cb48a9fd03e14eb0b8f82" exitCode=0 Jan 28 17:06:27 crc kubenswrapper[4811]: I0128 17:06:27.283941 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-rr6sj" event={"ID":"4d95d45b-5089-4861-8542-d2a8e3839028","Type":"ContainerDied","Data":"579bb675436dc02c9317d78f33a666aa7bb9f251259cb48a9fd03e14eb0b8f82"} Jan 28 17:06:27 crc kubenswrapper[4811]: I0128 17:06:27.287090 4811 generic.go:334] "Generic (PLEG): container finished" podID="4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc" containerID="d76863b1859ce5b15b1796916f94637d4b80bb70dbfb2adad72dc93d373bf5c7" exitCode=0 Jan 28 17:06:27 crc kubenswrapper[4811]: I0128 17:06:27.287199 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc","Type":"ContainerDied","Data":"d76863b1859ce5b15b1796916f94637d4b80bb70dbfb2adad72dc93d373bf5c7"} Jan 28 17:06:28 crc kubenswrapper[4811]: I0128 17:06:28.296286 4811 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/rabbitmq-server-0" event={"ID":"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc","Type":"ContainerStarted","Data":"864048aeeed6bc079da1f60469ce80f05a9b8972f6175c3cdf38b22609c592f6"} Jan 28 17:06:28 crc kubenswrapper[4811]: I0128 17:06:28.296764 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Jan 28 17:06:28 crc kubenswrapper[4811]: I0128 17:06:28.298698 4811 generic.go:334] "Generic (PLEG): container finished" podID="8c942249-251a-4607-9f2d-c14456dd5264" containerID="7436b4730c86c9b0907877d4dad1609f4c1fcd87b8da252553624cb3e2f5ef03" exitCode=0 Jan 28 17:06:28 crc kubenswrapper[4811]: I0128 17:06:28.298731 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8c942249-251a-4607-9f2d-c14456dd5264","Type":"ContainerDied","Data":"7436b4730c86c9b0907877d4dad1609f4c1fcd87b8da252553624cb3e2f5ef03"} Jan 28 17:06:28 crc kubenswrapper[4811]: I0128 17:06:28.326018 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.325999942 podStartE2EDuration="36.325999942s" podCreationTimestamp="2026-01-28 17:05:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:06:28.320503652 +0000 UTC m=+4881.074867245" watchObservedRunningTime="2026-01-28 17:06:28.325999942 +0000 UTC m=+4881.080363525" Jan 28 17:06:28 crc kubenswrapper[4811]: I0128 17:06:28.590372 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-rr6sj" Jan 28 17:06:28 crc kubenswrapper[4811]: I0128 17:06:28.742047 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-56sm7\" (UniqueName: \"kubernetes.io/projected/4d95d45b-5089-4861-8542-d2a8e3839028-kube-api-access-56sm7\") pod \"4d95d45b-5089-4861-8542-d2a8e3839028\" (UID: \"4d95d45b-5089-4861-8542-d2a8e3839028\") " Jan 28 17:06:28 crc kubenswrapper[4811]: I0128 17:06:28.742177 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4d95d45b-5089-4861-8542-d2a8e3839028-operator-scripts\") pod \"4d95d45b-5089-4861-8542-d2a8e3839028\" (UID: \"4d95d45b-5089-4861-8542-d2a8e3839028\") " Jan 28 17:06:28 crc kubenswrapper[4811]: I0128 17:06:28.742928 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4d95d45b-5089-4861-8542-d2a8e3839028-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4d95d45b-5089-4861-8542-d2a8e3839028" (UID: "4d95d45b-5089-4861-8542-d2a8e3839028"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:06:28 crc kubenswrapper[4811]: I0128 17:06:28.746546 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d95d45b-5089-4861-8542-d2a8e3839028-kube-api-access-56sm7" (OuterVolumeSpecName: "kube-api-access-56sm7") pod "4d95d45b-5089-4861-8542-d2a8e3839028" (UID: "4d95d45b-5089-4861-8542-d2a8e3839028"). InnerVolumeSpecName "kube-api-access-56sm7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:06:28 crc kubenswrapper[4811]: I0128 17:06:28.747503 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4d95d45b-5089-4861-8542-d2a8e3839028-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:06:28 crc kubenswrapper[4811]: I0128 17:06:28.747546 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-56sm7\" (UniqueName: \"kubernetes.io/projected/4d95d45b-5089-4861-8542-d2a8e3839028-kube-api-access-56sm7\") on node \"crc\" DevicePath \"\"" Jan 28 17:06:29 crc kubenswrapper[4811]: I0128 17:06:29.316370 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8c942249-251a-4607-9f2d-c14456dd5264","Type":"ContainerStarted","Data":"26c8218bddf0b9e8c33f25bdf45c817fad4954b5fa1aea040df541af50ac07eb"} Jan 28 17:06:29 crc kubenswrapper[4811]: I0128 17:06:29.316625 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:29 crc kubenswrapper[4811]: I0128 17:06:29.318162 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-rr6sj" Jan 28 17:06:29 crc kubenswrapper[4811]: I0128 17:06:29.318165 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-rr6sj" event={"ID":"4d95d45b-5089-4861-8542-d2a8e3839028","Type":"ContainerDied","Data":"1088a778469d10193fcda2c714f799d2c59a3672e5317affcc1c0af2d1ac3af4"} Jan 28 17:06:29 crc kubenswrapper[4811]: I0128 17:06:29.318220 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1088a778469d10193fcda2c714f799d2c59a3672e5317affcc1c0af2d1ac3af4" Jan 28 17:06:29 crc kubenswrapper[4811]: I0128 17:06:29.341793 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.341771136 podStartE2EDuration="37.341771136s" podCreationTimestamp="2026-01-28 17:05:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:06:29.337820228 +0000 UTC m=+4882.092183821" watchObservedRunningTime="2026-01-28 17:06:29.341771136 +0000 UTC m=+4882.096134719" Jan 28 17:06:33 crc kubenswrapper[4811]: I0128 17:06:33.087728 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 17:06:33 crc kubenswrapper[4811]: I0128 17:06:33.089071 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 17:06:43 crc kubenswrapper[4811]: I0128 17:06:43.692662 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Jan 28 17:06:44 crc kubenswrapper[4811]: I0128 17:06:44.099186 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:47 crc kubenswrapper[4811]: I0128 17:06:47.961006 4811 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-699964fbc-r79rf"] Jan 28 17:06:47 crc kubenswrapper[4811]: E0128 17:06:47.961656 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d95d45b-5089-4861-8542-d2a8e3839028" containerName="mariadb-account-create-update" Jan 28 17:06:47 crc kubenswrapper[4811]: I0128 17:06:47.961676 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d95d45b-5089-4861-8542-d2a8e3839028" containerName="mariadb-account-create-update" Jan 28 17:06:47 crc kubenswrapper[4811]: I0128 17:06:47.961835 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d95d45b-5089-4861-8542-d2a8e3839028" containerName="mariadb-account-create-update" Jan 28 17:06:47 crc kubenswrapper[4811]: I0128 17:06:47.962747 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-699964fbc-r79rf" Jan 28 17:06:47 crc kubenswrapper[4811]: I0128 17:06:47.976615 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-699964fbc-r79rf"] Jan 28 17:06:48 crc kubenswrapper[4811]: I0128 17:06:48.146340 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/073b884f-1b51-425e-a2eb-5e7e137a58d1-dns-svc\") pod \"dnsmasq-dns-699964fbc-r79rf\" (UID: \"073b884f-1b51-425e-a2eb-5e7e137a58d1\") " pod="openstack/dnsmasq-dns-699964fbc-r79rf" Jan 28 17:06:48 crc kubenswrapper[4811]: I0128 17:06:48.146494 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sm4xw\" (UniqueName: \"kubernetes.io/projected/073b884f-1b51-425e-a2eb-5e7e137a58d1-kube-api-access-sm4xw\") pod \"dnsmasq-dns-699964fbc-r79rf\" (UID: \"073b884f-1b51-425e-a2eb-5e7e137a58d1\") " pod="openstack/dnsmasq-dns-699964fbc-r79rf" Jan 28 17:06:48 crc kubenswrapper[4811]: I0128 17:06:48.146606 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/073b884f-1b51-425e-a2eb-5e7e137a58d1-config\") pod \"dnsmasq-dns-699964fbc-r79rf\" (UID: \"073b884f-1b51-425e-a2eb-5e7e137a58d1\") " pod="openstack/dnsmasq-dns-699964fbc-r79rf" Jan 28 17:06:48 crc kubenswrapper[4811]: I0128 17:06:48.248373 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/073b884f-1b51-425e-a2eb-5e7e137a58d1-dns-svc\") pod \"dnsmasq-dns-699964fbc-r79rf\" (UID: \"073b884f-1b51-425e-a2eb-5e7e137a58d1\") " pod="openstack/dnsmasq-dns-699964fbc-r79rf" Jan 28 17:06:48 crc kubenswrapper[4811]: I0128 17:06:48.248494 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sm4xw\" (UniqueName: \"kubernetes.io/projected/073b884f-1b51-425e-a2eb-5e7e137a58d1-kube-api-access-sm4xw\") pod \"dnsmasq-dns-699964fbc-r79rf\" (UID: \"073b884f-1b51-425e-a2eb-5e7e137a58d1\") " pod="openstack/dnsmasq-dns-699964fbc-r79rf" Jan 28 17:06:48 crc kubenswrapper[4811]: I0128 17:06:48.248535 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/073b884f-1b51-425e-a2eb-5e7e137a58d1-config\") pod \"dnsmasq-dns-699964fbc-r79rf\" (UID: \"073b884f-1b51-425e-a2eb-5e7e137a58d1\") " pod="openstack/dnsmasq-dns-699964fbc-r79rf" Jan 28 17:06:48 crc kubenswrapper[4811]: I0128 17:06:48.250120 4811 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"config\" (UniqueName: \"kubernetes.io/configmap/073b884f-1b51-425e-a2eb-5e7e137a58d1-config\") pod \"dnsmasq-dns-699964fbc-r79rf\" (UID: \"073b884f-1b51-425e-a2eb-5e7e137a58d1\") " pod="openstack/dnsmasq-dns-699964fbc-r79rf" Jan 28 17:06:48 crc kubenswrapper[4811]: I0128 17:06:48.250175 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/073b884f-1b51-425e-a2eb-5e7e137a58d1-dns-svc\") pod \"dnsmasq-dns-699964fbc-r79rf\" (UID: \"073b884f-1b51-425e-a2eb-5e7e137a58d1\") " pod="openstack/dnsmasq-dns-699964fbc-r79rf" Jan 28 17:06:48 crc kubenswrapper[4811]: I0128 17:06:48.273133 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sm4xw\" (UniqueName: \"kubernetes.io/projected/073b884f-1b51-425e-a2eb-5e7e137a58d1-kube-api-access-sm4xw\") pod \"dnsmasq-dns-699964fbc-r79rf\" (UID: \"073b884f-1b51-425e-a2eb-5e7e137a58d1\") " pod="openstack/dnsmasq-dns-699964fbc-r79rf" Jan 28 17:06:48 crc kubenswrapper[4811]: I0128 17:06:48.288174 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-699964fbc-r79rf" Jan 28 17:06:48 crc kubenswrapper[4811]: I0128 17:06:48.753674 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-699964fbc-r79rf"] Jan 28 17:06:48 crc kubenswrapper[4811]: I0128 17:06:48.866747 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 28 17:06:49 crc kubenswrapper[4811]: I0128 17:06:49.308287 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 28 17:06:49 crc kubenswrapper[4811]: I0128 17:06:49.476039 4811 generic.go:334] "Generic (PLEG): container finished" podID="073b884f-1b51-425e-a2eb-5e7e137a58d1" containerID="7b70eaff5d2cf822dc041050e95eda7d0e2b9c322f3a51f5919a91942370b5f6" exitCode=0 Jan 28 17:06:49 crc kubenswrapper[4811]: I0128 17:06:49.476262 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-699964fbc-r79rf" event={"ID":"073b884f-1b51-425e-a2eb-5e7e137a58d1","Type":"ContainerDied","Data":"7b70eaff5d2cf822dc041050e95eda7d0e2b9c322f3a51f5919a91942370b5f6"} Jan 28 17:06:49 crc kubenswrapper[4811]: I0128 17:06:49.476327 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-699964fbc-r79rf" event={"ID":"073b884f-1b51-425e-a2eb-5e7e137a58d1","Type":"ContainerStarted","Data":"8c5f90a4f0a2cf04094eeabd88c420ded5a4051a937fdcb2b4af9fd251d0faf4"} Jan 28 17:06:50 crc kubenswrapper[4811]: I0128 17:06:50.485606 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-699964fbc-r79rf" event={"ID":"073b884f-1b51-425e-a2eb-5e7e137a58d1","Type":"ContainerStarted","Data":"490d4c0fe896d9635f66c94e540357ca1dd9b2290afdfcdcade8a62a7d524aeb"} Jan 28 17:06:50 crc kubenswrapper[4811]: I0128 17:06:50.486708 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-699964fbc-r79rf" Jan 28 17:06:50 crc kubenswrapper[4811]: I0128 17:06:50.502368 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-699964fbc-r79rf" podStartSLOduration=3.502346834 podStartE2EDuration="3.502346834s" podCreationTimestamp="2026-01-28 17:06:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:06:50.501677676 +0000 UTC m=+4903.256041259" watchObservedRunningTime="2026-01-28 
17:06:50.502346834 +0000 UTC m=+4903.256710417" Jan 28 17:06:50 crc kubenswrapper[4811]: I0128 17:06:50.898151 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc" containerName="rabbitmq" containerID="cri-o://864048aeeed6bc079da1f60469ce80f05a9b8972f6175c3cdf38b22609c592f6" gracePeriod=604798 Jan 28 17:06:51 crc kubenswrapper[4811]: I0128 17:06:51.408006 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="8c942249-251a-4607-9f2d-c14456dd5264" containerName="rabbitmq" containerID="cri-o://26c8218bddf0b9e8c33f25bdf45c817fad4954b5fa1aea040df541af50ac07eb" gracePeriod=604798 Jan 28 17:06:53 crc kubenswrapper[4811]: I0128 17:06:53.689625 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.245:5672: connect: connection refused" Jan 28 17:06:54 crc kubenswrapper[4811]: I0128 17:06:54.096903 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="8c942249-251a-4607-9f2d-c14456dd5264" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.246:5672: connect: connection refused" Jan 28 17:06:57 crc kubenswrapper[4811]: I0128 17:06:57.535071 4811 generic.go:334] "Generic (PLEG): container finished" podID="4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc" containerID="864048aeeed6bc079da1f60469ce80f05a9b8972f6175c3cdf38b22609c592f6" exitCode=0 Jan 28 17:06:57 crc kubenswrapper[4811]: I0128 17:06:57.535115 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc","Type":"ContainerDied","Data":"864048aeeed6bc079da1f60469ce80f05a9b8972f6175c3cdf38b22609c592f6"} Jan 28 17:06:57 crc kubenswrapper[4811]: I0128 17:06:57.763013 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 28 17:06:57 crc kubenswrapper[4811]: I0128 17:06:57.898076 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-rabbitmq-confd\") pod \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " Jan 28 17:06:57 crc kubenswrapper[4811]: I0128 17:06:57.898859 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-25e50ebd-76f9-4a17-b8a3-9d0fafd304a8\") pod \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " Jan 28 17:06:57 crc kubenswrapper[4811]: I0128 17:06:57.898934 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-server-conf\") pod \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " Jan 28 17:06:57 crc kubenswrapper[4811]: I0128 17:06:57.898970 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6s98v\" (UniqueName: \"kubernetes.io/projected/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-kube-api-access-6s98v\") pod \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " Jan 28 17:06:57 crc kubenswrapper[4811]: I0128 17:06:57.898984 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-plugins-conf\") pod \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " Jan 28 17:06:57 crc kubenswrapper[4811]: I0128 17:06:57.899021 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-rabbitmq-erlang-cookie\") pod \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " Jan 28 17:06:57 crc kubenswrapper[4811]: I0128 17:06:57.899199 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-erlang-cookie-secret\") pod \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " Jan 28 17:06:57 crc kubenswrapper[4811]: I0128 17:06:57.899245 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-rabbitmq-plugins\") pod \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " Jan 28 17:06:57 crc kubenswrapper[4811]: I0128 17:06:57.899378 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-pod-info\") pod \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\" (UID: \"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc\") " Jan 28 17:06:57 crc kubenswrapper[4811]: I0128 17:06:57.900386 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-rabbitmq-erlang-cookie" (OuterVolumeSpecName: 
"rabbitmq-erlang-cookie") pod "4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc" (UID: "4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:06:57 crc kubenswrapper[4811]: I0128 17:06:57.900797 4811 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 28 17:06:57 crc kubenswrapper[4811]: I0128 17:06:57.900802 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc" (UID: "4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:06:57 crc kubenswrapper[4811]: I0128 17:06:57.900952 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc" (UID: "4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:06:57 crc kubenswrapper[4811]: I0128 17:06:57.906215 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-pod-info" (OuterVolumeSpecName: "pod-info") pod "4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc" (UID: "4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 28 17:06:57 crc kubenswrapper[4811]: I0128 17:06:57.906333 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-kube-api-access-6s98v" (OuterVolumeSpecName: "kube-api-access-6s98v") pod "4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc" (UID: "4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc"). InnerVolumeSpecName "kube-api-access-6s98v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:06:57 crc kubenswrapper[4811]: I0128 17:06:57.910686 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc" (UID: "4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:06:57 crc kubenswrapper[4811]: I0128 17:06:57.916065 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-25e50ebd-76f9-4a17-b8a3-9d0fafd304a8" (OuterVolumeSpecName: "persistence") pod "4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc" (UID: "4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc"). InnerVolumeSpecName "pvc-25e50ebd-76f9-4a17-b8a3-9d0fafd304a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 28 17:06:57 crc kubenswrapper[4811]: I0128 17:06:57.926007 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-server-conf" (OuterVolumeSpecName: "server-conf") pod "4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc" (UID: "4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc"). 
InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:06:57 crc kubenswrapper[4811]: I0128 17:06:57.985189 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc" (UID: "4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.004726 4811 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-server-conf\") on node \"crc\" DevicePath \"\"" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.004772 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6s98v\" (UniqueName: \"kubernetes.io/projected/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-kube-api-access-6s98v\") on node \"crc\" DevicePath \"\"" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.004788 4811 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.004799 4811 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.004810 4811 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.004826 4811 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-pod-info\") on node \"crc\" DevicePath \"\"" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.004837 4811 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.004890 4811 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-25e50ebd-76f9-4a17-b8a3-9d0fafd304a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-25e50ebd-76f9-4a17-b8a3-9d0fafd304a8\") on node \"crc\" " Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.024824 4811 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.025043 4811 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-25e50ebd-76f9-4a17-b8a3-9d0fafd304a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-25e50ebd-76f9-4a17-b8a3-9d0fafd304a8") on node "crc" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.106811 4811 reconciler_common.go:293] "Volume detached for volume \"pvc-25e50ebd-76f9-4a17-b8a3-9d0fafd304a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-25e50ebd-76f9-4a17-b8a3-9d0fafd304a8\") on node \"crc\" DevicePath \"\"" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.108023 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.208851 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8c942249-251a-4607-9f2d-c14456dd5264-plugins-conf\") pod \"8c942249-251a-4607-9f2d-c14456dd5264\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.209216 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8c942249-251a-4607-9f2d-c14456dd5264-erlang-cookie-secret\") pod \"8c942249-251a-4607-9f2d-c14456dd5264\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.209376 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4ace5ac5-8e12-41aa-94ea-22acb0747f57\") pod \"8c942249-251a-4607-9f2d-c14456dd5264\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.209478 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8c942249-251a-4607-9f2d-c14456dd5264-pod-info\") pod \"8c942249-251a-4607-9f2d-c14456dd5264\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.212149 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8c942249-251a-4607-9f2d-c14456dd5264-rabbitmq-plugins\") pod \"8c942249-251a-4607-9f2d-c14456dd5264\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.223048 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c942249-251a-4607-9f2d-c14456dd5264-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "8c942249-251a-4607-9f2d-c14456dd5264" (UID: "8c942249-251a-4607-9f2d-c14456dd5264"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.223605 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c942249-251a-4607-9f2d-c14456dd5264-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "8c942249-251a-4607-9f2d-c14456dd5264" (UID: "8c942249-251a-4607-9f2d-c14456dd5264"). InnerVolumeSpecName "plugins-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.223701 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9gzhz\" (UniqueName: \"kubernetes.io/projected/8c942249-251a-4607-9f2d-c14456dd5264-kube-api-access-9gzhz\") pod \"8c942249-251a-4607-9f2d-c14456dd5264\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.223762 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8c942249-251a-4607-9f2d-c14456dd5264-server-conf\") pod \"8c942249-251a-4607-9f2d-c14456dd5264\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.223800 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8c942249-251a-4607-9f2d-c14456dd5264-rabbitmq-erlang-cookie\") pod \"8c942249-251a-4607-9f2d-c14456dd5264\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.223820 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8c942249-251a-4607-9f2d-c14456dd5264-rabbitmq-confd\") pod \"8c942249-251a-4607-9f2d-c14456dd5264\" (UID: \"8c942249-251a-4607-9f2d-c14456dd5264\") " Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.224332 4811 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8c942249-251a-4607-9f2d-c14456dd5264-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.224353 4811 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8c942249-251a-4607-9f2d-c14456dd5264-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.231023 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c942249-251a-4607-9f2d-c14456dd5264-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "8c942249-251a-4607-9f2d-c14456dd5264" (UID: "8c942249-251a-4607-9f2d-c14456dd5264"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.231638 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c942249-251a-4607-9f2d-c14456dd5264-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "8c942249-251a-4607-9f2d-c14456dd5264" (UID: "8c942249-251a-4607-9f2d-c14456dd5264"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.246172 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/8c942249-251a-4607-9f2d-c14456dd5264-pod-info" (OuterVolumeSpecName: "pod-info") pod "8c942249-251a-4607-9f2d-c14456dd5264" (UID: "8c942249-251a-4607-9f2d-c14456dd5264"). InnerVolumeSpecName "pod-info". 
PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.255006 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c942249-251a-4607-9f2d-c14456dd5264-kube-api-access-9gzhz" (OuterVolumeSpecName: "kube-api-access-9gzhz") pod "8c942249-251a-4607-9f2d-c14456dd5264" (UID: "8c942249-251a-4607-9f2d-c14456dd5264"). InnerVolumeSpecName "kube-api-access-9gzhz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.285148 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c942249-251a-4607-9f2d-c14456dd5264-server-conf" (OuterVolumeSpecName: "server-conf") pod "8c942249-251a-4607-9f2d-c14456dd5264" (UID: "8c942249-251a-4607-9f2d-c14456dd5264"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.296609 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-699964fbc-r79rf" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.341471 4811 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8c942249-251a-4607-9f2d-c14456dd5264-pod-info\") on node \"crc\" DevicePath \"\"" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.341502 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9gzhz\" (UniqueName: \"kubernetes.io/projected/8c942249-251a-4607-9f2d-c14456dd5264-kube-api-access-9gzhz\") on node \"crc\" DevicePath \"\"" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.341515 4811 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8c942249-251a-4607-9f2d-c14456dd5264-server-conf\") on node \"crc\" DevicePath \"\"" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.341528 4811 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8c942249-251a-4607-9f2d-c14456dd5264-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.341539 4811 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8c942249-251a-4607-9f2d-c14456dd5264-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.386654 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4ace5ac5-8e12-41aa-94ea-22acb0747f57" (OuterVolumeSpecName: "persistence") pod "8c942249-251a-4607-9f2d-c14456dd5264" (UID: "8c942249-251a-4607-9f2d-c14456dd5264"). InnerVolumeSpecName "pvc-4ace5ac5-8e12-41aa-94ea-22acb0747f57". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.431633 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c942249-251a-4607-9f2d-c14456dd5264-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "8c942249-251a-4607-9f2d-c14456dd5264" (UID: "8c942249-251a-4607-9f2d-c14456dd5264"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.443730 4811 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8c942249-251a-4607-9f2d-c14456dd5264-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.443770 4811 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-4ace5ac5-8e12-41aa-94ea-22acb0747f57\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4ace5ac5-8e12-41aa-94ea-22acb0747f57\") on node \"crc\" " Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.455640 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d79f765b5-qpvvf"] Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.455918 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5d79f765b5-qpvvf" podUID="d7f58280-b425-496c-865a-f2f9bdc31e49" containerName="dnsmasq-dns" containerID="cri-o://16c2a91803a179831de9db2472185e998f5c09d31ca55aaf07b6cc1c5f243b0e" gracePeriod=10 Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.502540 4811 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.502765 4811 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-4ace5ac5-8e12-41aa-94ea-22acb0747f57" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4ace5ac5-8e12-41aa-94ea-22acb0747f57") on node "crc" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.547297 4811 reconciler_common.go:293] "Volume detached for volume \"pvc-4ace5ac5-8e12-41aa-94ea-22acb0747f57\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4ace5ac5-8e12-41aa-94ea-22acb0747f57\") on node \"crc\" DevicePath \"\"" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.550069 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc","Type":"ContainerDied","Data":"f376ba5c96c8514c4f84a6f5710dbf577e742c765f2d998372eece9e4bda96ae"} Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.550119 4811 scope.go:117] "RemoveContainer" containerID="864048aeeed6bc079da1f60469ce80f05a9b8972f6175c3cdf38b22609c592f6" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.550249 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.568707 4811 generic.go:334] "Generic (PLEG): container finished" podID="8c942249-251a-4607-9f2d-c14456dd5264" containerID="26c8218bddf0b9e8c33f25bdf45c817fad4954b5fa1aea040df541af50ac07eb" exitCode=0 Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.568921 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8c942249-251a-4607-9f2d-c14456dd5264","Type":"ContainerDied","Data":"26c8218bddf0b9e8c33f25bdf45c817fad4954b5fa1aea040df541af50ac07eb"} Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.569130 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8c942249-251a-4607-9f2d-c14456dd5264","Type":"ContainerDied","Data":"9133e462bad21495db39404627d351d8069e4b8152cd8068e832a98b3d623195"} Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.569085 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.590895 4811 scope.go:117] "RemoveContainer" containerID="d76863b1859ce5b15b1796916f94637d4b80bb70dbfb2adad72dc93d373bf5c7" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.597251 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.614307 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.631450 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.638415 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.648824 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Jan 28 17:06:58 crc kubenswrapper[4811]: E0128 17:06:58.649187 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c942249-251a-4607-9f2d-c14456dd5264" containerName="setup-container" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.649202 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c942249-251a-4607-9f2d-c14456dd5264" containerName="setup-container" Jan 28 17:06:58 crc kubenswrapper[4811]: E0128 17:06:58.649220 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c942249-251a-4607-9f2d-c14456dd5264" containerName="rabbitmq" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.649227 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c942249-251a-4607-9f2d-c14456dd5264" containerName="rabbitmq" Jan 28 17:06:58 crc kubenswrapper[4811]: E0128 17:06:58.649243 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc" containerName="rabbitmq" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.649252 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc" containerName="rabbitmq" Jan 28 17:06:58 crc kubenswrapper[4811]: E0128 17:06:58.649286 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc" containerName="setup-container" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.649294 4811 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc" containerName="setup-container" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.649478 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc" containerName="rabbitmq" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.649497 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c942249-251a-4607-9f2d-c14456dd5264" containerName="rabbitmq" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.650514 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.657996 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.658206 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.658409 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.659305 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-k6phq" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.659490 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.671201 4811 scope.go:117] "RemoveContainer" containerID="26c8218bddf0b9e8c33f25bdf45c817fad4954b5fa1aea040df541af50ac07eb" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.692731 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.709745 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.711655 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.715778 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-x7595" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.715988 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.716160 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.716304 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.716522 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.724222 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.744994 4811 scope.go:117] "RemoveContainer" containerID="7436b4730c86c9b0907877d4dad1609f4c1fcd87b8da252553624cb3e2f5ef03" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.751649 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/22b21d12-b525-43e8-a649-a65f4be978ca-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"22b21d12-b525-43e8-a649-a65f4be978ca\") " pod="openstack/rabbitmq-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.751697 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/22b21d12-b525-43e8-a649-a65f4be978ca-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"22b21d12-b525-43e8-a649-a65f4be978ca\") " pod="openstack/rabbitmq-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.751734 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/22b21d12-b525-43e8-a649-a65f4be978ca-server-conf\") pod \"rabbitmq-server-0\" (UID: \"22b21d12-b525-43e8-a649-a65f4be978ca\") " pod="openstack/rabbitmq-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.751778 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/22b21d12-b525-43e8-a649-a65f4be978ca-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"22b21d12-b525-43e8-a649-a65f4be978ca\") " pod="openstack/rabbitmq-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.751851 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/22b21d12-b525-43e8-a649-a65f4be978ca-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"22b21d12-b525-43e8-a649-a65f4be978ca\") " pod="openstack/rabbitmq-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.751889 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wm4qj\" (UniqueName: \"kubernetes.io/projected/22b21d12-b525-43e8-a649-a65f4be978ca-kube-api-access-wm4qj\") pod \"rabbitmq-server-0\" (UID: 
\"22b21d12-b525-43e8-a649-a65f4be978ca\") " pod="openstack/rabbitmq-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.751937 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/22b21d12-b525-43e8-a649-a65f4be978ca-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"22b21d12-b525-43e8-a649-a65f4be978ca\") " pod="openstack/rabbitmq-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.751966 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-25e50ebd-76f9-4a17-b8a3-9d0fafd304a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-25e50ebd-76f9-4a17-b8a3-9d0fafd304a8\") pod \"rabbitmq-server-0\" (UID: \"22b21d12-b525-43e8-a649-a65f4be978ca\") " pod="openstack/rabbitmq-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.752044 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/22b21d12-b525-43e8-a649-a65f4be978ca-pod-info\") pod \"rabbitmq-server-0\" (UID: \"22b21d12-b525-43e8-a649-a65f4be978ca\") " pod="openstack/rabbitmq-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.774365 4811 scope.go:117] "RemoveContainer" containerID="26c8218bddf0b9e8c33f25bdf45c817fad4954b5fa1aea040df541af50ac07eb" Jan 28 17:06:58 crc kubenswrapper[4811]: E0128 17:06:58.774888 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"26c8218bddf0b9e8c33f25bdf45c817fad4954b5fa1aea040df541af50ac07eb\": container with ID starting with 26c8218bddf0b9e8c33f25bdf45c817fad4954b5fa1aea040df541af50ac07eb not found: ID does not exist" containerID="26c8218bddf0b9e8c33f25bdf45c817fad4954b5fa1aea040df541af50ac07eb" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.774948 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26c8218bddf0b9e8c33f25bdf45c817fad4954b5fa1aea040df541af50ac07eb"} err="failed to get container status \"26c8218bddf0b9e8c33f25bdf45c817fad4954b5fa1aea040df541af50ac07eb\": rpc error: code = NotFound desc = could not find container \"26c8218bddf0b9e8c33f25bdf45c817fad4954b5fa1aea040df541af50ac07eb\": container with ID starting with 26c8218bddf0b9e8c33f25bdf45c817fad4954b5fa1aea040df541af50ac07eb not found: ID does not exist" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.774977 4811 scope.go:117] "RemoveContainer" containerID="7436b4730c86c9b0907877d4dad1609f4c1fcd87b8da252553624cb3e2f5ef03" Jan 28 17:06:58 crc kubenswrapper[4811]: E0128 17:06:58.775493 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7436b4730c86c9b0907877d4dad1609f4c1fcd87b8da252553624cb3e2f5ef03\": container with ID starting with 7436b4730c86c9b0907877d4dad1609f4c1fcd87b8da252553624cb3e2f5ef03 not found: ID does not exist" containerID="7436b4730c86c9b0907877d4dad1609f4c1fcd87b8da252553624cb3e2f5ef03" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.775542 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7436b4730c86c9b0907877d4dad1609f4c1fcd87b8da252553624cb3e2f5ef03"} err="failed to get container status \"7436b4730c86c9b0907877d4dad1609f4c1fcd87b8da252553624cb3e2f5ef03\": rpc error: code = NotFound desc = could not find 
container \"7436b4730c86c9b0907877d4dad1609f4c1fcd87b8da252553624cb3e2f5ef03\": container with ID starting with 7436b4730c86c9b0907877d4dad1609f4c1fcd87b8da252553624cb3e2f5ef03 not found: ID does not exist" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.853641 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e7cd6fbc-98b4-42ff-aa55-b9608806b823-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7cd6fbc-98b4-42ff-aa55-b9608806b823\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.853686 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/22b21d12-b525-43e8-a649-a65f4be978ca-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"22b21d12-b525-43e8-a649-a65f4be978ca\") " pod="openstack/rabbitmq-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.853718 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-25e50ebd-76f9-4a17-b8a3-9d0fafd304a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-25e50ebd-76f9-4a17-b8a3-9d0fafd304a8\") pod \"rabbitmq-server-0\" (UID: \"22b21d12-b525-43e8-a649-a65f4be978ca\") " pod="openstack/rabbitmq-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.853826 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e7cd6fbc-98b4-42ff-aa55-b9608806b823-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7cd6fbc-98b4-42ff-aa55-b9608806b823\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.853906 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/22b21d12-b525-43e8-a649-a65f4be978ca-pod-info\") pod \"rabbitmq-server-0\" (UID: \"22b21d12-b525-43e8-a649-a65f4be978ca\") " pod="openstack/rabbitmq-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.853931 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e7cd6fbc-98b4-42ff-aa55-b9608806b823-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7cd6fbc-98b4-42ff-aa55-b9608806b823\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.854005 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5tb2\" (UniqueName: \"kubernetes.io/projected/e7cd6fbc-98b4-42ff-aa55-b9608806b823-kube-api-access-p5tb2\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7cd6fbc-98b4-42ff-aa55-b9608806b823\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.854042 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/22b21d12-b525-43e8-a649-a65f4be978ca-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"22b21d12-b525-43e8-a649-a65f4be978ca\") " pod="openstack/rabbitmq-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.854060 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: 
\"kubernetes.io/secret/22b21d12-b525-43e8-a649-a65f4be978ca-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"22b21d12-b525-43e8-a649-a65f4be978ca\") " pod="openstack/rabbitmq-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.854093 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e7cd6fbc-98b4-42ff-aa55-b9608806b823-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7cd6fbc-98b4-42ff-aa55-b9608806b823\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.854118 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/22b21d12-b525-43e8-a649-a65f4be978ca-server-conf\") pod \"rabbitmq-server-0\" (UID: \"22b21d12-b525-43e8-a649-a65f4be978ca\") " pod="openstack/rabbitmq-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.854155 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/22b21d12-b525-43e8-a649-a65f4be978ca-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"22b21d12-b525-43e8-a649-a65f4be978ca\") " pod="openstack/rabbitmq-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.854175 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e7cd6fbc-98b4-42ff-aa55-b9608806b823-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7cd6fbc-98b4-42ff-aa55-b9608806b823\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.854245 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e7cd6fbc-98b4-42ff-aa55-b9608806b823-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7cd6fbc-98b4-42ff-aa55-b9608806b823\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.854268 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-4ace5ac5-8e12-41aa-94ea-22acb0747f57\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4ace5ac5-8e12-41aa-94ea-22acb0747f57\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7cd6fbc-98b4-42ff-aa55-b9608806b823\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.854294 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/22b21d12-b525-43e8-a649-a65f4be978ca-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"22b21d12-b525-43e8-a649-a65f4be978ca\") " pod="openstack/rabbitmq-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.854349 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wm4qj\" (UniqueName: \"kubernetes.io/projected/22b21d12-b525-43e8-a649-a65f4be978ca-kube-api-access-wm4qj\") pod \"rabbitmq-server-0\" (UID: \"22b21d12-b525-43e8-a649-a65f4be978ca\") " pod="openstack/rabbitmq-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.854366 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/e7cd6fbc-98b4-42ff-aa55-b9608806b823-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7cd6fbc-98b4-42ff-aa55-b9608806b823\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.854392 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/22b21d12-b525-43e8-a649-a65f4be978ca-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"22b21d12-b525-43e8-a649-a65f4be978ca\") " pod="openstack/rabbitmq-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.854819 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/22b21d12-b525-43e8-a649-a65f4be978ca-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"22b21d12-b525-43e8-a649-a65f4be978ca\") " pod="openstack/rabbitmq-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.855269 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/22b21d12-b525-43e8-a649-a65f4be978ca-server-conf\") pod \"rabbitmq-server-0\" (UID: \"22b21d12-b525-43e8-a649-a65f4be978ca\") " pod="openstack/rabbitmq-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.855330 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/22b21d12-b525-43e8-a649-a65f4be978ca-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"22b21d12-b525-43e8-a649-a65f4be978ca\") " pod="openstack/rabbitmq-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.856957 4811 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.856980 4811 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-25e50ebd-76f9-4a17-b8a3-9d0fafd304a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-25e50ebd-76f9-4a17-b8a3-9d0fafd304a8\") pod \"rabbitmq-server-0\" (UID: \"22b21d12-b525-43e8-a649-a65f4be978ca\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/9838b56633f794a95a8a47cbe108341f22ea29055f81181b011c7440ab2d0112/globalmount\"" pod="openstack/rabbitmq-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.861253 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/22b21d12-b525-43e8-a649-a65f4be978ca-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"22b21d12-b525-43e8-a649-a65f4be978ca\") " pod="openstack/rabbitmq-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.867379 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/22b21d12-b525-43e8-a649-a65f4be978ca-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"22b21d12-b525-43e8-a649-a65f4be978ca\") " pod="openstack/rabbitmq-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.867959 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/22b21d12-b525-43e8-a649-a65f4be978ca-pod-info\") pod \"rabbitmq-server-0\" (UID: \"22b21d12-b525-43e8-a649-a65f4be978ca\") " pod="openstack/rabbitmq-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.875698 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wm4qj\" (UniqueName: \"kubernetes.io/projected/22b21d12-b525-43e8-a649-a65f4be978ca-kube-api-access-wm4qj\") pod \"rabbitmq-server-0\" (UID: \"22b21d12-b525-43e8-a649-a65f4be978ca\") " pod="openstack/rabbitmq-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.889199 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-25e50ebd-76f9-4a17-b8a3-9d0fafd304a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-25e50ebd-76f9-4a17-b8a3-9d0fafd304a8\") pod \"rabbitmq-server-0\" (UID: \"22b21d12-b525-43e8-a649-a65f4be978ca\") " pod="openstack/rabbitmq-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.928726 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d79f765b5-qpvvf" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.955922 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5tb2\" (UniqueName: \"kubernetes.io/projected/e7cd6fbc-98b4-42ff-aa55-b9608806b823-kube-api-access-p5tb2\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7cd6fbc-98b4-42ff-aa55-b9608806b823\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.956012 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e7cd6fbc-98b4-42ff-aa55-b9608806b823-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7cd6fbc-98b4-42ff-aa55-b9608806b823\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.956060 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e7cd6fbc-98b4-42ff-aa55-b9608806b823-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7cd6fbc-98b4-42ff-aa55-b9608806b823\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.956116 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e7cd6fbc-98b4-42ff-aa55-b9608806b823-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7cd6fbc-98b4-42ff-aa55-b9608806b823\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.956146 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-4ace5ac5-8e12-41aa-94ea-22acb0747f57\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4ace5ac5-8e12-41aa-94ea-22acb0747f57\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7cd6fbc-98b4-42ff-aa55-b9608806b823\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.956203 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e7cd6fbc-98b4-42ff-aa55-b9608806b823-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7cd6fbc-98b4-42ff-aa55-b9608806b823\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.956239 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e7cd6fbc-98b4-42ff-aa55-b9608806b823-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7cd6fbc-98b4-42ff-aa55-b9608806b823\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.956278 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e7cd6fbc-98b4-42ff-aa55-b9608806b823-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7cd6fbc-98b4-42ff-aa55-b9608806b823\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.956308 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e7cd6fbc-98b4-42ff-aa55-b9608806b823-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7cd6fbc-98b4-42ff-aa55-b9608806b823\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 
17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.958760 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e7cd6fbc-98b4-42ff-aa55-b9608806b823-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7cd6fbc-98b4-42ff-aa55-b9608806b823\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.960688 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e7cd6fbc-98b4-42ff-aa55-b9608806b823-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7cd6fbc-98b4-42ff-aa55-b9608806b823\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.962553 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e7cd6fbc-98b4-42ff-aa55-b9608806b823-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7cd6fbc-98b4-42ff-aa55-b9608806b823\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.963585 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e7cd6fbc-98b4-42ff-aa55-b9608806b823-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7cd6fbc-98b4-42ff-aa55-b9608806b823\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.963951 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e7cd6fbc-98b4-42ff-aa55-b9608806b823-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7cd6fbc-98b4-42ff-aa55-b9608806b823\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.966756 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e7cd6fbc-98b4-42ff-aa55-b9608806b823-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7cd6fbc-98b4-42ff-aa55-b9608806b823\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.970066 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e7cd6fbc-98b4-42ff-aa55-b9608806b823-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7cd6fbc-98b4-42ff-aa55-b9608806b823\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.976285 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5tb2\" (UniqueName: \"kubernetes.io/projected/e7cd6fbc-98b4-42ff-aa55-b9608806b823-kube-api-access-p5tb2\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7cd6fbc-98b4-42ff-aa55-b9608806b823\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.979400 4811 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
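Note on the MountDevice/SetUp pairs around this point: CSI mounting is two-phase. MountVolume.MountDevice stages a volume once per node under the plugin's globalmount directory (the hashed path in the entry that follows), and MountVolume.SetUp then publishes it into each consuming pod's volumes directory; with staging skipped for this driver, SetUp performs the only real mount. A small sketch of the two paths involved, where the staging path is copied from the log but the per-pod publish path follows the usual kubelet layout and is an assumption, not something printed here:

    package main

    import "fmt"

    func main() {
        // Staging mount, copied from the MountVolume.MountDevice entry below:
        const staging = "/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/90152b39ca3535b1b2a5e3963e7e39507c984e6965f0e8098d13cacf3eafa19c/globalmount"
        // Per-pod publish path: assumed from the conventional kubelet layout.
        podUID := "e7cd6fbc-98b4-42ff-aa55-b9608806b823"
        volume := "pvc-4ace5ac5-8e12-41aa-94ea-22acb0747f57"
        fmt.Println(staging)
        fmt.Printf("/var/lib/kubelet/pods/%s/volumes/kubernetes.io~csi/%s/mount\n", podUID, volume)
    }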
Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.979484 4811 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-4ace5ac5-8e12-41aa-94ea-22acb0747f57\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4ace5ac5-8e12-41aa-94ea-22acb0747f57\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7cd6fbc-98b4-42ff-aa55-b9608806b823\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/90152b39ca3535b1b2a5e3963e7e39507c984e6965f0e8098d13cacf3eafa19c/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:58 crc kubenswrapper[4811]: I0128 17:06:58.984792 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 28 17:06:59 crc kubenswrapper[4811]: I0128 17:06:59.005314 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-4ace5ac5-8e12-41aa-94ea-22acb0747f57\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4ace5ac5-8e12-41aa-94ea-22acb0747f57\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7cd6fbc-98b4-42ff-aa55-b9608806b823\") " pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:59 crc kubenswrapper[4811]: I0128 17:06:59.040204 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:06:59 crc kubenswrapper[4811]: I0128 17:06:59.056966 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vdv4h\" (UniqueName: \"kubernetes.io/projected/d7f58280-b425-496c-865a-f2f9bdc31e49-kube-api-access-vdv4h\") pod \"d7f58280-b425-496c-865a-f2f9bdc31e49\" (UID: \"d7f58280-b425-496c-865a-f2f9bdc31e49\") " Jan 28 17:06:59 crc kubenswrapper[4811]: I0128 17:06:59.057069 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d7f58280-b425-496c-865a-f2f9bdc31e49-config\") pod \"d7f58280-b425-496c-865a-f2f9bdc31e49\" (UID: \"d7f58280-b425-496c-865a-f2f9bdc31e49\") " Jan 28 17:06:59 crc kubenswrapper[4811]: I0128 17:06:59.057118 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d7f58280-b425-496c-865a-f2f9bdc31e49-dns-svc\") pod \"d7f58280-b425-496c-865a-f2f9bdc31e49\" (UID: \"d7f58280-b425-496c-865a-f2f9bdc31e49\") " Jan 28 17:06:59 crc kubenswrapper[4811]: I0128 17:06:59.060449 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7f58280-b425-496c-865a-f2f9bdc31e49-kube-api-access-vdv4h" (OuterVolumeSpecName: "kube-api-access-vdv4h") pod "d7f58280-b425-496c-865a-f2f9bdc31e49" (UID: "d7f58280-b425-496c-865a-f2f9bdc31e49"). InnerVolumeSpecName "kube-api-access-vdv4h". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:06:59 crc kubenswrapper[4811]: I0128 17:06:59.095155 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d7f58280-b425-496c-865a-f2f9bdc31e49-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d7f58280-b425-496c-865a-f2f9bdc31e49" (UID: "d7f58280-b425-496c-865a-f2f9bdc31e49"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:06:59 crc kubenswrapper[4811]: I0128 17:06:59.103190 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d7f58280-b425-496c-865a-f2f9bdc31e49-config" (OuterVolumeSpecName: "config") pod "d7f58280-b425-496c-865a-f2f9bdc31e49" (UID: "d7f58280-b425-496c-865a-f2f9bdc31e49"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:06:59 crc kubenswrapper[4811]: I0128 17:06:59.159142 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vdv4h\" (UniqueName: \"kubernetes.io/projected/d7f58280-b425-496c-865a-f2f9bdc31e49-kube-api-access-vdv4h\") on node \"crc\" DevicePath \"\"" Jan 28 17:06:59 crc kubenswrapper[4811]: I0128 17:06:59.159186 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d7f58280-b425-496c-865a-f2f9bdc31e49-config\") on node \"crc\" DevicePath \"\"" Jan 28 17:06:59 crc kubenswrapper[4811]: I0128 17:06:59.159199 4811 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d7f58280-b425-496c-865a-f2f9bdc31e49-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 28 17:06:59 crc kubenswrapper[4811]: I0128 17:06:59.425820 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 28 17:06:59 crc kubenswrapper[4811]: I0128 17:06:59.512789 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 28 17:06:59 crc kubenswrapper[4811]: W0128 17:06:59.535338 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode7cd6fbc_98b4_42ff_aa55_b9608806b823.slice/crio-6761618686fdf713ef2756b42e4de488503810db5740088a7e68c2665e62595b WatchSource:0}: Error finding container 6761618686fdf713ef2756b42e4de488503810db5740088a7e68c2665e62595b: Status 404 returned error can't find the container with id 6761618686fdf713ef2756b42e4de488503810db5740088a7e68c2665e62595b Jan 28 17:06:59 crc kubenswrapper[4811]: I0128 17:06:59.582387 4811 generic.go:334] "Generic (PLEG): container finished" podID="d7f58280-b425-496c-865a-f2f9bdc31e49" containerID="16c2a91803a179831de9db2472185e998f5c09d31ca55aaf07b6cc1c5f243b0e" exitCode=0 Jan 28 17:06:59 crc kubenswrapper[4811]: I0128 17:06:59.582455 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d79f765b5-qpvvf" Jan 28 17:06:59 crc kubenswrapper[4811]: I0128 17:06:59.582496 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d79f765b5-qpvvf" event={"ID":"d7f58280-b425-496c-865a-f2f9bdc31e49","Type":"ContainerDied","Data":"16c2a91803a179831de9db2472185e998f5c09d31ca55aaf07b6cc1c5f243b0e"} Jan 28 17:06:59 crc kubenswrapper[4811]: I0128 17:06:59.582530 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d79f765b5-qpvvf" event={"ID":"d7f58280-b425-496c-865a-f2f9bdc31e49","Type":"ContainerDied","Data":"e9a7f300644a963a5a5b77789618aa15a1fd024c91f229d7529305dfd56b9c82"} Jan 28 17:06:59 crc kubenswrapper[4811]: I0128 17:06:59.582546 4811 scope.go:117] "RemoveContainer" containerID="16c2a91803a179831de9db2472185e998f5c09d31ca55aaf07b6cc1c5f243b0e" Jan 28 17:06:59 crc kubenswrapper[4811]: I0128 17:06:59.590587 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e7cd6fbc-98b4-42ff-aa55-b9608806b823","Type":"ContainerStarted","Data":"6761618686fdf713ef2756b42e4de488503810db5740088a7e68c2665e62595b"} Jan 28 17:06:59 crc kubenswrapper[4811]: I0128 17:06:59.594216 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"22b21d12-b525-43e8-a649-a65f4be978ca","Type":"ContainerStarted","Data":"4d1bd939a4332e9cff9116b837bc80b2ada816cac14178e14abc2751197c6c78"} Jan 28 17:06:59 crc kubenswrapper[4811]: I0128 17:06:59.606946 4811 scope.go:117] "RemoveContainer" containerID="49fe97597e35b4538564331129ca087ccf4b3c845483c790077617f978a96efa" Jan 28 17:06:59 crc kubenswrapper[4811]: I0128 17:06:59.621007 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d79f765b5-qpvvf"] Jan 28 17:06:59 crc kubenswrapper[4811]: I0128 17:06:59.629919 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5d79f765b5-qpvvf"] Jan 28 17:06:59 crc kubenswrapper[4811]: I0128 17:06:59.659130 4811 scope.go:117] "RemoveContainer" containerID="16c2a91803a179831de9db2472185e998f5c09d31ca55aaf07b6cc1c5f243b0e" Jan 28 17:06:59 crc kubenswrapper[4811]: E0128 17:06:59.659639 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"16c2a91803a179831de9db2472185e998f5c09d31ca55aaf07b6cc1c5f243b0e\": container with ID starting with 16c2a91803a179831de9db2472185e998f5c09d31ca55aaf07b6cc1c5f243b0e not found: ID does not exist" containerID="16c2a91803a179831de9db2472185e998f5c09d31ca55aaf07b6cc1c5f243b0e" Jan 28 17:06:59 crc kubenswrapper[4811]: I0128 17:06:59.659673 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16c2a91803a179831de9db2472185e998f5c09d31ca55aaf07b6cc1c5f243b0e"} err="failed to get container status \"16c2a91803a179831de9db2472185e998f5c09d31ca55aaf07b6cc1c5f243b0e\": rpc error: code = NotFound desc = could not find container \"16c2a91803a179831de9db2472185e998f5c09d31ca55aaf07b6cc1c5f243b0e\": container with ID starting with 16c2a91803a179831de9db2472185e998f5c09d31ca55aaf07b6cc1c5f243b0e not found: ID does not exist" Jan 28 17:06:59 crc kubenswrapper[4811]: I0128 17:06:59.659697 4811 scope.go:117] "RemoveContainer" containerID="49fe97597e35b4538564331129ca087ccf4b3c845483c790077617f978a96efa" Jan 28 17:06:59 crc kubenswrapper[4811]: E0128 17:06:59.659994 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: 
code = NotFound desc = could not find container \"49fe97597e35b4538564331129ca087ccf4b3c845483c790077617f978a96efa\": container with ID starting with 49fe97597e35b4538564331129ca087ccf4b3c845483c790077617f978a96efa not found: ID does not exist" containerID="49fe97597e35b4538564331129ca087ccf4b3c845483c790077617f978a96efa"
Jan 28 17:06:59 crc kubenswrapper[4811]: I0128 17:06:59.660041 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49fe97597e35b4538564331129ca087ccf4b3c845483c790077617f978a96efa"} err="failed to get container status \"49fe97597e35b4538564331129ca087ccf4b3c845483c790077617f978a96efa\": rpc error: code = NotFound desc = could not find container \"49fe97597e35b4538564331129ca087ccf4b3c845483c790077617f978a96efa\": container with ID starting with 49fe97597e35b4538564331129ca087ccf4b3c845483c790077617f978a96efa not found: ID does not exist"
Jan 28 17:07:00 crc kubenswrapper[4811]: I0128 17:07:00.351042 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc" path="/var/lib/kubelet/pods/4c6fabcf-1ab8-45c7-b2bf-9a372b4e72fc/volumes"
Jan 28 17:07:00 crc kubenswrapper[4811]: I0128 17:07:00.352684 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c942249-251a-4607-9f2d-c14456dd5264" path="/var/lib/kubelet/pods/8c942249-251a-4607-9f2d-c14456dd5264/volumes"
Jan 28 17:07:00 crc kubenswrapper[4811]: I0128 17:07:00.354609 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7f58280-b425-496c-865a-f2f9bdc31e49" path="/var/lib/kubelet/pods/d7f58280-b425-496c-865a-f2f9bdc31e49/volumes"
Jan 28 17:07:01 crc kubenswrapper[4811]: I0128 17:07:01.620794 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"22b21d12-b525-43e8-a649-a65f4be978ca","Type":"ContainerStarted","Data":"997a6ebf49a39d1f1965376e791a27f2a647bcca8b849536678fac5930239922"}
Jan 28 17:07:01 crc kubenswrapper[4811]: I0128 17:07:01.623855 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e7cd6fbc-98b4-42ff-aa55-b9608806b823","Type":"ContainerStarted","Data":"a35aa777f445070c79cfce25547b49b7e2bf242a08911fa5691b736529ae4b82"}
Jan 28 17:07:03 crc kubenswrapper[4811]: I0128 17:07:03.087234 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 17:07:03 crc kubenswrapper[4811]: I0128 17:07:03.087289 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 17:07:03 crc kubenswrapper[4811]: I0128 17:07:03.087326 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6"
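Note on the liveness failure above: the probe is refused at the TCP level, so the container is about to be restarted. For an HTTP probe like this one, the kubelet's prober boils down to a GET that counts any 2xx/3xx status as success; the sketch below mirrors that check for the endpoint in the log (the real prober additionally honors the timeout, period, and failure threshold from the pod spec, none of which appear in this log):

    package main

    import (
        "fmt"
        "net/http"
        "time"
    )

    // probeOnce performs one HTTP liveness check the way the kubelet does:
    // a 2xx/3xx response is healthy; anything else, including a refused
    // connection as logged above, is a failure.
    func probeOnce(url string) error {
        client := &http.Client{Timeout: time.Second}
        resp, err := client.Get(url)
        if err != nil {
            return err // e.g. "dial tcp 127.0.0.1:8798: connect: connection refused"
        }
        defer resp.Body.Close()
        if resp.StatusCode >= 200 && resp.StatusCode < 400 {
            return nil
        }
        return fmt.Errorf("unhealthy status %d", resp.StatusCode)
    }

    func main() {
        if err := probeOnce("http://127.0.0.1:8798/health"); err != nil {
            fmt.Println("probe failed:", err)
        }
    }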
pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 28 17:07:03 crc kubenswrapper[4811]: I0128 17:07:03.088039 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://81a84787cf417a50710a20c50547d9e6d7f6adc8c98070a304ddbd77fa4c5ebc" gracePeriod=600 Jan 28 17:07:03 crc kubenswrapper[4811]: E0128 17:07:03.219864 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:07:03 crc kubenswrapper[4811]: I0128 17:07:03.640601 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="81a84787cf417a50710a20c50547d9e6d7f6adc8c98070a304ddbd77fa4c5ebc" exitCode=0 Jan 28 17:07:03 crc kubenswrapper[4811]: I0128 17:07:03.640688 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"81a84787cf417a50710a20c50547d9e6d7f6adc8c98070a304ddbd77fa4c5ebc"} Jan 28 17:07:03 crc kubenswrapper[4811]: I0128 17:07:03.640934 4811 scope.go:117] "RemoveContainer" containerID="5bb331d50da91e956ddfd0ead12512bba7568cb388527ac7ffc8cf3f494a2d49" Jan 28 17:07:03 crc kubenswrapper[4811]: I0128 17:07:03.641579 4811 scope.go:117] "RemoveContainer" containerID="81a84787cf417a50710a20c50547d9e6d7f6adc8c98070a304ddbd77fa4c5ebc" Jan 28 17:07:03 crc kubenswrapper[4811]: E0128 17:07:03.641882 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:07:17 crc kubenswrapper[4811]: I0128 17:07:17.338983 4811 scope.go:117] "RemoveContainer" containerID="81a84787cf417a50710a20c50547d9e6d7f6adc8c98070a304ddbd77fa4c5ebc" Jan 28 17:07:17 crc kubenswrapper[4811]: E0128 17:07:17.339785 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:07:31 crc kubenswrapper[4811]: I0128 17:07:31.339537 4811 scope.go:117] "RemoveContainer" containerID="81a84787cf417a50710a20c50547d9e6d7f6adc8c98070a304ddbd77fa4c5ebc" Jan 28 17:07:31 crc kubenswrapper[4811]: E0128 17:07:31.340250 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:07:32 crc kubenswrapper[4811]: I0128 17:07:32.843420 4811 generic.go:334] "Generic (PLEG): container finished" podID="22b21d12-b525-43e8-a649-a65f4be978ca" containerID="997a6ebf49a39d1f1965376e791a27f2a647bcca8b849536678fac5930239922" exitCode=0 Jan 28 17:07:32 crc kubenswrapper[4811]: I0128 17:07:32.843468 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"22b21d12-b525-43e8-a649-a65f4be978ca","Type":"ContainerDied","Data":"997a6ebf49a39d1f1965376e791a27f2a647bcca8b849536678fac5930239922"} Jan 28 17:07:33 crc kubenswrapper[4811]: I0128 17:07:33.853595 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"22b21d12-b525-43e8-a649-a65f4be978ca","Type":"ContainerStarted","Data":"35bfbefae58cede1287307b2edce86a85d1d34cf3325edd6f186d0ed41db6c8a"} Jan 28 17:07:33 crc kubenswrapper[4811]: I0128 17:07:33.854940 4811 generic.go:334] "Generic (PLEG): container finished" podID="e7cd6fbc-98b4-42ff-aa55-b9608806b823" containerID="a35aa777f445070c79cfce25547b49b7e2bf242a08911fa5691b736529ae4b82" exitCode=0 Jan 28 17:07:33 crc kubenswrapper[4811]: I0128 17:07:33.854986 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e7cd6fbc-98b4-42ff-aa55-b9608806b823","Type":"ContainerDied","Data":"a35aa777f445070c79cfce25547b49b7e2bf242a08911fa5691b736529ae4b82"} Jan 28 17:07:33 crc kubenswrapper[4811]: I0128 17:07:33.897642 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=35.897602176 podStartE2EDuration="35.897602176s" podCreationTimestamp="2026-01-28 17:06:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:07:33.881334603 +0000 UTC m=+4946.635698176" watchObservedRunningTime="2026-01-28 17:07:33.897602176 +0000 UTC m=+4946.651965759" Jan 28 17:07:34 crc kubenswrapper[4811]: I0128 17:07:34.863616 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e7cd6fbc-98b4-42ff-aa55-b9608806b823","Type":"ContainerStarted","Data":"147ebe20946c516c522ac85c3cb5686cb02a8558dfa2a142e56a9305ce82b4f7"} Jan 28 17:07:34 crc kubenswrapper[4811]: I0128 17:07:34.864186 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:07:34 crc kubenswrapper[4811]: I0128 17:07:34.889040 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.889019457 podStartE2EDuration="36.889019457s" podCreationTimestamp="2026-01-28 17:06:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:07:34.887264509 +0000 UTC m=+4947.641628112" watchObservedRunningTime="2026-01-28 17:07:34.889019457 +0000 UTC m=+4947.643383040" Jan 28 17:07:38 crc kubenswrapper[4811]: I0128 17:07:38.985206 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Jan 28 17:07:46 crc kubenswrapper[4811]: I0128 17:07:46.340105 4811 
scope.go:117] "RemoveContainer" containerID="81a84787cf417a50710a20c50547d9e6d7f6adc8c98070a304ddbd77fa4c5ebc" Jan 28 17:07:46 crc kubenswrapper[4811]: E0128 17:07:46.340876 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:07:48 crc kubenswrapper[4811]: I0128 17:07:48.987181 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Jan 28 17:07:49 crc kubenswrapper[4811]: I0128 17:07:49.042335 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Jan 28 17:07:58 crc kubenswrapper[4811]: I0128 17:07:58.344608 4811 scope.go:117] "RemoveContainer" containerID="81a84787cf417a50710a20c50547d9e6d7f6adc8c98070a304ddbd77fa4c5ebc" Jan 28 17:07:58 crc kubenswrapper[4811]: E0128 17:07:58.345311 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:07:59 crc kubenswrapper[4811]: I0128 17:07:59.613116 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Jan 28 17:07:59 crc kubenswrapper[4811]: E0128 17:07:59.613674 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7f58280-b425-496c-865a-f2f9bdc31e49" containerName="init" Jan 28 17:07:59 crc kubenswrapper[4811]: I0128 17:07:59.613685 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7f58280-b425-496c-865a-f2f9bdc31e49" containerName="init" Jan 28 17:07:59 crc kubenswrapper[4811]: E0128 17:07:59.613722 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7f58280-b425-496c-865a-f2f9bdc31e49" containerName="dnsmasq-dns" Jan 28 17:07:59 crc kubenswrapper[4811]: I0128 17:07:59.613731 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7f58280-b425-496c-865a-f2f9bdc31e49" containerName="dnsmasq-dns" Jan 28 17:07:59 crc kubenswrapper[4811]: I0128 17:07:59.613868 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7f58280-b425-496c-865a-f2f9bdc31e49" containerName="dnsmasq-dns" Jan 28 17:07:59 crc kubenswrapper[4811]: I0128 17:07:59.614304 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client"
Jan 28 17:07:59 crc kubenswrapper[4811]: I0128 17:07:59.616351 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-g8fh4"
Jan 28 17:07:59 crc kubenswrapper[4811]: I0128 17:07:59.624173 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Jan 28 17:07:59 crc kubenswrapper[4811]: I0128 17:07:59.740544 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9sft7\" (UniqueName: \"kubernetes.io/projected/5187253e-beba-41d2-b311-58710b1c5ac7-kube-api-access-9sft7\") pod \"mariadb-client\" (UID: \"5187253e-beba-41d2-b311-58710b1c5ac7\") " pod="openstack/mariadb-client"
Jan 28 17:07:59 crc kubenswrapper[4811]: I0128 17:07:59.842378 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9sft7\" (UniqueName: \"kubernetes.io/projected/5187253e-beba-41d2-b311-58710b1c5ac7-kube-api-access-9sft7\") pod \"mariadb-client\" (UID: \"5187253e-beba-41d2-b311-58710b1c5ac7\") " pod="openstack/mariadb-client"
Jan 28 17:07:59 crc kubenswrapper[4811]: I0128 17:07:59.862928 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9sft7\" (UniqueName: \"kubernetes.io/projected/5187253e-beba-41d2-b311-58710b1c5ac7-kube-api-access-9sft7\") pod \"mariadb-client\" (UID: \"5187253e-beba-41d2-b311-58710b1c5ac7\") " pod="openstack/mariadb-client"
Jan 28 17:07:59 crc kubenswrapper[4811]: I0128 17:07:59.930827 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Jan 28 17:08:00 crc kubenswrapper[4811]: I0128 17:08:00.266660 4811 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 28 17:08:00 crc kubenswrapper[4811]: I0128 17:08:00.267604 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Jan 28 17:08:01 crc kubenswrapper[4811]: I0128 17:08:01.065202 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"5187253e-beba-41d2-b311-58710b1c5ac7","Type":"ContainerStarted","Data":"afb9f48a36df1b2b223a2e1c2b8d182dde32f652902ae14bd00b46e1978446c6"}
Jan 28 17:08:06 crc kubenswrapper[4811]: I0128 17:08:06.098009 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"5187253e-beba-41d2-b311-58710b1c5ac7","Type":"ContainerStarted","Data":"0b51dc401b84268d5c986428fbb7838e0761e978f47ff01f4c6321db54ebabfd"}
Jan 28 17:08:06 crc kubenswrapper[4811]: I0128 17:08:06.114301 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-client" podStartSLOduration=2.17621107 podStartE2EDuration="7.114278618s" podCreationTimestamp="2026-01-28 17:07:59 +0000 UTC" firstStartedPulling="2026-01-28 17:08:00.266450022 +0000 UTC m=+4973.020813605" lastFinishedPulling="2026-01-28 17:08:05.20451756 +0000 UTC m=+4977.958881153" observedRunningTime="2026-01-28 17:08:06.11032398 +0000 UTC m=+4978.864687563" watchObservedRunningTime="2026-01-28 17:08:06.114278618 +0000 UTC m=+4978.868642191"
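Note on the "Observed pod startup duration" entries, worked through from the fields above: podStartSLOduration is the end-to-end startup time minus time spent pulling images. For mariadb-client, the pull runs from 17:08:00.266450022 to 17:08:05.20451756, about 4.938067538s, and 7.114278618s (E2E) - 4.938067538s (pull) = 2.17621108s, which matches the logged podStartSLOduration=2.17621107 up to rounding. For the two rabbitmq pods earlier, firstStartedPulling and lastFinishedPulling are the zero time (the image was already on the node), so the SLO and E2E durations coincide.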
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:08:17 crc kubenswrapper[4811]: I0128 17:08:17.842927 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Jan 28 17:08:17 crc kubenswrapper[4811]: I0128 17:08:17.843671 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/mariadb-client" podUID="5187253e-beba-41d2-b311-58710b1c5ac7" containerName="mariadb-client" containerID="cri-o://0b51dc401b84268d5c986428fbb7838e0761e978f47ff01f4c6321db54ebabfd" gracePeriod=30 Jan 28 17:08:18 crc kubenswrapper[4811]: I0128 17:08:18.185890 4811 generic.go:334] "Generic (PLEG): container finished" podID="5187253e-beba-41d2-b311-58710b1c5ac7" containerID="0b51dc401b84268d5c986428fbb7838e0761e978f47ff01f4c6321db54ebabfd" exitCode=143 Jan 28 17:08:18 crc kubenswrapper[4811]: I0128 17:08:18.185951 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"5187253e-beba-41d2-b311-58710b1c5ac7","Type":"ContainerDied","Data":"0b51dc401b84268d5c986428fbb7838e0761e978f47ff01f4c6321db54ebabfd"} Jan 28 17:08:18 crc kubenswrapper[4811]: I0128 17:08:18.920895 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Jan 28 17:08:18 crc kubenswrapper[4811]: I0128 17:08:18.928223 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9sft7\" (UniqueName: \"kubernetes.io/projected/5187253e-beba-41d2-b311-58710b1c5ac7-kube-api-access-9sft7\") pod \"5187253e-beba-41d2-b311-58710b1c5ac7\" (UID: \"5187253e-beba-41d2-b311-58710b1c5ac7\") " Jan 28 17:08:18 crc kubenswrapper[4811]: I0128 17:08:18.938663 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5187253e-beba-41d2-b311-58710b1c5ac7-kube-api-access-9sft7" (OuterVolumeSpecName: "kube-api-access-9sft7") pod "5187253e-beba-41d2-b311-58710b1c5ac7" (UID: "5187253e-beba-41d2-b311-58710b1c5ac7"). InnerVolumeSpecName "kube-api-access-9sft7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:08:19 crc kubenswrapper[4811]: I0128 17:08:19.034239 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9sft7\" (UniqueName: \"kubernetes.io/projected/5187253e-beba-41d2-b311-58710b1c5ac7-kube-api-access-9sft7\") on node \"crc\" DevicePath \"\"" Jan 28 17:08:19 crc kubenswrapper[4811]: I0128 17:08:19.193623 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"5187253e-beba-41d2-b311-58710b1c5ac7","Type":"ContainerDied","Data":"afb9f48a36df1b2b223a2e1c2b8d182dde32f652902ae14bd00b46e1978446c6"} Jan 28 17:08:19 crc kubenswrapper[4811]: I0128 17:08:19.193678 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Jan 28 17:08:19 crc kubenswrapper[4811]: I0128 17:08:19.193918 4811 scope.go:117] "RemoveContainer" containerID="0b51dc401b84268d5c986428fbb7838e0761e978f47ff01f4c6321db54ebabfd" Jan 28 17:08:19 crc kubenswrapper[4811]: I0128 17:08:19.226379 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Jan 28 17:08:19 crc kubenswrapper[4811]: I0128 17:08:19.233043 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Jan 28 17:08:20 crc kubenswrapper[4811]: I0128 17:08:20.347997 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5187253e-beba-41d2-b311-58710b1c5ac7" path="/var/lib/kubelet/pods/5187253e-beba-41d2-b311-58710b1c5ac7/volumes" Jan 28 17:08:24 crc kubenswrapper[4811]: I0128 17:08:24.339350 4811 scope.go:117] "RemoveContainer" containerID="81a84787cf417a50710a20c50547d9e6d7f6adc8c98070a304ddbd77fa4c5ebc" Jan 28 17:08:24 crc kubenswrapper[4811]: E0128 17:08:24.339841 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:08:38 crc kubenswrapper[4811]: I0128 17:08:38.343147 4811 scope.go:117] "RemoveContainer" containerID="81a84787cf417a50710a20c50547d9e6d7f6adc8c98070a304ddbd77fa4c5ebc" Jan 28 17:08:38 crc kubenswrapper[4811]: E0128 17:08:38.344060 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:08:41 crc kubenswrapper[4811]: I0128 17:08:41.474116 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-dqjbx"] Jan 28 17:08:41 crc kubenswrapper[4811]: E0128 17:08:41.474791 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5187253e-beba-41d2-b311-58710b1c5ac7" containerName="mariadb-client" Jan 28 17:08:41 crc kubenswrapper[4811]: I0128 17:08:41.474806 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="5187253e-beba-41d2-b311-58710b1c5ac7" containerName="mariadb-client" Jan 28 17:08:41 crc kubenswrapper[4811]: I0128 17:08:41.474934 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="5187253e-beba-41d2-b311-58710b1c5ac7" containerName="mariadb-client" Jan 28 17:08:41 crc kubenswrapper[4811]: I0128 17:08:41.476018 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dqjbx" Jan 28 17:08:41 crc kubenswrapper[4811]: I0128 17:08:41.488806 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dqjbx"] Jan 28 17:08:41 crc kubenswrapper[4811]: I0128 17:08:41.515000 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d278b39f-59af-4da5-b964-e6e1f2b3137d-utilities\") pod \"redhat-marketplace-dqjbx\" (UID: \"d278b39f-59af-4da5-b964-e6e1f2b3137d\") " pod="openshift-marketplace/redhat-marketplace-dqjbx" Jan 28 17:08:41 crc kubenswrapper[4811]: I0128 17:08:41.515066 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-js2sx\" (UniqueName: \"kubernetes.io/projected/d278b39f-59af-4da5-b964-e6e1f2b3137d-kube-api-access-js2sx\") pod \"redhat-marketplace-dqjbx\" (UID: \"d278b39f-59af-4da5-b964-e6e1f2b3137d\") " pod="openshift-marketplace/redhat-marketplace-dqjbx" Jan 28 17:08:41 crc kubenswrapper[4811]: I0128 17:08:41.515128 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d278b39f-59af-4da5-b964-e6e1f2b3137d-catalog-content\") pod \"redhat-marketplace-dqjbx\" (UID: \"d278b39f-59af-4da5-b964-e6e1f2b3137d\") " pod="openshift-marketplace/redhat-marketplace-dqjbx" Jan 28 17:08:41 crc kubenswrapper[4811]: I0128 17:08:41.616787 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d278b39f-59af-4da5-b964-e6e1f2b3137d-utilities\") pod \"redhat-marketplace-dqjbx\" (UID: \"d278b39f-59af-4da5-b964-e6e1f2b3137d\") " pod="openshift-marketplace/redhat-marketplace-dqjbx" Jan 28 17:08:41 crc kubenswrapper[4811]: I0128 17:08:41.617140 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-js2sx\" (UniqueName: \"kubernetes.io/projected/d278b39f-59af-4da5-b964-e6e1f2b3137d-kube-api-access-js2sx\") pod \"redhat-marketplace-dqjbx\" (UID: \"d278b39f-59af-4da5-b964-e6e1f2b3137d\") " pod="openshift-marketplace/redhat-marketplace-dqjbx" Jan 28 17:08:41 crc kubenswrapper[4811]: I0128 17:08:41.617199 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d278b39f-59af-4da5-b964-e6e1f2b3137d-catalog-content\") pod \"redhat-marketplace-dqjbx\" (UID: \"d278b39f-59af-4da5-b964-e6e1f2b3137d\") " pod="openshift-marketplace/redhat-marketplace-dqjbx" Jan 28 17:08:41 crc kubenswrapper[4811]: I0128 17:08:41.617357 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d278b39f-59af-4da5-b964-e6e1f2b3137d-utilities\") pod \"redhat-marketplace-dqjbx\" (UID: \"d278b39f-59af-4da5-b964-e6e1f2b3137d\") " pod="openshift-marketplace/redhat-marketplace-dqjbx" Jan 28 17:08:41 crc kubenswrapper[4811]: I0128 17:08:41.617676 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d278b39f-59af-4da5-b964-e6e1f2b3137d-catalog-content\") pod \"redhat-marketplace-dqjbx\" (UID: \"d278b39f-59af-4da5-b964-e6e1f2b3137d\") " pod="openshift-marketplace/redhat-marketplace-dqjbx" Jan 28 17:08:41 crc kubenswrapper[4811]: I0128 17:08:41.641979 4811 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-js2sx\" (UniqueName: \"kubernetes.io/projected/d278b39f-59af-4da5-b964-e6e1f2b3137d-kube-api-access-js2sx\") pod \"redhat-marketplace-dqjbx\" (UID: \"d278b39f-59af-4da5-b964-e6e1f2b3137d\") " pod="openshift-marketplace/redhat-marketplace-dqjbx" Jan 28 17:08:41 crc kubenswrapper[4811]: I0128 17:08:41.807530 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dqjbx" Jan 28 17:08:42 crc kubenswrapper[4811]: I0128 17:08:42.250738 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dqjbx"] Jan 28 17:08:42 crc kubenswrapper[4811]: I0128 17:08:42.364097 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dqjbx" event={"ID":"d278b39f-59af-4da5-b964-e6e1f2b3137d","Type":"ContainerStarted","Data":"aff982fe8ae399d4bb2c83bcda9f86f9701dd45c14dbacb4e0b64183f85c39cd"} Jan 28 17:08:43 crc kubenswrapper[4811]: I0128 17:08:43.373968 4811 generic.go:334] "Generic (PLEG): container finished" podID="d278b39f-59af-4da5-b964-e6e1f2b3137d" containerID="915f04bc2af7e40ba745fa4be2168176c15d017d9306bebabc675c5868de2484" exitCode=0 Jan 28 17:08:43 crc kubenswrapper[4811]: I0128 17:08:43.374263 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dqjbx" event={"ID":"d278b39f-59af-4da5-b964-e6e1f2b3137d","Type":"ContainerDied","Data":"915f04bc2af7e40ba745fa4be2168176c15d017d9306bebabc675c5868de2484"} Jan 28 17:08:46 crc kubenswrapper[4811]: I0128 17:08:46.396666 4811 generic.go:334] "Generic (PLEG): container finished" podID="d278b39f-59af-4da5-b964-e6e1f2b3137d" containerID="6e8e6a17ed76737d09d08b68bfd1ad8536ce55aa3d22b25ca30d75f1a51d5e20" exitCode=0 Jan 28 17:08:46 crc kubenswrapper[4811]: I0128 17:08:46.397277 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dqjbx" event={"ID":"d278b39f-59af-4da5-b964-e6e1f2b3137d","Type":"ContainerDied","Data":"6e8e6a17ed76737d09d08b68bfd1ad8536ce55aa3d22b25ca30d75f1a51d5e20"} Jan 28 17:08:47 crc kubenswrapper[4811]: I0128 17:08:47.406583 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dqjbx" event={"ID":"d278b39f-59af-4da5-b964-e6e1f2b3137d","Type":"ContainerStarted","Data":"b7725e738951466b331e9026608a409112ffc7ff196f17dcbaaf0da8e14516aa"} Jan 28 17:08:47 crc kubenswrapper[4811]: I0128 17:08:47.426299 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-dqjbx" podStartSLOduration=2.7691058809999998 podStartE2EDuration="6.426284455s" podCreationTimestamp="2026-01-28 17:08:41 +0000 UTC" firstStartedPulling="2026-01-28 17:08:43.376269552 +0000 UTC m=+5016.130633135" lastFinishedPulling="2026-01-28 17:08:47.033448126 +0000 UTC m=+5019.787811709" observedRunningTime="2026-01-28 17:08:47.422384398 +0000 UTC m=+5020.176748001" watchObservedRunningTime="2026-01-28 17:08:47.426284455 +0000 UTC m=+5020.180648038" Jan 28 17:08:51 crc kubenswrapper[4811]: I0128 17:08:51.808542 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-dqjbx" Jan 28 17:08:51 crc kubenswrapper[4811]: I0128 17:08:51.808842 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-dqjbx" Jan 28 17:08:51 crc kubenswrapper[4811]: I0128 17:08:51.850703 4811 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-dqjbx" Jan 28 17:08:52 crc kubenswrapper[4811]: I0128 17:08:52.339730 4811 scope.go:117] "RemoveContainer" containerID="81a84787cf417a50710a20c50547d9e6d7f6adc8c98070a304ddbd77fa4c5ebc" Jan 28 17:08:52 crc kubenswrapper[4811]: E0128 17:08:52.340009 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:08:52 crc kubenswrapper[4811]: I0128 17:08:52.482119 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-dqjbx" Jan 28 17:08:52 crc kubenswrapper[4811]: I0128 17:08:52.536895 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dqjbx"] Jan 28 17:08:54 crc kubenswrapper[4811]: I0128 17:08:54.467984 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-dqjbx" podUID="d278b39f-59af-4da5-b964-e6e1f2b3137d" containerName="registry-server" containerID="cri-o://b7725e738951466b331e9026608a409112ffc7ff196f17dcbaaf0da8e14516aa" gracePeriod=2 Jan 28 17:08:55 crc kubenswrapper[4811]: I0128 17:08:55.413905 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dqjbx" Jan 28 17:08:55 crc kubenswrapper[4811]: I0128 17:08:55.476541 4811 generic.go:334] "Generic (PLEG): container finished" podID="d278b39f-59af-4da5-b964-e6e1f2b3137d" containerID="b7725e738951466b331e9026608a409112ffc7ff196f17dcbaaf0da8e14516aa" exitCode=0 Jan 28 17:08:55 crc kubenswrapper[4811]: I0128 17:08:55.476576 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dqjbx" event={"ID":"d278b39f-59af-4da5-b964-e6e1f2b3137d","Type":"ContainerDied","Data":"b7725e738951466b331e9026608a409112ffc7ff196f17dcbaaf0da8e14516aa"} Jan 28 17:08:55 crc kubenswrapper[4811]: I0128 17:08:55.476614 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dqjbx" Jan 28 17:08:55 crc kubenswrapper[4811]: I0128 17:08:55.476639 4811 scope.go:117] "RemoveContainer" containerID="b7725e738951466b331e9026608a409112ffc7ff196f17dcbaaf0da8e14516aa" Jan 28 17:08:55 crc kubenswrapper[4811]: I0128 17:08:55.476625 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dqjbx" event={"ID":"d278b39f-59af-4da5-b964-e6e1f2b3137d","Type":"ContainerDied","Data":"aff982fe8ae399d4bb2c83bcda9f86f9701dd45c14dbacb4e0b64183f85c39cd"} Jan 28 17:08:55 crc kubenswrapper[4811]: I0128 17:08:55.492526 4811 scope.go:117] "RemoveContainer" containerID="6e8e6a17ed76737d09d08b68bfd1ad8536ce55aa3d22b25ca30d75f1a51d5e20" Jan 28 17:08:55 crc kubenswrapper[4811]: I0128 17:08:55.509868 4811 scope.go:117] "RemoveContainer" containerID="915f04bc2af7e40ba745fa4be2168176c15d017d9306bebabc675c5868de2484" Jan 28 17:08:55 crc kubenswrapper[4811]: I0128 17:08:55.534511 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d278b39f-59af-4da5-b964-e6e1f2b3137d-utilities\") pod \"d278b39f-59af-4da5-b964-e6e1f2b3137d\" (UID: \"d278b39f-59af-4da5-b964-e6e1f2b3137d\") " Jan 28 17:08:55 crc kubenswrapper[4811]: I0128 17:08:55.534591 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d278b39f-59af-4da5-b964-e6e1f2b3137d-catalog-content\") pod \"d278b39f-59af-4da5-b964-e6e1f2b3137d\" (UID: \"d278b39f-59af-4da5-b964-e6e1f2b3137d\") " Jan 28 17:08:55 crc kubenswrapper[4811]: I0128 17:08:55.534674 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-js2sx\" (UniqueName: \"kubernetes.io/projected/d278b39f-59af-4da5-b964-e6e1f2b3137d-kube-api-access-js2sx\") pod \"d278b39f-59af-4da5-b964-e6e1f2b3137d\" (UID: \"d278b39f-59af-4da5-b964-e6e1f2b3137d\") " Jan 28 17:08:55 crc kubenswrapper[4811]: I0128 17:08:55.535490 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d278b39f-59af-4da5-b964-e6e1f2b3137d-utilities" (OuterVolumeSpecName: "utilities") pod "d278b39f-59af-4da5-b964-e6e1f2b3137d" (UID: "d278b39f-59af-4da5-b964-e6e1f2b3137d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:08:55 crc kubenswrapper[4811]: I0128 17:08:55.542037 4811 scope.go:117] "RemoveContainer" containerID="b7725e738951466b331e9026608a409112ffc7ff196f17dcbaaf0da8e14516aa" Jan 28 17:08:55 crc kubenswrapper[4811]: I0128 17:08:55.542059 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d278b39f-59af-4da5-b964-e6e1f2b3137d-kube-api-access-js2sx" (OuterVolumeSpecName: "kube-api-access-js2sx") pod "d278b39f-59af-4da5-b964-e6e1f2b3137d" (UID: "d278b39f-59af-4da5-b964-e6e1f2b3137d"). InnerVolumeSpecName "kube-api-access-js2sx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:08:55 crc kubenswrapper[4811]: E0128 17:08:55.542766 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7725e738951466b331e9026608a409112ffc7ff196f17dcbaaf0da8e14516aa\": container with ID starting with b7725e738951466b331e9026608a409112ffc7ff196f17dcbaaf0da8e14516aa not found: ID does not exist" containerID="b7725e738951466b331e9026608a409112ffc7ff196f17dcbaaf0da8e14516aa" Jan 28 17:08:55 crc kubenswrapper[4811]: I0128 17:08:55.542813 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7725e738951466b331e9026608a409112ffc7ff196f17dcbaaf0da8e14516aa"} err="failed to get container status \"b7725e738951466b331e9026608a409112ffc7ff196f17dcbaaf0da8e14516aa\": rpc error: code = NotFound desc = could not find container \"b7725e738951466b331e9026608a409112ffc7ff196f17dcbaaf0da8e14516aa\": container with ID starting with b7725e738951466b331e9026608a409112ffc7ff196f17dcbaaf0da8e14516aa not found: ID does not exist" Jan 28 17:08:55 crc kubenswrapper[4811]: I0128 17:08:55.542834 4811 scope.go:117] "RemoveContainer" containerID="6e8e6a17ed76737d09d08b68bfd1ad8536ce55aa3d22b25ca30d75f1a51d5e20" Jan 28 17:08:55 crc kubenswrapper[4811]: E0128 17:08:55.543086 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e8e6a17ed76737d09d08b68bfd1ad8536ce55aa3d22b25ca30d75f1a51d5e20\": container with ID starting with 6e8e6a17ed76737d09d08b68bfd1ad8536ce55aa3d22b25ca30d75f1a51d5e20 not found: ID does not exist" containerID="6e8e6a17ed76737d09d08b68bfd1ad8536ce55aa3d22b25ca30d75f1a51d5e20" Jan 28 17:08:55 crc kubenswrapper[4811]: I0128 17:08:55.543109 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e8e6a17ed76737d09d08b68bfd1ad8536ce55aa3d22b25ca30d75f1a51d5e20"} err="failed to get container status \"6e8e6a17ed76737d09d08b68bfd1ad8536ce55aa3d22b25ca30d75f1a51d5e20\": rpc error: code = NotFound desc = could not find container \"6e8e6a17ed76737d09d08b68bfd1ad8536ce55aa3d22b25ca30d75f1a51d5e20\": container with ID starting with 6e8e6a17ed76737d09d08b68bfd1ad8536ce55aa3d22b25ca30d75f1a51d5e20 not found: ID does not exist" Jan 28 17:08:55 crc kubenswrapper[4811]: I0128 17:08:55.543122 4811 scope.go:117] "RemoveContainer" containerID="915f04bc2af7e40ba745fa4be2168176c15d017d9306bebabc675c5868de2484" Jan 28 17:08:55 crc kubenswrapper[4811]: E0128 17:08:55.543370 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"915f04bc2af7e40ba745fa4be2168176c15d017d9306bebabc675c5868de2484\": container with ID starting with 915f04bc2af7e40ba745fa4be2168176c15d017d9306bebabc675c5868de2484 not found: ID does not exist" containerID="915f04bc2af7e40ba745fa4be2168176c15d017d9306bebabc675c5868de2484" Jan 28 17:08:55 crc kubenswrapper[4811]: I0128 17:08:55.543391 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"915f04bc2af7e40ba745fa4be2168176c15d017d9306bebabc675c5868de2484"} err="failed to get container status \"915f04bc2af7e40ba745fa4be2168176c15d017d9306bebabc675c5868de2484\": rpc error: code = NotFound desc = could not find container \"915f04bc2af7e40ba745fa4be2168176c15d017d9306bebabc675c5868de2484\": container with ID starting with 
915f04bc2af7e40ba745fa4be2168176c15d017d9306bebabc675c5868de2484 not found: ID does not exist" Jan 28 17:08:55 crc kubenswrapper[4811]: I0128 17:08:55.559859 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d278b39f-59af-4da5-b964-e6e1f2b3137d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d278b39f-59af-4da5-b964-e6e1f2b3137d" (UID: "d278b39f-59af-4da5-b964-e6e1f2b3137d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:08:55 crc kubenswrapper[4811]: I0128 17:08:55.636916 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d278b39f-59af-4da5-b964-e6e1f2b3137d-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 17:08:55 crc kubenswrapper[4811]: I0128 17:08:55.637233 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d278b39f-59af-4da5-b964-e6e1f2b3137d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 17:08:55 crc kubenswrapper[4811]: I0128 17:08:55.637251 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-js2sx\" (UniqueName: \"kubernetes.io/projected/d278b39f-59af-4da5-b964-e6e1f2b3137d-kube-api-access-js2sx\") on node \"crc\" DevicePath \"\"" Jan 28 17:08:55 crc kubenswrapper[4811]: I0128 17:08:55.812620 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dqjbx"] Jan 28 17:08:55 crc kubenswrapper[4811]: I0128 17:08:55.823636 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-dqjbx"] Jan 28 17:08:56 crc kubenswrapper[4811]: I0128 17:08:56.352242 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d278b39f-59af-4da5-b964-e6e1f2b3137d" path="/var/lib/kubelet/pods/d278b39f-59af-4da5-b964-e6e1f2b3137d/volumes" Jan 28 17:08:58 crc kubenswrapper[4811]: I0128 17:08:58.606353 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rdkg2"] Jan 28 17:08:58 crc kubenswrapper[4811]: E0128 17:08:58.606932 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d278b39f-59af-4da5-b964-e6e1f2b3137d" containerName="extract-utilities" Jan 28 17:08:58 crc kubenswrapper[4811]: I0128 17:08:58.606946 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d278b39f-59af-4da5-b964-e6e1f2b3137d" containerName="extract-utilities" Jan 28 17:08:58 crc kubenswrapper[4811]: E0128 17:08:58.606972 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d278b39f-59af-4da5-b964-e6e1f2b3137d" containerName="registry-server" Jan 28 17:08:58 crc kubenswrapper[4811]: I0128 17:08:58.606978 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d278b39f-59af-4da5-b964-e6e1f2b3137d" containerName="registry-server" Jan 28 17:08:58 crc kubenswrapper[4811]: E0128 17:08:58.606987 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d278b39f-59af-4da5-b964-e6e1f2b3137d" containerName="extract-content" Jan 28 17:08:58 crc kubenswrapper[4811]: I0128 17:08:58.606993 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d278b39f-59af-4da5-b964-e6e1f2b3137d" containerName="extract-content" Jan 28 17:08:58 crc kubenswrapper[4811]: I0128 17:08:58.607134 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d278b39f-59af-4da5-b964-e6e1f2b3137d" containerName="registry-server" Jan 28 17:08:58 crc kubenswrapper[4811]: 
I0128 17:08:58.608300 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rdkg2" Jan 28 17:08:58 crc kubenswrapper[4811]: I0128 17:08:58.618287 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rdkg2"] Jan 28 17:08:58 crc kubenswrapper[4811]: I0128 17:08:58.681804 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1399b195-06b6-42fd-b953-cb3499299c9b-utilities\") pod \"redhat-operators-rdkg2\" (UID: \"1399b195-06b6-42fd-b953-cb3499299c9b\") " pod="openshift-marketplace/redhat-operators-rdkg2" Jan 28 17:08:58 crc kubenswrapper[4811]: I0128 17:08:58.681987 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1399b195-06b6-42fd-b953-cb3499299c9b-catalog-content\") pod \"redhat-operators-rdkg2\" (UID: \"1399b195-06b6-42fd-b953-cb3499299c9b\") " pod="openshift-marketplace/redhat-operators-rdkg2" Jan 28 17:08:58 crc kubenswrapper[4811]: I0128 17:08:58.682030 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z48gd\" (UniqueName: \"kubernetes.io/projected/1399b195-06b6-42fd-b953-cb3499299c9b-kube-api-access-z48gd\") pod \"redhat-operators-rdkg2\" (UID: \"1399b195-06b6-42fd-b953-cb3499299c9b\") " pod="openshift-marketplace/redhat-operators-rdkg2" Jan 28 17:08:58 crc kubenswrapper[4811]: I0128 17:08:58.783519 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z48gd\" (UniqueName: \"kubernetes.io/projected/1399b195-06b6-42fd-b953-cb3499299c9b-kube-api-access-z48gd\") pod \"redhat-operators-rdkg2\" (UID: \"1399b195-06b6-42fd-b953-cb3499299c9b\") " pod="openshift-marketplace/redhat-operators-rdkg2" Jan 28 17:08:58 crc kubenswrapper[4811]: I0128 17:08:58.783582 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1399b195-06b6-42fd-b953-cb3499299c9b-utilities\") pod \"redhat-operators-rdkg2\" (UID: \"1399b195-06b6-42fd-b953-cb3499299c9b\") " pod="openshift-marketplace/redhat-operators-rdkg2" Jan 28 17:08:58 crc kubenswrapper[4811]: I0128 17:08:58.783711 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1399b195-06b6-42fd-b953-cb3499299c9b-catalog-content\") pod \"redhat-operators-rdkg2\" (UID: \"1399b195-06b6-42fd-b953-cb3499299c9b\") " pod="openshift-marketplace/redhat-operators-rdkg2" Jan 28 17:08:58 crc kubenswrapper[4811]: I0128 17:08:58.784385 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1399b195-06b6-42fd-b953-cb3499299c9b-catalog-content\") pod \"redhat-operators-rdkg2\" (UID: \"1399b195-06b6-42fd-b953-cb3499299c9b\") " pod="openshift-marketplace/redhat-operators-rdkg2" Jan 28 17:08:58 crc kubenswrapper[4811]: I0128 17:08:58.784402 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1399b195-06b6-42fd-b953-cb3499299c9b-utilities\") pod \"redhat-operators-rdkg2\" (UID: \"1399b195-06b6-42fd-b953-cb3499299c9b\") " pod="openshift-marketplace/redhat-operators-rdkg2" Jan 28 17:08:58 crc kubenswrapper[4811]: I0128 17:08:58.802417 4811 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z48gd\" (UniqueName: \"kubernetes.io/projected/1399b195-06b6-42fd-b953-cb3499299c9b-kube-api-access-z48gd\") pod \"redhat-operators-rdkg2\" (UID: \"1399b195-06b6-42fd-b953-cb3499299c9b\") " pod="openshift-marketplace/redhat-operators-rdkg2" Jan 28 17:08:58 crc kubenswrapper[4811]: I0128 17:08:58.929098 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rdkg2" Jan 28 17:08:59 crc kubenswrapper[4811]: I0128 17:08:59.358865 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rdkg2"] Jan 28 17:08:59 crc kubenswrapper[4811]: I0128 17:08:59.510059 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rdkg2" event={"ID":"1399b195-06b6-42fd-b953-cb3499299c9b","Type":"ContainerStarted","Data":"6171967946f4390608a02835f4b7c1ff9e5eb7a9b2a640d235006e2b01bf67ba"} Jan 28 17:09:00 crc kubenswrapper[4811]: I0128 17:09:00.520468 4811 generic.go:334] "Generic (PLEG): container finished" podID="1399b195-06b6-42fd-b953-cb3499299c9b" containerID="cc2687903c722518362aa2409542c06b86922eec70046fe9bfbe0724fa1c9977" exitCode=0 Jan 28 17:09:00 crc kubenswrapper[4811]: I0128 17:09:00.520540 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rdkg2" event={"ID":"1399b195-06b6-42fd-b953-cb3499299c9b","Type":"ContainerDied","Data":"cc2687903c722518362aa2409542c06b86922eec70046fe9bfbe0724fa1c9977"} Jan 28 17:09:01 crc kubenswrapper[4811]: I0128 17:09:01.534325 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rdkg2" event={"ID":"1399b195-06b6-42fd-b953-cb3499299c9b","Type":"ContainerStarted","Data":"911b0ac34f1473b28bc45e29224cf780a682900e3b5cf254231b8d80b680fbe8"} Jan 28 17:09:02 crc kubenswrapper[4811]: I0128 17:09:02.548154 4811 generic.go:334] "Generic (PLEG): container finished" podID="1399b195-06b6-42fd-b953-cb3499299c9b" containerID="911b0ac34f1473b28bc45e29224cf780a682900e3b5cf254231b8d80b680fbe8" exitCode=0 Jan 28 17:09:02 crc kubenswrapper[4811]: I0128 17:09:02.548254 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rdkg2" event={"ID":"1399b195-06b6-42fd-b953-cb3499299c9b","Type":"ContainerDied","Data":"911b0ac34f1473b28bc45e29224cf780a682900e3b5cf254231b8d80b680fbe8"} Jan 28 17:09:03 crc kubenswrapper[4811]: I0128 17:09:03.560265 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rdkg2" event={"ID":"1399b195-06b6-42fd-b953-cb3499299c9b","Type":"ContainerStarted","Data":"ddbf03478c8d4f2761b24afd0e359b5cc813b1ce1388efc8ad6e5943a6076bab"} Jan 28 17:09:03 crc kubenswrapper[4811]: I0128 17:09:03.577300 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rdkg2" podStartSLOduration=2.83496571 podStartE2EDuration="5.577281888s" podCreationTimestamp="2026-01-28 17:08:58 +0000 UTC" firstStartedPulling="2026-01-28 17:09:00.522364367 +0000 UTC m=+5033.276727960" lastFinishedPulling="2026-01-28 17:09:03.264680545 +0000 UTC m=+5036.019044138" observedRunningTime="2026-01-28 17:09:03.57589137 +0000 UTC m=+5036.330254983" watchObservedRunningTime="2026-01-28 17:09:03.577281888 +0000 UTC m=+5036.331645481" Jan 28 17:09:06 crc kubenswrapper[4811]: I0128 17:09:06.339256 4811 scope.go:117] "RemoveContainer" 
containerID="81a84787cf417a50710a20c50547d9e6d7f6adc8c98070a304ddbd77fa4c5ebc" Jan 28 17:09:06 crc kubenswrapper[4811]: E0128 17:09:06.339833 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:09:08 crc kubenswrapper[4811]: I0128 17:09:08.929998 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rdkg2" Jan 28 17:09:08 crc kubenswrapper[4811]: I0128 17:09:08.930089 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rdkg2" Jan 28 17:09:08 crc kubenswrapper[4811]: I0128 17:09:08.972363 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rdkg2" Jan 28 17:09:09 crc kubenswrapper[4811]: I0128 17:09:09.651732 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rdkg2" Jan 28 17:09:09 crc kubenswrapper[4811]: I0128 17:09:09.696082 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rdkg2"] Jan 28 17:09:11 crc kubenswrapper[4811]: I0128 17:09:11.620504 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rdkg2" podUID="1399b195-06b6-42fd-b953-cb3499299c9b" containerName="registry-server" containerID="cri-o://ddbf03478c8d4f2761b24afd0e359b5cc813b1ce1388efc8ad6e5943a6076bab" gracePeriod=2 Jan 28 17:09:12 crc kubenswrapper[4811]: I0128 17:09:12.176025 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rdkg2" Jan 28 17:09:12 crc kubenswrapper[4811]: I0128 17:09:12.198537 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1399b195-06b6-42fd-b953-cb3499299c9b-catalog-content\") pod \"1399b195-06b6-42fd-b953-cb3499299c9b\" (UID: \"1399b195-06b6-42fd-b953-cb3499299c9b\") " Jan 28 17:09:12 crc kubenswrapper[4811]: I0128 17:09:12.198680 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z48gd\" (UniqueName: \"kubernetes.io/projected/1399b195-06b6-42fd-b953-cb3499299c9b-kube-api-access-z48gd\") pod \"1399b195-06b6-42fd-b953-cb3499299c9b\" (UID: \"1399b195-06b6-42fd-b953-cb3499299c9b\") " Jan 28 17:09:12 crc kubenswrapper[4811]: I0128 17:09:12.198711 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1399b195-06b6-42fd-b953-cb3499299c9b-utilities\") pod \"1399b195-06b6-42fd-b953-cb3499299c9b\" (UID: \"1399b195-06b6-42fd-b953-cb3499299c9b\") " Jan 28 17:09:12 crc kubenswrapper[4811]: I0128 17:09:12.199797 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1399b195-06b6-42fd-b953-cb3499299c9b-utilities" (OuterVolumeSpecName: "utilities") pod "1399b195-06b6-42fd-b953-cb3499299c9b" (UID: "1399b195-06b6-42fd-b953-cb3499299c9b"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:09:12 crc kubenswrapper[4811]: I0128 17:09:12.207677 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1399b195-06b6-42fd-b953-cb3499299c9b-kube-api-access-z48gd" (OuterVolumeSpecName: "kube-api-access-z48gd") pod "1399b195-06b6-42fd-b953-cb3499299c9b" (UID: "1399b195-06b6-42fd-b953-cb3499299c9b"). InnerVolumeSpecName "kube-api-access-z48gd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:09:12 crc kubenswrapper[4811]: I0128 17:09:12.300866 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z48gd\" (UniqueName: \"kubernetes.io/projected/1399b195-06b6-42fd-b953-cb3499299c9b-kube-api-access-z48gd\") on node \"crc\" DevicePath \"\"" Jan 28 17:09:12 crc kubenswrapper[4811]: I0128 17:09:12.300913 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1399b195-06b6-42fd-b953-cb3499299c9b-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 17:09:12 crc kubenswrapper[4811]: I0128 17:09:12.331031 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1399b195-06b6-42fd-b953-cb3499299c9b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1399b195-06b6-42fd-b953-cb3499299c9b" (UID: "1399b195-06b6-42fd-b953-cb3499299c9b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:09:12 crc kubenswrapper[4811]: I0128 17:09:12.403882 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1399b195-06b6-42fd-b953-cb3499299c9b-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 17:09:12 crc kubenswrapper[4811]: I0128 17:09:12.628900 4811 generic.go:334] "Generic (PLEG): container finished" podID="1399b195-06b6-42fd-b953-cb3499299c9b" containerID="ddbf03478c8d4f2761b24afd0e359b5cc813b1ce1388efc8ad6e5943a6076bab" exitCode=0 Jan 28 17:09:12 crc kubenswrapper[4811]: I0128 17:09:12.628964 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rdkg2" Jan 28 17:09:12 crc kubenswrapper[4811]: I0128 17:09:12.628949 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rdkg2" event={"ID":"1399b195-06b6-42fd-b953-cb3499299c9b","Type":"ContainerDied","Data":"ddbf03478c8d4f2761b24afd0e359b5cc813b1ce1388efc8ad6e5943a6076bab"} Jan 28 17:09:12 crc kubenswrapper[4811]: I0128 17:09:12.629369 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rdkg2" event={"ID":"1399b195-06b6-42fd-b953-cb3499299c9b","Type":"ContainerDied","Data":"6171967946f4390608a02835f4b7c1ff9e5eb7a9b2a640d235006e2b01bf67ba"} Jan 28 17:09:12 crc kubenswrapper[4811]: I0128 17:09:12.629399 4811 scope.go:117] "RemoveContainer" containerID="ddbf03478c8d4f2761b24afd0e359b5cc813b1ce1388efc8ad6e5943a6076bab" Jan 28 17:09:12 crc kubenswrapper[4811]: I0128 17:09:12.651724 4811 scope.go:117] "RemoveContainer" containerID="911b0ac34f1473b28bc45e29224cf780a682900e3b5cf254231b8d80b680fbe8" Jan 28 17:09:12 crc kubenswrapper[4811]: I0128 17:09:12.684790 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rdkg2"] Jan 28 17:09:12 crc kubenswrapper[4811]: I0128 17:09:12.685609 4811 scope.go:117] "RemoveContainer" containerID="cc2687903c722518362aa2409542c06b86922eec70046fe9bfbe0724fa1c9977" Jan 28 17:09:12 crc kubenswrapper[4811]: I0128 17:09:12.699439 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rdkg2"] Jan 28 17:09:12 crc kubenswrapper[4811]: I0128 17:09:12.719991 4811 scope.go:117] "RemoveContainer" containerID="ddbf03478c8d4f2761b24afd0e359b5cc813b1ce1388efc8ad6e5943a6076bab" Jan 28 17:09:12 crc kubenswrapper[4811]: E0128 17:09:12.720492 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ddbf03478c8d4f2761b24afd0e359b5cc813b1ce1388efc8ad6e5943a6076bab\": container with ID starting with ddbf03478c8d4f2761b24afd0e359b5cc813b1ce1388efc8ad6e5943a6076bab not found: ID does not exist" containerID="ddbf03478c8d4f2761b24afd0e359b5cc813b1ce1388efc8ad6e5943a6076bab" Jan 28 17:09:12 crc kubenswrapper[4811]: I0128 17:09:12.720524 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ddbf03478c8d4f2761b24afd0e359b5cc813b1ce1388efc8ad6e5943a6076bab"} err="failed to get container status \"ddbf03478c8d4f2761b24afd0e359b5cc813b1ce1388efc8ad6e5943a6076bab\": rpc error: code = NotFound desc = could not find container \"ddbf03478c8d4f2761b24afd0e359b5cc813b1ce1388efc8ad6e5943a6076bab\": container with ID starting with ddbf03478c8d4f2761b24afd0e359b5cc813b1ce1388efc8ad6e5943a6076bab not found: ID does not exist" Jan 28 17:09:12 crc kubenswrapper[4811]: I0128 17:09:12.720550 4811 scope.go:117] "RemoveContainer" containerID="911b0ac34f1473b28bc45e29224cf780a682900e3b5cf254231b8d80b680fbe8" Jan 28 17:09:12 crc kubenswrapper[4811]: E0128 17:09:12.720811 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"911b0ac34f1473b28bc45e29224cf780a682900e3b5cf254231b8d80b680fbe8\": container with ID starting with 911b0ac34f1473b28bc45e29224cf780a682900e3b5cf254231b8d80b680fbe8 not found: ID does not exist" containerID="911b0ac34f1473b28bc45e29224cf780a682900e3b5cf254231b8d80b680fbe8" Jan 28 17:09:12 crc kubenswrapper[4811]: I0128 17:09:12.720864 4811 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"911b0ac34f1473b28bc45e29224cf780a682900e3b5cf254231b8d80b680fbe8"} err="failed to get container status \"911b0ac34f1473b28bc45e29224cf780a682900e3b5cf254231b8d80b680fbe8\": rpc error: code = NotFound desc = could not find container \"911b0ac34f1473b28bc45e29224cf780a682900e3b5cf254231b8d80b680fbe8\": container with ID starting with 911b0ac34f1473b28bc45e29224cf780a682900e3b5cf254231b8d80b680fbe8 not found: ID does not exist" Jan 28 17:09:12 crc kubenswrapper[4811]: I0128 17:09:12.720894 4811 scope.go:117] "RemoveContainer" containerID="cc2687903c722518362aa2409542c06b86922eec70046fe9bfbe0724fa1c9977" Jan 28 17:09:12 crc kubenswrapper[4811]: E0128 17:09:12.721294 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc2687903c722518362aa2409542c06b86922eec70046fe9bfbe0724fa1c9977\": container with ID starting with cc2687903c722518362aa2409542c06b86922eec70046fe9bfbe0724fa1c9977 not found: ID does not exist" containerID="cc2687903c722518362aa2409542c06b86922eec70046fe9bfbe0724fa1c9977" Jan 28 17:09:12 crc kubenswrapper[4811]: I0128 17:09:12.721325 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc2687903c722518362aa2409542c06b86922eec70046fe9bfbe0724fa1c9977"} err="failed to get container status \"cc2687903c722518362aa2409542c06b86922eec70046fe9bfbe0724fa1c9977\": rpc error: code = NotFound desc = could not find container \"cc2687903c722518362aa2409542c06b86922eec70046fe9bfbe0724fa1c9977\": container with ID starting with cc2687903c722518362aa2409542c06b86922eec70046fe9bfbe0724fa1c9977 not found: ID does not exist" Jan 28 17:09:14 crc kubenswrapper[4811]: I0128 17:09:14.348866 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1399b195-06b6-42fd-b953-cb3499299c9b" path="/var/lib/kubelet/pods/1399b195-06b6-42fd-b953-cb3499299c9b/volumes" Jan 28 17:09:19 crc kubenswrapper[4811]: I0128 17:09:19.339644 4811 scope.go:117] "RemoveContainer" containerID="81a84787cf417a50710a20c50547d9e6d7f6adc8c98070a304ddbd77fa4c5ebc" Jan 28 17:09:19 crc kubenswrapper[4811]: E0128 17:09:19.340361 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:09:30 crc kubenswrapper[4811]: I0128 17:09:30.113191 4811 scope.go:117] "RemoveContainer" containerID="331128c1b82f83cc760c7816874ada2dba6116550501e6e7e88f5a9b2777c088" Jan 28 17:09:30 crc kubenswrapper[4811]: I0128 17:09:30.339535 4811 scope.go:117] "RemoveContainer" containerID="81a84787cf417a50710a20c50547d9e6d7f6adc8c98070a304ddbd77fa4c5ebc" Jan 28 17:09:30 crc kubenswrapper[4811]: E0128 17:09:30.340172 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:09:41 crc 
kubenswrapper[4811]: I0128 17:09:41.340091 4811 scope.go:117] "RemoveContainer" containerID="81a84787cf417a50710a20c50547d9e6d7f6adc8c98070a304ddbd77fa4c5ebc" Jan 28 17:09:41 crc kubenswrapper[4811]: E0128 17:09:41.340807 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:09:56 crc kubenswrapper[4811]: I0128 17:09:56.339902 4811 scope.go:117] "RemoveContainer" containerID="81a84787cf417a50710a20c50547d9e6d7f6adc8c98070a304ddbd77fa4c5ebc" Jan 28 17:09:56 crc kubenswrapper[4811]: E0128 17:09:56.340633 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:10:11 crc kubenswrapper[4811]: I0128 17:10:11.339703 4811 scope.go:117] "RemoveContainer" containerID="81a84787cf417a50710a20c50547d9e6d7f6adc8c98070a304ddbd77fa4c5ebc" Jan 28 17:10:11 crc kubenswrapper[4811]: E0128 17:10:11.340398 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:10:24 crc kubenswrapper[4811]: I0128 17:10:24.340369 4811 scope.go:117] "RemoveContainer" containerID="81a84787cf417a50710a20c50547d9e6d7f6adc8c98070a304ddbd77fa4c5ebc" Jan 28 17:10:24 crc kubenswrapper[4811]: E0128 17:10:24.341330 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:10:39 crc kubenswrapper[4811]: I0128 17:10:39.340738 4811 scope.go:117] "RemoveContainer" containerID="81a84787cf417a50710a20c50547d9e6d7f6adc8c98070a304ddbd77fa4c5ebc" Jan 28 17:10:39 crc kubenswrapper[4811]: E0128 17:10:39.341715 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:10:53 crc kubenswrapper[4811]: I0128 17:10:53.339830 4811 scope.go:117] "RemoveContainer" containerID="81a84787cf417a50710a20c50547d9e6d7f6adc8c98070a304ddbd77fa4c5ebc" Jan 28 17:10:53 crc 
kubenswrapper[4811]: E0128 17:10:53.340743 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:11:07 crc kubenswrapper[4811]: I0128 17:11:07.339258 4811 scope.go:117] "RemoveContainer" containerID="81a84787cf417a50710a20c50547d9e6d7f6adc8c98070a304ddbd77fa4c5ebc" Jan 28 17:11:07 crc kubenswrapper[4811]: E0128 17:11:07.340078 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:11:22 crc kubenswrapper[4811]: I0128 17:11:22.339655 4811 scope.go:117] "RemoveContainer" containerID="81a84787cf417a50710a20c50547d9e6d7f6adc8c98070a304ddbd77fa4c5ebc" Jan 28 17:11:22 crc kubenswrapper[4811]: E0128 17:11:22.340370 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:11:35 crc kubenswrapper[4811]: I0128 17:11:35.339746 4811 scope.go:117] "RemoveContainer" containerID="81a84787cf417a50710a20c50547d9e6d7f6adc8c98070a304ddbd77fa4c5ebc" Jan 28 17:11:35 crc kubenswrapper[4811]: E0128 17:11:35.340773 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:11:49 crc kubenswrapper[4811]: I0128 17:11:49.339915 4811 scope.go:117] "RemoveContainer" containerID="81a84787cf417a50710a20c50547d9e6d7f6adc8c98070a304ddbd77fa4c5ebc" Jan 28 17:11:49 crc kubenswrapper[4811]: E0128 17:11:49.340637 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:12:00 crc kubenswrapper[4811]: I0128 17:12:00.339258 4811 scope.go:117] "RemoveContainer" containerID="81a84787cf417a50710a20c50547d9e6d7f6adc8c98070a304ddbd77fa4c5ebc" Jan 28 17:12:00 crc kubenswrapper[4811]: E0128 17:12:00.340006 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:12:12 crc kubenswrapper[4811]: I0128 17:12:12.338957 4811 scope.go:117] "RemoveContainer" containerID="81a84787cf417a50710a20c50547d9e6d7f6adc8c98070a304ddbd77fa4c5ebc" Jan 28 17:12:12 crc kubenswrapper[4811]: I0128 17:12:12.892145 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"aa24462d3859954f62e518a2517851c0125e8860de36202094bb80d1adc65f98"} Jan 28 17:12:25 crc kubenswrapper[4811]: I0128 17:12:25.830852 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-copy-data"] Jan 28 17:12:25 crc kubenswrapper[4811]: E0128 17:12:25.831777 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1399b195-06b6-42fd-b953-cb3499299c9b" containerName="registry-server" Jan 28 17:12:25 crc kubenswrapper[4811]: I0128 17:12:25.831793 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="1399b195-06b6-42fd-b953-cb3499299c9b" containerName="registry-server" Jan 28 17:12:25 crc kubenswrapper[4811]: E0128 17:12:25.831822 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1399b195-06b6-42fd-b953-cb3499299c9b" containerName="extract-content" Jan 28 17:12:25 crc kubenswrapper[4811]: I0128 17:12:25.831831 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="1399b195-06b6-42fd-b953-cb3499299c9b" containerName="extract-content" Jan 28 17:12:25 crc kubenswrapper[4811]: E0128 17:12:25.831840 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1399b195-06b6-42fd-b953-cb3499299c9b" containerName="extract-utilities" Jan 28 17:12:25 crc kubenswrapper[4811]: I0128 17:12:25.831848 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="1399b195-06b6-42fd-b953-cb3499299c9b" containerName="extract-utilities" Jan 28 17:12:25 crc kubenswrapper[4811]: I0128 17:12:25.832024 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="1399b195-06b6-42fd-b953-cb3499299c9b" containerName="registry-server" Jan 28 17:12:25 crc kubenswrapper[4811]: I0128 17:12:25.832807 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-copy-data" Jan 28 17:12:25 crc kubenswrapper[4811]: I0128 17:12:25.838333 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-g8fh4" Jan 28 17:12:25 crc kubenswrapper[4811]: I0128 17:12:25.840738 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"] Jan 28 17:12:25 crc kubenswrapper[4811]: I0128 17:12:25.951495 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-e11f47f7-d9cf-4161-9649-a4cd5ba461cb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e11f47f7-d9cf-4161-9649-a4cd5ba461cb\") pod \"mariadb-copy-data\" (UID: \"b41b9853-60d7-4111-86b2-5f6e4f9d242f\") " pod="openstack/mariadb-copy-data" Jan 28 17:12:25 crc kubenswrapper[4811]: I0128 17:12:25.951583 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxq7j\" (UniqueName: \"kubernetes.io/projected/b41b9853-60d7-4111-86b2-5f6e4f9d242f-kube-api-access-qxq7j\") pod \"mariadb-copy-data\" (UID: \"b41b9853-60d7-4111-86b2-5f6e4f9d242f\") " pod="openstack/mariadb-copy-data" Jan 28 17:12:26 crc kubenswrapper[4811]: I0128 17:12:26.052901 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-e11f47f7-d9cf-4161-9649-a4cd5ba461cb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e11f47f7-d9cf-4161-9649-a4cd5ba461cb\") pod \"mariadb-copy-data\" (UID: \"b41b9853-60d7-4111-86b2-5f6e4f9d242f\") " pod="openstack/mariadb-copy-data" Jan 28 17:12:26 crc kubenswrapper[4811]: I0128 17:12:26.052971 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxq7j\" (UniqueName: \"kubernetes.io/projected/b41b9853-60d7-4111-86b2-5f6e4f9d242f-kube-api-access-qxq7j\") pod \"mariadb-copy-data\" (UID: \"b41b9853-60d7-4111-86b2-5f6e4f9d242f\") " pod="openstack/mariadb-copy-data" Jan 28 17:12:26 crc kubenswrapper[4811]: I0128 17:12:26.057742 4811 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 28 17:12:26 crc kubenswrapper[4811]: I0128 17:12:26.057796 4811 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-e11f47f7-d9cf-4161-9649-a4cd5ba461cb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e11f47f7-d9cf-4161-9649-a4cd5ba461cb\") pod \"mariadb-copy-data\" (UID: \"b41b9853-60d7-4111-86b2-5f6e4f9d242f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/c64f182193d404cb54134530471a25a34ce7a78dcb2cedd835ab667fd6937b96/globalmount\"" pod="openstack/mariadb-copy-data" Jan 28 17:12:26 crc kubenswrapper[4811]: I0128 17:12:26.076600 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxq7j\" (UniqueName: \"kubernetes.io/projected/b41b9853-60d7-4111-86b2-5f6e4f9d242f-kube-api-access-qxq7j\") pod \"mariadb-copy-data\" (UID: \"b41b9853-60d7-4111-86b2-5f6e4f9d242f\") " pod="openstack/mariadb-copy-data" Jan 28 17:12:26 crc kubenswrapper[4811]: I0128 17:12:26.082755 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-e11f47f7-d9cf-4161-9649-a4cd5ba461cb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e11f47f7-d9cf-4161-9649-a4cd5ba461cb\") pod \"mariadb-copy-data\" (UID: \"b41b9853-60d7-4111-86b2-5f6e4f9d242f\") " pod="openstack/mariadb-copy-data" Jan 28 17:12:26 crc kubenswrapper[4811]: I0128 17:12:26.156612 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data" Jan 28 17:12:26 crc kubenswrapper[4811]: I0128 17:12:26.631501 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"] Jan 28 17:12:26 crc kubenswrapper[4811]: I0128 17:12:26.986820 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"b41b9853-60d7-4111-86b2-5f6e4f9d242f","Type":"ContainerStarted","Data":"c94a3a618594937ef7e4af328a86184c8a0c64bea9a4594946c5fac351c490d4"} Jan 28 17:12:26 crc kubenswrapper[4811]: I0128 17:12:26.987263 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"b41b9853-60d7-4111-86b2-5f6e4f9d242f","Type":"ContainerStarted","Data":"76883899500cbe114959b967b38a4d1e68d4b1af411fb81085fbba2560cd2e93"} Jan 28 17:12:27 crc kubenswrapper[4811]: I0128 17:12:27.002218 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-copy-data" podStartSLOduration=3.002200898 podStartE2EDuration="3.002200898s" podCreationTimestamp="2026-01-28 17:12:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:12:26.999339521 +0000 UTC m=+5239.753703104" watchObservedRunningTime="2026-01-28 17:12:27.002200898 +0000 UTC m=+5239.756564481" Jan 28 17:12:29 crc kubenswrapper[4811]: I0128 17:12:29.462350 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Jan 28 17:12:29 crc kubenswrapper[4811]: I0128 17:12:29.463818 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Jan 28 17:12:29 crc kubenswrapper[4811]: I0128 17:12:29.471363 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Jan 28 17:12:29 crc kubenswrapper[4811]: I0128 17:12:29.609241 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nddrn\" (UniqueName: \"kubernetes.io/projected/115a55aa-bd2e-4872-8c21-91b510f93692-kube-api-access-nddrn\") pod \"mariadb-client\" (UID: \"115a55aa-bd2e-4872-8c21-91b510f93692\") " pod="openstack/mariadb-client" Jan 28 17:12:29 crc kubenswrapper[4811]: I0128 17:12:29.711209 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nddrn\" (UniqueName: \"kubernetes.io/projected/115a55aa-bd2e-4872-8c21-91b510f93692-kube-api-access-nddrn\") pod \"mariadb-client\" (UID: \"115a55aa-bd2e-4872-8c21-91b510f93692\") " pod="openstack/mariadb-client" Jan 28 17:12:29 crc kubenswrapper[4811]: I0128 17:12:29.735495 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nddrn\" (UniqueName: \"kubernetes.io/projected/115a55aa-bd2e-4872-8c21-91b510f93692-kube-api-access-nddrn\") pod \"mariadb-client\" (UID: \"115a55aa-bd2e-4872-8c21-91b510f93692\") " pod="openstack/mariadb-client" Jan 28 17:12:29 crc kubenswrapper[4811]: I0128 17:12:29.788884 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Jan 28 17:12:30 crc kubenswrapper[4811]: I0128 17:12:30.215933 4811 scope.go:117] "RemoveContainer" containerID="756a62fe1d19e54304eed8ff436bae7ee193151fdecef503b38a1e123e892c72" Jan 28 17:12:30 crc kubenswrapper[4811]: I0128 17:12:30.217197 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Jan 28 17:12:30 crc kubenswrapper[4811]: I0128 17:12:30.302369 4811 scope.go:117] "RemoveContainer" containerID="7055b7934b4296a3bf57ec5ee52ce3b3a3a6cbfb576c0da2edbce6b46d7f4f51" Jan 28 17:12:30 crc kubenswrapper[4811]: I0128 17:12:30.321485 4811 scope.go:117] "RemoveContainer" containerID="4888d44d82390c4c2961477651a6164a249728702067c2317fb7fda465b4c21a" Jan 28 17:12:31 crc kubenswrapper[4811]: I0128 17:12:31.027795 4811 generic.go:334] "Generic (PLEG): container finished" podID="115a55aa-bd2e-4872-8c21-91b510f93692" containerID="07a8b98e5d1fd06278747cf9f48a130627fa149684ffb0fc255c9a572ac53f91" exitCode=0 Jan 28 17:12:31 crc kubenswrapper[4811]: I0128 17:12:31.027862 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"115a55aa-bd2e-4872-8c21-91b510f93692","Type":"ContainerDied","Data":"07a8b98e5d1fd06278747cf9f48a130627fa149684ffb0fc255c9a572ac53f91"} Jan 28 17:12:31 crc kubenswrapper[4811]: I0128 17:12:31.028398 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"115a55aa-bd2e-4872-8c21-91b510f93692","Type":"ContainerStarted","Data":"2dcdf001abb0196b3cac7e947d00a196f76dc30528acb913fdd6af0a50f5d35c"} Jan 28 17:12:32 crc kubenswrapper[4811]: I0128 17:12:32.316850 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Jan 28 17:12:32 crc kubenswrapper[4811]: I0128 17:12:32.339595 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_115a55aa-bd2e-4872-8c21-91b510f93692/mariadb-client/0.log" Jan 28 17:12:32 crc kubenswrapper[4811]: I0128 17:12:32.369757 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Jan 28 17:12:32 crc kubenswrapper[4811]: I0128 17:12:32.377175 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Jan 28 17:12:32 crc kubenswrapper[4811]: I0128 17:12:32.456902 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nddrn\" (UniqueName: \"kubernetes.io/projected/115a55aa-bd2e-4872-8c21-91b510f93692-kube-api-access-nddrn\") pod \"115a55aa-bd2e-4872-8c21-91b510f93692\" (UID: \"115a55aa-bd2e-4872-8c21-91b510f93692\") " Jan 28 17:12:32 crc kubenswrapper[4811]: I0128 17:12:32.462102 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/115a55aa-bd2e-4872-8c21-91b510f93692-kube-api-access-nddrn" (OuterVolumeSpecName: "kube-api-access-nddrn") pod "115a55aa-bd2e-4872-8c21-91b510f93692" (UID: "115a55aa-bd2e-4872-8c21-91b510f93692"). InnerVolumeSpecName "kube-api-access-nddrn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:12:32 crc kubenswrapper[4811]: I0128 17:12:32.478362 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Jan 28 17:12:32 crc kubenswrapper[4811]: E0128 17:12:32.478755 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="115a55aa-bd2e-4872-8c21-91b510f93692" containerName="mariadb-client" Jan 28 17:12:32 crc kubenswrapper[4811]: I0128 17:12:32.478778 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="115a55aa-bd2e-4872-8c21-91b510f93692" containerName="mariadb-client" Jan 28 17:12:32 crc kubenswrapper[4811]: I0128 17:12:32.479024 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="115a55aa-bd2e-4872-8c21-91b510f93692" containerName="mariadb-client" Jan 28 17:12:32 crc kubenswrapper[4811]: I0128 17:12:32.479908 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Jan 28 17:12:32 crc kubenswrapper[4811]: I0128 17:12:32.487278 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Jan 28 17:12:32 crc kubenswrapper[4811]: I0128 17:12:32.558861 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nddrn\" (UniqueName: \"kubernetes.io/projected/115a55aa-bd2e-4872-8c21-91b510f93692-kube-api-access-nddrn\") on node \"crc\" DevicePath \"\"" Jan 28 17:12:32 crc kubenswrapper[4811]: I0128 17:12:32.661478 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j4xfx\" (UniqueName: \"kubernetes.io/projected/f9d38ae0-4dc4-473b-bfc8-172cfb5f5c44-kube-api-access-j4xfx\") pod \"mariadb-client\" (UID: \"f9d38ae0-4dc4-473b-bfc8-172cfb5f5c44\") " pod="openstack/mariadb-client" Jan 28 17:12:32 crc kubenswrapper[4811]: I0128 17:12:32.763174 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j4xfx\" (UniqueName: \"kubernetes.io/projected/f9d38ae0-4dc4-473b-bfc8-172cfb5f5c44-kube-api-access-j4xfx\") pod \"mariadb-client\" (UID: \"f9d38ae0-4dc4-473b-bfc8-172cfb5f5c44\") " pod="openstack/mariadb-client" Jan 28 17:12:32 crc kubenswrapper[4811]: I0128 17:12:32.780398 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j4xfx\" (UniqueName: \"kubernetes.io/projected/f9d38ae0-4dc4-473b-bfc8-172cfb5f5c44-kube-api-access-j4xfx\") pod \"mariadb-client\" (UID: \"f9d38ae0-4dc4-473b-bfc8-172cfb5f5c44\") " pod="openstack/mariadb-client" Jan 28 17:12:32 crc kubenswrapper[4811]: I0128 17:12:32.816581 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Jan 28 17:12:33 crc kubenswrapper[4811]: I0128 17:12:33.044901 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2dcdf001abb0196b3cac7e947d00a196f76dc30528acb913fdd6af0a50f5d35c" Jan 28 17:12:33 crc kubenswrapper[4811]: I0128 17:12:33.045218 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Jan 28 17:12:33 crc kubenswrapper[4811]: I0128 17:12:33.064132 4811 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/mariadb-client" oldPodUID="115a55aa-bd2e-4872-8c21-91b510f93692" podUID="f9d38ae0-4dc4-473b-bfc8-172cfb5f5c44" Jan 28 17:12:33 crc kubenswrapper[4811]: I0128 17:12:33.239764 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Jan 28 17:12:34 crc kubenswrapper[4811]: I0128 17:12:34.053642 4811 generic.go:334] "Generic (PLEG): container finished" podID="f9d38ae0-4dc4-473b-bfc8-172cfb5f5c44" containerID="1a6dad311808ca0727fae58882ce830a86a4ad82dea6db2166930f8d37c5afc1" exitCode=0 Jan 28 17:12:34 crc kubenswrapper[4811]: I0128 17:12:34.053695 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"f9d38ae0-4dc4-473b-bfc8-172cfb5f5c44","Type":"ContainerDied","Data":"1a6dad311808ca0727fae58882ce830a86a4ad82dea6db2166930f8d37c5afc1"} Jan 28 17:12:34 crc kubenswrapper[4811]: I0128 17:12:34.053964 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"f9d38ae0-4dc4-473b-bfc8-172cfb5f5c44","Type":"ContainerStarted","Data":"c40525fc2676681ee6ebd88e1c9d6415280449706f0815caa17a5fd9ce1943d4"} Jan 28 17:12:34 crc kubenswrapper[4811]: I0128 17:12:34.353725 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="115a55aa-bd2e-4872-8c21-91b510f93692" path="/var/lib/kubelet/pods/115a55aa-bd2e-4872-8c21-91b510f93692/volumes" Jan 28 17:12:35 crc kubenswrapper[4811]: I0128 17:12:35.352948 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Jan 28 17:12:35 crc kubenswrapper[4811]: I0128 17:12:35.368479 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_f9d38ae0-4dc4-473b-bfc8-172cfb5f5c44/mariadb-client/0.log" Jan 28 17:12:35 crc kubenswrapper[4811]: I0128 17:12:35.395968 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Jan 28 17:12:35 crc kubenswrapper[4811]: I0128 17:12:35.412033 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Jan 28 17:12:35 crc kubenswrapper[4811]: I0128 17:12:35.498226 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j4xfx\" (UniqueName: \"kubernetes.io/projected/f9d38ae0-4dc4-473b-bfc8-172cfb5f5c44-kube-api-access-j4xfx\") pod \"f9d38ae0-4dc4-473b-bfc8-172cfb5f5c44\" (UID: \"f9d38ae0-4dc4-473b-bfc8-172cfb5f5c44\") " Jan 28 17:12:35 crc kubenswrapper[4811]: I0128 17:12:35.503748 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9d38ae0-4dc4-473b-bfc8-172cfb5f5c44-kube-api-access-j4xfx" (OuterVolumeSpecName: "kube-api-access-j4xfx") pod "f9d38ae0-4dc4-473b-bfc8-172cfb5f5c44" (UID: "f9d38ae0-4dc4-473b-bfc8-172cfb5f5c44"). InnerVolumeSpecName "kube-api-access-j4xfx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:12:35 crc kubenswrapper[4811]: I0128 17:12:35.600042 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j4xfx\" (UniqueName: \"kubernetes.io/projected/f9d38ae0-4dc4-473b-bfc8-172cfb5f5c44-kube-api-access-j4xfx\") on node \"crc\" DevicePath \"\"" Jan 28 17:12:36 crc kubenswrapper[4811]: I0128 17:12:36.070013 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c40525fc2676681ee6ebd88e1c9d6415280449706f0815caa17a5fd9ce1943d4" Jan 28 17:12:36 crc kubenswrapper[4811]: I0128 17:12:36.070111 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Jan 28 17:12:36 crc kubenswrapper[4811]: I0128 17:12:36.349159 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9d38ae0-4dc4-473b-bfc8-172cfb5f5c44" path="/var/lib/kubelet/pods/f9d38ae0-4dc4-473b-bfc8-172cfb5f5c44/volumes" Jan 28 17:13:11 crc kubenswrapper[4811]: I0128 17:13:11.916839 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 28 17:13:11 crc kubenswrapper[4811]: E0128 17:13:11.917754 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9d38ae0-4dc4-473b-bfc8-172cfb5f5c44" containerName="mariadb-client" Jan 28 17:13:11 crc kubenswrapper[4811]: I0128 17:13:11.917773 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9d38ae0-4dc4-473b-bfc8-172cfb5f5c44" containerName="mariadb-client" Jan 28 17:13:11 crc kubenswrapper[4811]: I0128 17:13:11.917947 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9d38ae0-4dc4-473b-bfc8-172cfb5f5c44" containerName="mariadb-client" Jan 28 17:13:11 crc kubenswrapper[4811]: I0128 17:13:11.918836 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 28 17:13:11 crc kubenswrapper[4811]: I0128 17:13:11.922188 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Jan 28 17:13:11 crc kubenswrapper[4811]: I0128 17:13:11.922623 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Jan 28 17:13:11 crc kubenswrapper[4811]: I0128 17:13:11.924586 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-cczm4" Jan 28 17:13:11 crc kubenswrapper[4811]: I0128 17:13:11.934339 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 28 17:13:11 crc kubenswrapper[4811]: I0128 17:13:11.945117 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-1"] Jan 28 17:13:11 crc kubenswrapper[4811]: I0128 17:13:11.947206 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-1" Jan 28 17:13:11 crc kubenswrapper[4811]: I0128 17:13:11.967583 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-2"] Jan 28 17:13:11 crc kubenswrapper[4811]: I0128 17:13:11.969073 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-2" Jan 28 17:13:11 crc kubenswrapper[4811]: I0128 17:13:11.987551 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"] Jan 28 17:13:11 crc kubenswrapper[4811]: I0128 17:13:11.995656 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"] Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.085064 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5096fd9b-16e4-4922-a9a3-d3341a651a6b-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"5096fd9b-16e4-4922-a9a3-d3341a651a6b\") " pod="openstack/ovsdbserver-nb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.085115 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0628dfb9-d653-4e10-922b-7f2633499758-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"0628dfb9-d653-4e10-922b-7f2633499758\") " pod="openstack/ovsdbserver-nb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.085137 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7xpv\" (UniqueName: \"kubernetes.io/projected/0628dfb9-d653-4e10-922b-7f2633499758-kube-api-access-g7xpv\") pod \"ovsdbserver-nb-2\" (UID: \"0628dfb9-d653-4e10-922b-7f2633499758\") " pod="openstack/ovsdbserver-nb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.085167 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03cac693-8c06-4b2f-b025-6f10db9593cf-config\") pod \"ovsdbserver-nb-1\" (UID: \"03cac693-8c06-4b2f-b025-6f10db9593cf\") " pod="openstack/ovsdbserver-nb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.085194 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/03cac693-8c06-4b2f-b025-6f10db9593cf-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"03cac693-8c06-4b2f-b025-6f10db9593cf\") " pod="openstack/ovsdbserver-nb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.085215 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7f9cw\" (UniqueName: \"kubernetes.io/projected/5096fd9b-16e4-4922-a9a3-d3341a651a6b-kube-api-access-7f9cw\") pod \"ovsdbserver-nb-0\" (UID: \"5096fd9b-16e4-4922-a9a3-d3341a651a6b\") " pod="openstack/ovsdbserver-nb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.085231 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-3b9839d1-62e4-4276-908f-86f8bcdbe1a9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3b9839d1-62e4-4276-908f-86f8bcdbe1a9\") pod \"ovsdbserver-nb-1\" (UID: \"03cac693-8c06-4b2f-b025-6f10db9593cf\") " pod="openstack/ovsdbserver-nb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.085285 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5096fd9b-16e4-4922-a9a3-d3341a651a6b-config\") pod \"ovsdbserver-nb-0\" (UID: \"5096fd9b-16e4-4922-a9a3-d3341a651a6b\") " pod="openstack/ovsdbserver-nb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.085321 4811 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/03cac693-8c06-4b2f-b025-6f10db9593cf-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"03cac693-8c06-4b2f-b025-6f10db9593cf\") " pod="openstack/ovsdbserver-nb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.085339 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-ba77467f-91a5-438b-a7da-40f4c2204a3a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ba77467f-91a5-438b-a7da-40f4c2204a3a\") pod \"ovsdbserver-nb-2\" (UID: \"0628dfb9-d653-4e10-922b-7f2633499758\") " pod="openstack/ovsdbserver-nb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.085363 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5096fd9b-16e4-4922-a9a3-d3341a651a6b-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"5096fd9b-16e4-4922-a9a3-d3341a651a6b\") " pod="openstack/ovsdbserver-nb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.085380 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0628dfb9-d653-4e10-922b-7f2633499758-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"0628dfb9-d653-4e10-922b-7f2633499758\") " pod="openstack/ovsdbserver-nb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.085395 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0628dfb9-d653-4e10-922b-7f2633499758-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"0628dfb9-d653-4e10-922b-7f2633499758\") " pod="openstack/ovsdbserver-nb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.085420 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0628dfb9-d653-4e10-922b-7f2633499758-config\") pod \"ovsdbserver-nb-2\" (UID: \"0628dfb9-d653-4e10-922b-7f2633499758\") " pod="openstack/ovsdbserver-nb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.085454 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-fe86784f-b7ba-4cae-bfe0-d460229a6b5a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fe86784f-b7ba-4cae-bfe0-d460229a6b5a\") pod \"ovsdbserver-nb-0\" (UID: \"5096fd9b-16e4-4922-a9a3-d3341a651a6b\") " pod="openstack/ovsdbserver-nb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.085474 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5096fd9b-16e4-4922-a9a3-d3341a651a6b-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"5096fd9b-16e4-4922-a9a3-d3341a651a6b\") " pod="openstack/ovsdbserver-nb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.085488 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03cac693-8c06-4b2f-b025-6f10db9593cf-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"03cac693-8c06-4b2f-b025-6f10db9593cf\") " pod="openstack/ovsdbserver-nb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.085504 4811 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kljkl\" (UniqueName: \"kubernetes.io/projected/03cac693-8c06-4b2f-b025-6f10db9593cf-kube-api-access-kljkl\") pod \"ovsdbserver-nb-1\" (UID: \"03cac693-8c06-4b2f-b025-6f10db9593cf\") " pod="openstack/ovsdbserver-nb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.124626 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.126141 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.132966 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.133256 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.136767 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-jslrg" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.142643 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.150009 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-2"] Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.151674 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.159517 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-1"] Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.161399 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.167479 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"] Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.181284 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.187868 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5096fd9b-16e4-4922-a9a3-d3341a651a6b-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"5096fd9b-16e4-4922-a9a3-d3341a651a6b\") " pod="openstack/ovsdbserver-nb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.187931 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0628dfb9-d653-4e10-922b-7f2633499758-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"0628dfb9-d653-4e10-922b-7f2633499758\") " pod="openstack/ovsdbserver-nb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.187959 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7xpv\" (UniqueName: \"kubernetes.io/projected/0628dfb9-d653-4e10-922b-7f2633499758-kube-api-access-g7xpv\") pod \"ovsdbserver-nb-2\" (UID: \"0628dfb9-d653-4e10-922b-7f2633499758\") " pod="openstack/ovsdbserver-nb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.187998 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e8130062-77f2-441c-b3a7-ee6da0fba334-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"e8130062-77f2-441c-b3a7-ee6da0fba334\") " pod="openstack/ovsdbserver-sb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.188020 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03cac693-8c06-4b2f-b025-6f10db9593cf-config\") pod \"ovsdbserver-nb-1\" (UID: \"03cac693-8c06-4b2f-b025-6f10db9593cf\") " pod="openstack/ovsdbserver-nb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.188052 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/03cac693-8c06-4b2f-b025-6f10db9593cf-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"03cac693-8c06-4b2f-b025-6f10db9593cf\") " pod="openstack/ovsdbserver-nb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.188081 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7f9cw\" (UniqueName: \"kubernetes.io/projected/5096fd9b-16e4-4922-a9a3-d3341a651a6b-kube-api-access-7f9cw\") pod \"ovsdbserver-nb-0\" (UID: \"5096fd9b-16e4-4922-a9a3-d3341a651a6b\") " pod="openstack/ovsdbserver-nb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.188107 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-3b9839d1-62e4-4276-908f-86f8bcdbe1a9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3b9839d1-62e4-4276-908f-86f8bcdbe1a9\") pod \"ovsdbserver-nb-1\" (UID: \"03cac693-8c06-4b2f-b025-6f10db9593cf\") " pod="openstack/ovsdbserver-nb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.188132 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/5096fd9b-16e4-4922-a9a3-d3341a651a6b-config\") pod \"ovsdbserver-nb-0\" (UID: \"5096fd9b-16e4-4922-a9a3-d3341a651a6b\") " pod="openstack/ovsdbserver-nb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.188155 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8130062-77f2-441c-b3a7-ee6da0fba334-config\") pod \"ovsdbserver-sb-0\" (UID: \"e8130062-77f2-441c-b3a7-ee6da0fba334\") " pod="openstack/ovsdbserver-sb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.188192 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/03cac693-8c06-4b2f-b025-6f10db9593cf-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"03cac693-8c06-4b2f-b025-6f10db9593cf\") " pod="openstack/ovsdbserver-nb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.188220 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-ba77467f-91a5-438b-a7da-40f4c2204a3a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ba77467f-91a5-438b-a7da-40f4c2204a3a\") pod \"ovsdbserver-nb-2\" (UID: \"0628dfb9-d653-4e10-922b-7f2633499758\") " pod="openstack/ovsdbserver-nb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.188254 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5096fd9b-16e4-4922-a9a3-d3341a651a6b-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"5096fd9b-16e4-4922-a9a3-d3341a651a6b\") " pod="openstack/ovsdbserver-nb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.188326 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0628dfb9-d653-4e10-922b-7f2633499758-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"0628dfb9-d653-4e10-922b-7f2633499758\") " pod="openstack/ovsdbserver-nb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.188367 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0628dfb9-d653-4e10-922b-7f2633499758-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"0628dfb9-d653-4e10-922b-7f2633499758\") " pod="openstack/ovsdbserver-nb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.188394 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e8130062-77f2-441c-b3a7-ee6da0fba334-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"e8130062-77f2-441c-b3a7-ee6da0fba334\") " pod="openstack/ovsdbserver-sb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.188441 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-e7c1b53b-2ea4-4d0d-8ed9-96a6d8ba0400\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e7c1b53b-2ea4-4d0d-8ed9-96a6d8ba0400\") pod \"ovsdbserver-sb-0\" (UID: \"e8130062-77f2-441c-b3a7-ee6da0fba334\") " pod="openstack/ovsdbserver-sb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.188483 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0628dfb9-d653-4e10-922b-7f2633499758-config\") pod \"ovsdbserver-nb-2\" (UID: \"0628dfb9-d653-4e10-922b-7f2633499758\") " pod="openstack/ovsdbserver-nb-2" Jan 28 
17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.188505 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-fe86784f-b7ba-4cae-bfe0-d460229a6b5a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fe86784f-b7ba-4cae-bfe0-d460229a6b5a\") pod \"ovsdbserver-nb-0\" (UID: \"5096fd9b-16e4-4922-a9a3-d3341a651a6b\") " pod="openstack/ovsdbserver-nb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.188522 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kls44\" (UniqueName: \"kubernetes.io/projected/e8130062-77f2-441c-b3a7-ee6da0fba334-kube-api-access-kls44\") pod \"ovsdbserver-sb-0\" (UID: \"e8130062-77f2-441c-b3a7-ee6da0fba334\") " pod="openstack/ovsdbserver-sb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.188551 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5096fd9b-16e4-4922-a9a3-d3341a651a6b-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"5096fd9b-16e4-4922-a9a3-d3341a651a6b\") " pod="openstack/ovsdbserver-nb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.188567 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03cac693-8c06-4b2f-b025-6f10db9593cf-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"03cac693-8c06-4b2f-b025-6f10db9593cf\") " pod="openstack/ovsdbserver-nb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.188588 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kljkl\" (UniqueName: \"kubernetes.io/projected/03cac693-8c06-4b2f-b025-6f10db9593cf-kube-api-access-kljkl\") pod \"ovsdbserver-nb-1\" (UID: \"03cac693-8c06-4b2f-b025-6f10db9593cf\") " pod="openstack/ovsdbserver-nb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.188609 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8130062-77f2-441c-b3a7-ee6da0fba334-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"e8130062-77f2-441c-b3a7-ee6da0fba334\") " pod="openstack/ovsdbserver-sb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.189465 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0628dfb9-d653-4e10-922b-7f2633499758-config\") pod \"ovsdbserver-nb-2\" (UID: \"0628dfb9-d653-4e10-922b-7f2633499758\") " pod="openstack/ovsdbserver-nb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.189562 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5096fd9b-16e4-4922-a9a3-d3341a651a6b-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"5096fd9b-16e4-4922-a9a3-d3341a651a6b\") " pod="openstack/ovsdbserver-nb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.189725 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0628dfb9-d653-4e10-922b-7f2633499758-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"0628dfb9-d653-4e10-922b-7f2633499758\") " pod="openstack/ovsdbserver-nb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.190294 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/5096fd9b-16e4-4922-a9a3-d3341a651a6b-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"5096fd9b-16e4-4922-a9a3-d3341a651a6b\") " pod="openstack/ovsdbserver-nb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.190409 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03cac693-8c06-4b2f-b025-6f10db9593cf-config\") pod \"ovsdbserver-nb-1\" (UID: \"03cac693-8c06-4b2f-b025-6f10db9593cf\") " pod="openstack/ovsdbserver-nb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.189235 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/03cac693-8c06-4b2f-b025-6f10db9593cf-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"03cac693-8c06-4b2f-b025-6f10db9593cf\") " pod="openstack/ovsdbserver-nb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.190504 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5096fd9b-16e4-4922-a9a3-d3341a651a6b-config\") pod \"ovsdbserver-nb-0\" (UID: \"5096fd9b-16e4-4922-a9a3-d3341a651a6b\") " pod="openstack/ovsdbserver-nb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.190572 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/03cac693-8c06-4b2f-b025-6f10db9593cf-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"03cac693-8c06-4b2f-b025-6f10db9593cf\") " pod="openstack/ovsdbserver-nb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.191418 4811 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.191467 4811 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-3b9839d1-62e4-4276-908f-86f8bcdbe1a9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3b9839d1-62e4-4276-908f-86f8bcdbe1a9\") pod \"ovsdbserver-nb-1\" (UID: \"03cac693-8c06-4b2f-b025-6f10db9593cf\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/280b5b9c35001eac6788bec47dec206ad29aeaee7638a8404815984a6587fe28/globalmount\"" pod="openstack/ovsdbserver-nb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.191693 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0628dfb9-d653-4e10-922b-7f2633499758-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"0628dfb9-d653-4e10-922b-7f2633499758\") " pod="openstack/ovsdbserver-nb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.199844 4811 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.199893 4811 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-fe86784f-b7ba-4cae-bfe0-d460229a6b5a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fe86784f-b7ba-4cae-bfe0-d460229a6b5a\") pod \"ovsdbserver-nb-0\" (UID: \"5096fd9b-16e4-4922-a9a3-d3341a651a6b\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/aa66ac66f3a2f5669a45ee8e9cc88db2d5a93c3578c324e0827270d6a37de345/globalmount\"" pod="openstack/ovsdbserver-nb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.200184 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5096fd9b-16e4-4922-a9a3-d3341a651a6b-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"5096fd9b-16e4-4922-a9a3-d3341a651a6b\") " pod="openstack/ovsdbserver-nb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.200508 4811 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.200532 4811 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-ba77467f-91a5-438b-a7da-40f4c2204a3a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ba77467f-91a5-438b-a7da-40f4c2204a3a\") pod \"ovsdbserver-nb-2\" (UID: \"0628dfb9-d653-4e10-922b-7f2633499758\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/dcb6d17b7b51271c373f30cabba10593aea6a82923a7a545db547903813eccb9/globalmount\"" pod="openstack/ovsdbserver-nb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.201157 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03cac693-8c06-4b2f-b025-6f10db9593cf-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"03cac693-8c06-4b2f-b025-6f10db9593cf\") " pod="openstack/ovsdbserver-nb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.207257 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0628dfb9-d653-4e10-922b-7f2633499758-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"0628dfb9-d653-4e10-922b-7f2633499758\") " pod="openstack/ovsdbserver-nb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.208254 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7f9cw\" (UniqueName: \"kubernetes.io/projected/5096fd9b-16e4-4922-a9a3-d3341a651a6b-kube-api-access-7f9cw\") pod \"ovsdbserver-nb-0\" (UID: \"5096fd9b-16e4-4922-a9a3-d3341a651a6b\") " pod="openstack/ovsdbserver-nb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.211198 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kljkl\" (UniqueName: \"kubernetes.io/projected/03cac693-8c06-4b2f-b025-6f10db9593cf-kube-api-access-kljkl\") pod \"ovsdbserver-nb-1\" (UID: \"03cac693-8c06-4b2f-b025-6f10db9593cf\") " pod="openstack/ovsdbserver-nb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.222046 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7xpv\" (UniqueName: \"kubernetes.io/projected/0628dfb9-d653-4e10-922b-7f2633499758-kube-api-access-g7xpv\") pod \"ovsdbserver-nb-2\" (UID: \"0628dfb9-d653-4e10-922b-7f2633499758\") " pod="openstack/ovsdbserver-nb-2" Jan 
28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.231856 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-fe86784f-b7ba-4cae-bfe0-d460229a6b5a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fe86784f-b7ba-4cae-bfe0-d460229a6b5a\") pod \"ovsdbserver-nb-0\" (UID: \"5096fd9b-16e4-4922-a9a3-d3341a651a6b\") " pod="openstack/ovsdbserver-nb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.233515 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-ba77467f-91a5-438b-a7da-40f4c2204a3a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ba77467f-91a5-438b-a7da-40f4c2204a3a\") pod \"ovsdbserver-nb-2\" (UID: \"0628dfb9-d653-4e10-922b-7f2633499758\") " pod="openstack/ovsdbserver-nb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.234425 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-3b9839d1-62e4-4276-908f-86f8bcdbe1a9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3b9839d1-62e4-4276-908f-86f8bcdbe1a9\") pod \"ovsdbserver-nb-1\" (UID: \"03cac693-8c06-4b2f-b025-6f10db9593cf\") " pod="openstack/ovsdbserver-nb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.278213 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.290068 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-1c3c5de5-c88d-4560-929b-5e947b142388\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1c3c5de5-c88d-4560-929b-5e947b142388\") pod \"ovsdbserver-sb-2\" (UID: \"a106cdcc-25f5-4c62-aac6-1d9a4cfec240\") " pod="openstack/ovsdbserver-sb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.290141 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9eed1297-7bf1-4b5a-a55d-36219d222d5e-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"9eed1297-7bf1-4b5a-a55d-36219d222d5e\") " pod="openstack/ovsdbserver-sb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.290205 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e8130062-77f2-441c-b3a7-ee6da0fba334-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"e8130062-77f2-441c-b3a7-ee6da0fba334\") " pod="openstack/ovsdbserver-sb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.290252 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pskc4\" (UniqueName: \"kubernetes.io/projected/9eed1297-7bf1-4b5a-a55d-36219d222d5e-kube-api-access-pskc4\") pod \"ovsdbserver-sb-1\" (UID: \"9eed1297-7bf1-4b5a-a55d-36219d222d5e\") " pod="openstack/ovsdbserver-sb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.290287 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9eed1297-7bf1-4b5a-a55d-36219d222d5e-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"9eed1297-7bf1-4b5a-a55d-36219d222d5e\") " pod="openstack/ovsdbserver-sb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.290313 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8130062-77f2-441c-b3a7-ee6da0fba334-config\") pod 
\"ovsdbserver-sb-0\" (UID: \"e8130062-77f2-441c-b3a7-ee6da0fba334\") " pod="openstack/ovsdbserver-sb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.290341 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a106cdcc-25f5-4c62-aac6-1d9a4cfec240-config\") pod \"ovsdbserver-sb-2\" (UID: \"a106cdcc-25f5-4c62-aac6-1d9a4cfec240\") " pod="openstack/ovsdbserver-sb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.290386 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a106cdcc-25f5-4c62-aac6-1d9a4cfec240-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"a106cdcc-25f5-4c62-aac6-1d9a4cfec240\") " pod="openstack/ovsdbserver-sb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.290408 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9eed1297-7bf1-4b5a-a55d-36219d222d5e-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"9eed1297-7bf1-4b5a-a55d-36219d222d5e\") " pod="openstack/ovsdbserver-sb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.290477 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e8130062-77f2-441c-b3a7-ee6da0fba334-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"e8130062-77f2-441c-b3a7-ee6da0fba334\") " pod="openstack/ovsdbserver-sb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.290500 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5htsh\" (UniqueName: \"kubernetes.io/projected/a106cdcc-25f5-4c62-aac6-1d9a4cfec240-kube-api-access-5htsh\") pod \"ovsdbserver-sb-2\" (UID: \"a106cdcc-25f5-4c62-aac6-1d9a4cfec240\") " pod="openstack/ovsdbserver-sb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.290533 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-e7c1b53b-2ea4-4d0d-8ed9-96a6d8ba0400\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e7c1b53b-2ea4-4d0d-8ed9-96a6d8ba0400\") pod \"ovsdbserver-sb-0\" (UID: \"e8130062-77f2-441c-b3a7-ee6da0fba334\") " pod="openstack/ovsdbserver-sb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.290563 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-6823ca31-a8c0-47da-a792-6a362f1ba6ce\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6823ca31-a8c0-47da-a792-6a362f1ba6ce\") pod \"ovsdbserver-sb-1\" (UID: \"9eed1297-7bf1-4b5a-a55d-36219d222d5e\") " pod="openstack/ovsdbserver-sb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.290591 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kls44\" (UniqueName: \"kubernetes.io/projected/e8130062-77f2-441c-b3a7-ee6da0fba334-kube-api-access-kls44\") pod \"ovsdbserver-sb-0\" (UID: \"e8130062-77f2-441c-b3a7-ee6da0fba334\") " pod="openstack/ovsdbserver-sb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.290658 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a106cdcc-25f5-4c62-aac6-1d9a4cfec240-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: 
\"a106cdcc-25f5-4c62-aac6-1d9a4cfec240\") " pod="openstack/ovsdbserver-sb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.290716 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8130062-77f2-441c-b3a7-ee6da0fba334-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"e8130062-77f2-441c-b3a7-ee6da0fba334\") " pod="openstack/ovsdbserver-sb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.290742 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9eed1297-7bf1-4b5a-a55d-36219d222d5e-config\") pod \"ovsdbserver-sb-1\" (UID: \"9eed1297-7bf1-4b5a-a55d-36219d222d5e\") " pod="openstack/ovsdbserver-sb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.290784 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a106cdcc-25f5-4c62-aac6-1d9a4cfec240-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"a106cdcc-25f5-4c62-aac6-1d9a4cfec240\") " pod="openstack/ovsdbserver-sb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.291279 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e8130062-77f2-441c-b3a7-ee6da0fba334-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"e8130062-77f2-441c-b3a7-ee6da0fba334\") " pod="openstack/ovsdbserver-sb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.292069 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8130062-77f2-441c-b3a7-ee6da0fba334-config\") pod \"ovsdbserver-sb-0\" (UID: \"e8130062-77f2-441c-b3a7-ee6da0fba334\") " pod="openstack/ovsdbserver-sb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.293169 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.293220 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e8130062-77f2-441c-b3a7-ee6da0fba334-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"e8130062-77f2-441c-b3a7-ee6da0fba334\") " pod="openstack/ovsdbserver-sb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.293345 4811 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.293397 4811 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-e7c1b53b-2ea4-4d0d-8ed9-96a6d8ba0400\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e7c1b53b-2ea4-4d0d-8ed9-96a6d8ba0400\") pod \"ovsdbserver-sb-0\" (UID: \"e8130062-77f2-441c-b3a7-ee6da0fba334\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/aacf78e9ebe6f62b1758cf931b178da664a305deee515e7d1e138afd07419208/globalmount\"" pod="openstack/ovsdbserver-sb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.296686 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8130062-77f2-441c-b3a7-ee6da0fba334-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"e8130062-77f2-441c-b3a7-ee6da0fba334\") " pod="openstack/ovsdbserver-sb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.303891 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.311219 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kls44\" (UniqueName: \"kubernetes.io/projected/e8130062-77f2-441c-b3a7-ee6da0fba334-kube-api-access-kls44\") pod \"ovsdbserver-sb-0\" (UID: \"e8130062-77f2-441c-b3a7-ee6da0fba334\") " pod="openstack/ovsdbserver-sb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.334080 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-e7c1b53b-2ea4-4d0d-8ed9-96a6d8ba0400\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e7c1b53b-2ea4-4d0d-8ed9-96a6d8ba0400\") pod \"ovsdbserver-sb-0\" (UID: \"e8130062-77f2-441c-b3a7-ee6da0fba334\") " pod="openstack/ovsdbserver-sb-0" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.391836 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pskc4\" (UniqueName: \"kubernetes.io/projected/9eed1297-7bf1-4b5a-a55d-36219d222d5e-kube-api-access-pskc4\") pod \"ovsdbserver-sb-1\" (UID: \"9eed1297-7bf1-4b5a-a55d-36219d222d5e\") " pod="openstack/ovsdbserver-sb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.392169 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9eed1297-7bf1-4b5a-a55d-36219d222d5e-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"9eed1297-7bf1-4b5a-a55d-36219d222d5e\") " pod="openstack/ovsdbserver-sb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.392202 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a106cdcc-25f5-4c62-aac6-1d9a4cfec240-config\") pod \"ovsdbserver-sb-2\" (UID: \"a106cdcc-25f5-4c62-aac6-1d9a4cfec240\") " pod="openstack/ovsdbserver-sb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.392237 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a106cdcc-25f5-4c62-aac6-1d9a4cfec240-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"a106cdcc-25f5-4c62-aac6-1d9a4cfec240\") " pod="openstack/ovsdbserver-sb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.392257 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/9eed1297-7bf1-4b5a-a55d-36219d222d5e-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"9eed1297-7bf1-4b5a-a55d-36219d222d5e\") " pod="openstack/ovsdbserver-sb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.392301 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5htsh\" (UniqueName: \"kubernetes.io/projected/a106cdcc-25f5-4c62-aac6-1d9a4cfec240-kube-api-access-5htsh\") pod \"ovsdbserver-sb-2\" (UID: \"a106cdcc-25f5-4c62-aac6-1d9a4cfec240\") " pod="openstack/ovsdbserver-sb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.392334 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-6823ca31-a8c0-47da-a792-6a362f1ba6ce\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6823ca31-a8c0-47da-a792-6a362f1ba6ce\") pod \"ovsdbserver-sb-1\" (UID: \"9eed1297-7bf1-4b5a-a55d-36219d222d5e\") " pod="openstack/ovsdbserver-sb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.392364 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a106cdcc-25f5-4c62-aac6-1d9a4cfec240-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"a106cdcc-25f5-4c62-aac6-1d9a4cfec240\") " pod="openstack/ovsdbserver-sb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.392391 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9eed1297-7bf1-4b5a-a55d-36219d222d5e-config\") pod \"ovsdbserver-sb-1\" (UID: \"9eed1297-7bf1-4b5a-a55d-36219d222d5e\") " pod="openstack/ovsdbserver-sb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.392419 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a106cdcc-25f5-4c62-aac6-1d9a4cfec240-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"a106cdcc-25f5-4c62-aac6-1d9a4cfec240\") " pod="openstack/ovsdbserver-sb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.392476 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-1c3c5de5-c88d-4560-929b-5e947b142388\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1c3c5de5-c88d-4560-929b-5e947b142388\") pod \"ovsdbserver-sb-2\" (UID: \"a106cdcc-25f5-4c62-aac6-1d9a4cfec240\") " pod="openstack/ovsdbserver-sb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.392502 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9eed1297-7bf1-4b5a-a55d-36219d222d5e-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"9eed1297-7bf1-4b5a-a55d-36219d222d5e\") " pod="openstack/ovsdbserver-sb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.393241 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a106cdcc-25f5-4c62-aac6-1d9a4cfec240-config\") pod \"ovsdbserver-sb-2\" (UID: \"a106cdcc-25f5-4c62-aac6-1d9a4cfec240\") " pod="openstack/ovsdbserver-sb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.393338 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9eed1297-7bf1-4b5a-a55d-36219d222d5e-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"9eed1297-7bf1-4b5a-a55d-36219d222d5e\") " pod="openstack/ovsdbserver-sb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.393385 
4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9eed1297-7bf1-4b5a-a55d-36219d222d5e-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"9eed1297-7bf1-4b5a-a55d-36219d222d5e\") " pod="openstack/ovsdbserver-sb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.394228 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9eed1297-7bf1-4b5a-a55d-36219d222d5e-config\") pod \"ovsdbserver-sb-1\" (UID: \"9eed1297-7bf1-4b5a-a55d-36219d222d5e\") " pod="openstack/ovsdbserver-sb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.394954 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a106cdcc-25f5-4c62-aac6-1d9a4cfec240-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"a106cdcc-25f5-4c62-aac6-1d9a4cfec240\") " pod="openstack/ovsdbserver-sb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.396287 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a106cdcc-25f5-4c62-aac6-1d9a4cfec240-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"a106cdcc-25f5-4c62-aac6-1d9a4cfec240\") " pod="openstack/ovsdbserver-sb-2" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.396697 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9eed1297-7bf1-4b5a-a55d-36219d222d5e-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"9eed1297-7bf1-4b5a-a55d-36219d222d5e\") " pod="openstack/ovsdbserver-sb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.397423 4811 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.397471 4811 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-6823ca31-a8c0-47da-a792-6a362f1ba6ce\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6823ca31-a8c0-47da-a792-6a362f1ba6ce\") pod \"ovsdbserver-sb-1\" (UID: \"9eed1297-7bf1-4b5a-a55d-36219d222d5e\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f285493032715e83400e49c0133e2248758ba1499ac772f6b3c833693deef493/globalmount\"" pod="openstack/ovsdbserver-sb-1" Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.397708 4811 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.397736 4811 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-1c3c5de5-c88d-4560-929b-5e947b142388\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1c3c5de5-c88d-4560-929b-5e947b142388\") pod \"ovsdbserver-sb-2\" (UID: \"a106cdcc-25f5-4c62-aac6-1d9a4cfec240\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/75b36764c6ea81b29e43a0f21371a46c8721da2736373fdffcd670d8cb0a75c2/globalmount\"" pod="openstack/ovsdbserver-sb-2"
Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.419490 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pskc4\" (UniqueName: \"kubernetes.io/projected/9eed1297-7bf1-4b5a-a55d-36219d222d5e-kube-api-access-pskc4\") pod \"ovsdbserver-sb-1\" (UID: \"9eed1297-7bf1-4b5a-a55d-36219d222d5e\") " pod="openstack/ovsdbserver-sb-1"
Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.420336 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5htsh\" (UniqueName: \"kubernetes.io/projected/a106cdcc-25f5-4c62-aac6-1d9a4cfec240-kube-api-access-5htsh\") pod \"ovsdbserver-sb-2\" (UID: \"a106cdcc-25f5-4c62-aac6-1d9a4cfec240\") " pod="openstack/ovsdbserver-sb-2"
Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.420403 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a106cdcc-25f5-4c62-aac6-1d9a4cfec240-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"a106cdcc-25f5-4c62-aac6-1d9a4cfec240\") " pod="openstack/ovsdbserver-sb-2"
Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.447665 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.459038 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-1c3c5de5-c88d-4560-929b-5e947b142388\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1c3c5de5-c88d-4560-929b-5e947b142388\") pod \"ovsdbserver-sb-2\" (UID: \"a106cdcc-25f5-4c62-aac6-1d9a4cfec240\") " pod="openstack/ovsdbserver-sb-2"
Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.473175 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-6823ca31-a8c0-47da-a792-6a362f1ba6ce\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6823ca31-a8c0-47da-a792-6a362f1ba6ce\") pod \"ovsdbserver-sb-1\" (UID: \"9eed1297-7bf1-4b5a-a55d-36219d222d5e\") " pod="openstack/ovsdbserver-sb-1"
Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.483190 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2"
Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.484904 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-1"
Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.813060 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Jan 28 17:13:12 crc kubenswrapper[4811]: I0128 17:13:12.899719 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"]
Jan 28 17:13:12 crc kubenswrapper[4811]: W0128 17:13:12.909411 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod03cac693_8c06_4b2f_b025_6f10db9593cf.slice/crio-788995ce907333a76c996cd89c541471559e1363c238ba69411ac5a14d1e2cc2 WatchSource:0}: Error finding container 788995ce907333a76c996cd89c541471559e1363c238ba69411ac5a14d1e2cc2: Status 404 returned error can't find the container with id 788995ce907333a76c996cd89c541471559e1363c238ba69411ac5a14d1e2cc2
Jan 28 17:13:13 crc kubenswrapper[4811]: I0128 17:13:13.084386 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Jan 28 17:13:13 crc kubenswrapper[4811]: I0128 17:13:13.390842 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"e8130062-77f2-441c-b3a7-ee6da0fba334","Type":"ContainerStarted","Data":"4e77c1eba879011ac87cf84ae33af3149b8e66f8adaa122ca8d21f6915fef991"}
Jan 28 17:13:13 crc kubenswrapper[4811]: I0128 17:13:13.391258 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"e8130062-77f2-441c-b3a7-ee6da0fba334","Type":"ContainerStarted","Data":"c8d36de8bce64961fd8861150f9c5e108d3d876c122eb77d93a9939e43d7f866"}
Jan 28 17:13:13 crc kubenswrapper[4811]: I0128 17:13:13.391277 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"e8130062-77f2-441c-b3a7-ee6da0fba334","Type":"ContainerStarted","Data":"c93e7f2ed9362f8e99022a839754af75791aed0cdf19671c1c9033e1249b598e"}
Jan 28 17:13:13 crc kubenswrapper[4811]: I0128 17:13:13.393551 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"03cac693-8c06-4b2f-b025-6f10db9593cf","Type":"ContainerStarted","Data":"9d92ffe5ed35b5bc67aa804045f8bc61117a4e6ca5086134e7109e821ac51510"}
Jan 28 17:13:13 crc kubenswrapper[4811]: I0128 17:13:13.393615 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"03cac693-8c06-4b2f-b025-6f10db9593cf","Type":"ContainerStarted","Data":"4c272b12fcc71b9cdd64044a568444310a58a0ab48798d3b4b5a72248949bd0c"}
Jan 28 17:13:13 crc kubenswrapper[4811]: I0128 17:13:13.393628 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"03cac693-8c06-4b2f-b025-6f10db9593cf","Type":"ContainerStarted","Data":"788995ce907333a76c996cd89c541471559e1363c238ba69411ac5a14d1e2cc2"}
Jan 28 17:13:13 crc kubenswrapper[4811]: I0128 17:13:13.396024 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"5096fd9b-16e4-4922-a9a3-d3341a651a6b","Type":"ContainerStarted","Data":"a1c5ceceeed8602b563fe3d75c2470927e4674e5d7192c0f6597ba40e3ea64ad"}
Jan 28 17:13:13 crc kubenswrapper[4811]: I0128 17:13:13.396062 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"5096fd9b-16e4-4922-a9a3-d3341a651a6b","Type":"ContainerStarted","Data":"f1ab52059d8c340247236ce30c02a653dc50e7570647d64b031d464875a3abfc"}
Jan 28 17:13:13 crc kubenswrapper[4811]: I0128 17:13:13.396073 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"5096fd9b-16e4-4922-a9a3-d3341a651a6b","Type":"ContainerStarted","Data":"04cf8524c1ebc0dc27fc30cb6ae17223719108d3d134c58eb02594f4768354f6"}
Jan 28 17:13:13 crc kubenswrapper[4811]: I0128 17:13:13.417789 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=2.41777024 podStartE2EDuration="2.41777024s" podCreationTimestamp="2026-01-28 17:13:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:13:13.413493304 +0000 UTC m=+5286.167856907" watchObservedRunningTime="2026-01-28 17:13:13.41777024 +0000 UTC m=+5286.172133823"
Jan 28 17:13:13 crc kubenswrapper[4811]: I0128 17:13:13.439816 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-1" podStartSLOduration=3.439797231 podStartE2EDuration="3.439797231s" podCreationTimestamp="2026-01-28 17:13:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:13:13.432378648 +0000 UTC m=+5286.186742241" watchObservedRunningTime="2026-01-28 17:13:13.439797231 +0000 UTC m=+5286.194160814"
Jan 28 17:13:13 crc kubenswrapper[4811]: I0128 17:13:13.453648 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=3.453631337 podStartE2EDuration="3.453631337s" podCreationTimestamp="2026-01-28 17:13:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:13:13.447456199 +0000 UTC m=+5286.201819802" watchObservedRunningTime="2026-01-28 17:13:13.453631337 +0000 UTC m=+5286.207994920"
Jan 28 17:13:13 crc kubenswrapper[4811]: I0128 17:13:13.844582 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"]
Jan 28 17:13:13 crc kubenswrapper[4811]: W0128 17:13:13.846062 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda106cdcc_25f5_4c62_aac6_1d9a4cfec240.slice/crio-206a2a4906fb817d359099047edd7b8ce65d7935e9185ad3e64a1961f9693d8f WatchSource:0}: Error finding container 206a2a4906fb817d359099047edd7b8ce65d7935e9185ad3e64a1961f9693d8f: Status 404 returned error can't find the container with id 206a2a4906fb817d359099047edd7b8ce65d7935e9185ad3e64a1961f9693d8f
Jan 28 17:13:13 crc kubenswrapper[4811]: I0128 17:13:13.939528 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"]
Jan 28 17:13:13 crc kubenswrapper[4811]: W0128 17:13:13.949706 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0628dfb9_d653_4e10_922b_7f2633499758.slice/crio-471ff479bb442996a395dd578cd5df6dd243aa9b7e289edd41b49a872bac0830 WatchSource:0}: Error finding container 471ff479bb442996a395dd578cd5df6dd243aa9b7e289edd41b49a872bac0830: Status 404 returned error can't find the container with id 471ff479bb442996a395dd578cd5df6dd243aa9b7e289edd41b49a872bac0830
Jan 28 17:13:14 crc kubenswrapper[4811]: I0128 17:13:14.405544 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"0628dfb9-d653-4e10-922b-7f2633499758","Type":"ContainerStarted","Data":"c2ed9771881c5a0f9b6b359ad908b32c547f71edf2cecd2672093ae0d0283253"}
Jan 28 17:13:14 crc kubenswrapper[4811]: I0128 17:13:14.405962 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"0628dfb9-d653-4e10-922b-7f2633499758","Type":"ContainerStarted","Data":"6f53ecff2ac41d05c077c2914d1f403dc5a845a4f07cbd319365ad4fe447ac7d"}
Jan 28 17:13:14 crc kubenswrapper[4811]: I0128 17:13:14.405978 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"0628dfb9-d653-4e10-922b-7f2633499758","Type":"ContainerStarted","Data":"471ff479bb442996a395dd578cd5df6dd243aa9b7e289edd41b49a872bac0830"}
Jan 28 17:13:14 crc kubenswrapper[4811]: I0128 17:13:14.408519 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"a106cdcc-25f5-4c62-aac6-1d9a4cfec240","Type":"ContainerStarted","Data":"df8cdb1db586c05dcb6356f4fe3a4567a0a8fd91024c236ba234e0767535b208"}
Jan 28 17:13:14 crc kubenswrapper[4811]: I0128 17:13:14.408597 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"a106cdcc-25f5-4c62-aac6-1d9a4cfec240","Type":"ContainerStarted","Data":"10cf7765e973a1c5e2580994a57f9abc3133955ab7e030f338eb6fad753a4248"}
Jan 28 17:13:14 crc kubenswrapper[4811]: I0128 17:13:14.408611 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"a106cdcc-25f5-4c62-aac6-1d9a4cfec240","Type":"ContainerStarted","Data":"206a2a4906fb817d359099047edd7b8ce65d7935e9185ad3e64a1961f9693d8f"}
Jan 28 17:13:14 crc kubenswrapper[4811]: I0128 17:13:14.425369 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-2" podStartSLOduration=4.425344952 podStartE2EDuration="4.425344952s" podCreationTimestamp="2026-01-28 17:13:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:13:14.421163638 +0000 UTC m=+5287.175527241" watchObservedRunningTime="2026-01-28 17:13:14.425344952 +0000 UTC m=+5287.179708535"
Jan 28 17:13:14 crc kubenswrapper[4811]: I0128 17:13:14.441419 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-2" podStartSLOduration=3.441400489 podStartE2EDuration="3.441400489s" podCreationTimestamp="2026-01-28 17:13:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:13:14.435605041 +0000 UTC m=+5287.189968624" watchObservedRunningTime="2026-01-28 17:13:14.441400489 +0000 UTC m=+5287.195764072"
Jan 28 17:13:14 crc kubenswrapper[4811]: I0128 17:13:14.963036 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"]
Jan 28 17:13:14 crc kubenswrapper[4811]: W0128 17:13:14.970603 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9eed1297_7bf1_4b5a_a55d_36219d222d5e.slice/crio-de9c4826d4c2621b3699b07f6bde66912759d6d6a3eab7f2a01b4a1e973e6184 WatchSource:0}: Error finding container de9c4826d4c2621b3699b07f6bde66912759d6d6a3eab7f2a01b4a1e973e6184: Status 404 returned error can't find the container with id de9c4826d4c2621b3699b07f6bde66912759d6d6a3eab7f2a01b4a1e973e6184
Jan 28 17:13:15 crc kubenswrapper[4811]: I0128 17:13:15.278617 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0"
Jan 28 17:13:15 crc kubenswrapper[4811]: I0128 17:13:15.294047 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-1"
Jan 28 17:13:15 crc kubenswrapper[4811]: I0128 17:13:15.304955 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-2"
Jan 28 17:13:15 crc kubenswrapper[4811]: I0128 17:13:15.421680 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"9eed1297-7bf1-4b5a-a55d-36219d222d5e","Type":"ContainerStarted","Data":"7a9a3dc0caf4ae1dd561de20c63a3126db1509aa2cdcd5121ac4fdfcd9a657da"}
Jan 28 17:13:15 crc kubenswrapper[4811]: I0128 17:13:15.421720 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"9eed1297-7bf1-4b5a-a55d-36219d222d5e","Type":"ContainerStarted","Data":"ea95425d6fdb7be0d8a95e8600b280b832a167674c19a2741487af4f123dae3c"}
Jan 28 17:13:15 crc kubenswrapper[4811]: I0128 17:13:15.421734 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"9eed1297-7bf1-4b5a-a55d-36219d222d5e","Type":"ContainerStarted","Data":"de9c4826d4c2621b3699b07f6bde66912759d6d6a3eab7f2a01b4a1e973e6184"}
Jan 28 17:13:15 crc kubenswrapper[4811]: I0128 17:13:15.442169 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-1" podStartSLOduration=4.442150614 podStartE2EDuration="4.442150614s" podCreationTimestamp="2026-01-28 17:13:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:13:15.436833419 +0000 UTC m=+5288.191197002" watchObservedRunningTime="2026-01-28 17:13:15.442150614 +0000 UTC m=+5288.196514217"
Jan 28 17:13:15 crc kubenswrapper[4811]: I0128 17:13:15.449511 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0"
Jan 28 17:13:15 crc kubenswrapper[4811]: I0128 17:13:15.484288 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-2"
Jan 28 17:13:15 crc kubenswrapper[4811]: I0128 17:13:15.485476 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-1"
Jan 28 17:13:17 crc kubenswrapper[4811]: I0128 17:13:17.278985 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0"
Jan 28 17:13:17 crc kubenswrapper[4811]: I0128 17:13:17.293601 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-1"
Jan 28 17:13:17 crc kubenswrapper[4811]: I0128 17:13:17.304558 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-2"
Jan 28 17:13:17 crc kubenswrapper[4811]: I0128 17:13:17.449918 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0"
Jan 28 17:13:17 crc kubenswrapper[4811]: I0128 17:13:17.483804 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-2"
Jan 28 17:13:17 crc kubenswrapper[4811]: I0128 17:13:17.485982 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-1"
Jan 28 17:13:18 crc kubenswrapper[4811]: I0128 17:13:18.333020 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-1"
Jan 28 17:13:18 crc kubenswrapper[4811]: I0128 17:13:18.335752 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0"
Jan 28 17:13:18 crc kubenswrapper[4811]: I0128 17:13:18.360186 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-2"
Jan 28 17:13:18 crc kubenswrapper[4811]: I0128 17:13:18.375577 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-1"
Jan 28 17:13:18 crc kubenswrapper[4811]: I0128 17:13:18.379986 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0"
Jan 28 17:13:18 crc kubenswrapper[4811]: I0128 17:13:18.488134 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0"
Jan 28 17:13:18 crc kubenswrapper[4811]: I0128 17:13:18.546812 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-1"
Jan 28 17:13:18 crc kubenswrapper[4811]: I0128 17:13:18.546934 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0"
Jan 28 17:13:18 crc kubenswrapper[4811]: I0128 17:13:18.549027 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-2"
Jan 28 17:13:18 crc kubenswrapper[4811]: I0128 17:13:18.640904 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-668769d4f5-p8fsw"]
Jan 28 17:13:18 crc kubenswrapper[4811]: I0128 17:13:18.666377 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-668769d4f5-p8fsw"
Jan 28 17:13:18 crc kubenswrapper[4811]: I0128 17:13:18.666293 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-668769d4f5-p8fsw"]
Jan 28 17:13:18 crc kubenswrapper[4811]: I0128 17:13:18.673558 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb"
Jan 28 17:13:18 crc kubenswrapper[4811]: I0128 17:13:18.712407 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krdkg\" (UniqueName: \"kubernetes.io/projected/d377d56c-3df1-4e24-91c7-3832238b2ffe-kube-api-access-krdkg\") pod \"dnsmasq-dns-668769d4f5-p8fsw\" (UID: \"d377d56c-3df1-4e24-91c7-3832238b2ffe\") " pod="openstack/dnsmasq-dns-668769d4f5-p8fsw"
Jan 28 17:13:18 crc kubenswrapper[4811]: I0128 17:13:18.712501 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d377d56c-3df1-4e24-91c7-3832238b2ffe-ovsdbserver-nb\") pod \"dnsmasq-dns-668769d4f5-p8fsw\" (UID: \"d377d56c-3df1-4e24-91c7-3832238b2ffe\") " pod="openstack/dnsmasq-dns-668769d4f5-p8fsw"
Jan 28 17:13:18 crc kubenswrapper[4811]: I0128 17:13:18.712578 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d377d56c-3df1-4e24-91c7-3832238b2ffe-config\") pod \"dnsmasq-dns-668769d4f5-p8fsw\" (UID: \"d377d56c-3df1-4e24-91c7-3832238b2ffe\") " pod="openstack/dnsmasq-dns-668769d4f5-p8fsw"
Jan 28 17:13:18 crc kubenswrapper[4811]: I0128 17:13:18.712855 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d377d56c-3df1-4e24-91c7-3832238b2ffe-dns-svc\") pod \"dnsmasq-dns-668769d4f5-p8fsw\" (UID: \"d377d56c-3df1-4e24-91c7-3832238b2ffe\") " pod="openstack/dnsmasq-dns-668769d4f5-p8fsw"
Jan 28 17:13:18 crc kubenswrapper[4811]: I0128 17:13:18.814761 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d377d56c-3df1-4e24-91c7-3832238b2ffe-ovsdbserver-nb\") pod \"dnsmasq-dns-668769d4f5-p8fsw\" (UID: \"d377d56c-3df1-4e24-91c7-3832238b2ffe\") " pod="openstack/dnsmasq-dns-668769d4f5-p8fsw"
Jan 28 17:13:18 crc kubenswrapper[4811]: I0128 17:13:18.814874 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d377d56c-3df1-4e24-91c7-3832238b2ffe-config\") pod \"dnsmasq-dns-668769d4f5-p8fsw\" (UID: \"d377d56c-3df1-4e24-91c7-3832238b2ffe\") " pod="openstack/dnsmasq-dns-668769d4f5-p8fsw"
Jan 28 17:13:18 crc kubenswrapper[4811]: I0128 17:13:18.814957 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d377d56c-3df1-4e24-91c7-3832238b2ffe-dns-svc\") pod \"dnsmasq-dns-668769d4f5-p8fsw\" (UID: \"d377d56c-3df1-4e24-91c7-3832238b2ffe\") " pod="openstack/dnsmasq-dns-668769d4f5-p8fsw"
Jan 28 17:13:18 crc kubenswrapper[4811]: I0128 17:13:18.815006 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krdkg\" (UniqueName: \"kubernetes.io/projected/d377d56c-3df1-4e24-91c7-3832238b2ffe-kube-api-access-krdkg\") pod \"dnsmasq-dns-668769d4f5-p8fsw\" (UID: \"d377d56c-3df1-4e24-91c7-3832238b2ffe\") " pod="openstack/dnsmasq-dns-668769d4f5-p8fsw"
Jan 28 17:13:18 crc kubenswrapper[4811]: I0128 17:13:18.815928 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d377d56c-3df1-4e24-91c7-3832238b2ffe-ovsdbserver-nb\") pod \"dnsmasq-dns-668769d4f5-p8fsw\" (UID: \"d377d56c-3df1-4e24-91c7-3832238b2ffe\") " pod="openstack/dnsmasq-dns-668769d4f5-p8fsw"
Jan 28 17:13:18 crc kubenswrapper[4811]: I0128 17:13:18.815940 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d377d56c-3df1-4e24-91c7-3832238b2ffe-dns-svc\") pod \"dnsmasq-dns-668769d4f5-p8fsw\" (UID: \"d377d56c-3df1-4e24-91c7-3832238b2ffe\") " pod="openstack/dnsmasq-dns-668769d4f5-p8fsw"
Jan 28 17:13:18 crc kubenswrapper[4811]: I0128 17:13:18.816542 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d377d56c-3df1-4e24-91c7-3832238b2ffe-config\") pod \"dnsmasq-dns-668769d4f5-p8fsw\" (UID: \"d377d56c-3df1-4e24-91c7-3832238b2ffe\") " pod="openstack/dnsmasq-dns-668769d4f5-p8fsw"
Jan 28 17:13:18 crc kubenswrapper[4811]: I0128 17:13:18.837868 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krdkg\" (UniqueName: \"kubernetes.io/projected/d377d56c-3df1-4e24-91c7-3832238b2ffe-kube-api-access-krdkg\") pod \"dnsmasq-dns-668769d4f5-p8fsw\" (UID: \"d377d56c-3df1-4e24-91c7-3832238b2ffe\") " pod="openstack/dnsmasq-dns-668769d4f5-p8fsw"
Jan 28 17:13:18 crc kubenswrapper[4811]: I0128 17:13:18.920014 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-668769d4f5-p8fsw"]
Jan 28 17:13:18 crc kubenswrapper[4811]: I0128 17:13:18.920753 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-668769d4f5-p8fsw"
Jan 28 17:13:18 crc kubenswrapper[4811]: I0128 17:13:18.949609 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8554585dc-mbttd"]
Jan 28 17:13:18 crc kubenswrapper[4811]: I0128 17:13:18.956599 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554585dc-mbttd"
Jan 28 17:13:18 crc kubenswrapper[4811]: I0128 17:13:18.958367 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb"
Jan 28 17:13:18 crc kubenswrapper[4811]: I0128 17:13:18.959631 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554585dc-mbttd"]
Jan 28 17:13:19 crc kubenswrapper[4811]: I0128 17:13:19.017475 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5fmv\" (UniqueName: \"kubernetes.io/projected/bfd82619-c9bd-4c64-a3a4-dd6b85b48126-kube-api-access-z5fmv\") pod \"dnsmasq-dns-8554585dc-mbttd\" (UID: \"bfd82619-c9bd-4c64-a3a4-dd6b85b48126\") " pod="openstack/dnsmasq-dns-8554585dc-mbttd"
Jan 28 17:13:19 crc kubenswrapper[4811]: I0128 17:13:19.017531 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bfd82619-c9bd-4c64-a3a4-dd6b85b48126-ovsdbserver-sb\") pod \"dnsmasq-dns-8554585dc-mbttd\" (UID: \"bfd82619-c9bd-4c64-a3a4-dd6b85b48126\") " pod="openstack/dnsmasq-dns-8554585dc-mbttd"
Jan 28 17:13:19 crc kubenswrapper[4811]: I0128 17:13:19.017557 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfd82619-c9bd-4c64-a3a4-dd6b85b48126-config\") pod \"dnsmasq-dns-8554585dc-mbttd\" (UID: \"bfd82619-c9bd-4c64-a3a4-dd6b85b48126\") " pod="openstack/dnsmasq-dns-8554585dc-mbttd"
Jan 28 17:13:19 crc kubenswrapper[4811]: I0128 17:13:19.017594 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bfd82619-c9bd-4c64-a3a4-dd6b85b48126-dns-svc\") pod \"dnsmasq-dns-8554585dc-mbttd\" (UID: \"bfd82619-c9bd-4c64-a3a4-dd6b85b48126\") " pod="openstack/dnsmasq-dns-8554585dc-mbttd"
Jan 28 17:13:19 crc kubenswrapper[4811]: I0128 17:13:19.017608 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bfd82619-c9bd-4c64-a3a4-dd6b85b48126-ovsdbserver-nb\") pod \"dnsmasq-dns-8554585dc-mbttd\" (UID: \"bfd82619-c9bd-4c64-a3a4-dd6b85b48126\") " pod="openstack/dnsmasq-dns-8554585dc-mbttd"
Jan 28 17:13:19 crc kubenswrapper[4811]: I0128 17:13:19.118112 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bfd82619-c9bd-4c64-a3a4-dd6b85b48126-dns-svc\") pod \"dnsmasq-dns-8554585dc-mbttd\" (UID: \"bfd82619-c9bd-4c64-a3a4-dd6b85b48126\") " pod="openstack/dnsmasq-dns-8554585dc-mbttd"
Jan 28 17:13:19 crc kubenswrapper[4811]: I0128 17:13:19.118406 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bfd82619-c9bd-4c64-a3a4-dd6b85b48126-ovsdbserver-nb\") pod \"dnsmasq-dns-8554585dc-mbttd\" (UID: \"bfd82619-c9bd-4c64-a3a4-dd6b85b48126\") " pod="openstack/dnsmasq-dns-8554585dc-mbttd"
Jan 28 17:13:19 crc kubenswrapper[4811]: I0128 17:13:19.118493 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z5fmv\" (UniqueName: \"kubernetes.io/projected/bfd82619-c9bd-4c64-a3a4-dd6b85b48126-kube-api-access-z5fmv\") pod \"dnsmasq-dns-8554585dc-mbttd\" (UID: \"bfd82619-c9bd-4c64-a3a4-dd6b85b48126\") " pod="openstack/dnsmasq-dns-8554585dc-mbttd"
Jan 28 17:13:19 crc kubenswrapper[4811]: I0128 17:13:19.118530 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bfd82619-c9bd-4c64-a3a4-dd6b85b48126-ovsdbserver-sb\") pod \"dnsmasq-dns-8554585dc-mbttd\" (UID: \"bfd82619-c9bd-4c64-a3a4-dd6b85b48126\") " pod="openstack/dnsmasq-dns-8554585dc-mbttd"
Jan 28 17:13:19 crc kubenswrapper[4811]: I0128 17:13:19.118552 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfd82619-c9bd-4c64-a3a4-dd6b85b48126-config\") pod \"dnsmasq-dns-8554585dc-mbttd\" (UID: \"bfd82619-c9bd-4c64-a3a4-dd6b85b48126\") " pod="openstack/dnsmasq-dns-8554585dc-mbttd"
Jan 28 17:13:19 crc kubenswrapper[4811]: I0128 17:13:19.119273 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfd82619-c9bd-4c64-a3a4-dd6b85b48126-config\") pod \"dnsmasq-dns-8554585dc-mbttd\" (UID: \"bfd82619-c9bd-4c64-a3a4-dd6b85b48126\") " pod="openstack/dnsmasq-dns-8554585dc-mbttd"
Jan 28 17:13:19 crc kubenswrapper[4811]: I0128 17:13:19.119935 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bfd82619-c9bd-4c64-a3a4-dd6b85b48126-dns-svc\") pod \"dnsmasq-dns-8554585dc-mbttd\" (UID: \"bfd82619-c9bd-4c64-a3a4-dd6b85b48126\") " pod="openstack/dnsmasq-dns-8554585dc-mbttd"
Jan 28 17:13:19 crc kubenswrapper[4811]: I0128 17:13:19.120344 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bfd82619-c9bd-4c64-a3a4-dd6b85b48126-ovsdbserver-nb\") pod \"dnsmasq-dns-8554585dc-mbttd\" (UID: \"bfd82619-c9bd-4c64-a3a4-dd6b85b48126\") " pod="openstack/dnsmasq-dns-8554585dc-mbttd"
Jan 28 17:13:19 crc kubenswrapper[4811]: I0128 17:13:19.120370 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bfd82619-c9bd-4c64-a3a4-dd6b85b48126-ovsdbserver-sb\") pod \"dnsmasq-dns-8554585dc-mbttd\" (UID: \"bfd82619-c9bd-4c64-a3a4-dd6b85b48126\") " pod="openstack/dnsmasq-dns-8554585dc-mbttd"
Jan 28 17:13:19 crc kubenswrapper[4811]: I0128 17:13:19.146797 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z5fmv\" (UniqueName: \"kubernetes.io/projected/bfd82619-c9bd-4c64-a3a4-dd6b85b48126-kube-api-access-z5fmv\") pod \"dnsmasq-dns-8554585dc-mbttd\" (UID: \"bfd82619-c9bd-4c64-a3a4-dd6b85b48126\") " pod="openstack/dnsmasq-dns-8554585dc-mbttd"
Jan 28 17:13:19 crc kubenswrapper[4811]: I0128 17:13:19.344559 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554585dc-mbttd"
Jan 28 17:13:19 crc kubenswrapper[4811]: I0128 17:13:19.370584 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-668769d4f5-p8fsw"]
Jan 28 17:13:19 crc kubenswrapper[4811]: W0128 17:13:19.377143 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd377d56c_3df1_4e24_91c7_3832238b2ffe.slice/crio-a0db60bab250556a6b82f9a5d303b8a7ff6b05bd8cc3d6068801c4dcee42359a WatchSource:0}: Error finding container a0db60bab250556a6b82f9a5d303b8a7ff6b05bd8cc3d6068801c4dcee42359a: Status 404 returned error can't find the container with id a0db60bab250556a6b82f9a5d303b8a7ff6b05bd8cc3d6068801c4dcee42359a
Jan 28 17:13:19 crc kubenswrapper[4811]: I0128 17:13:19.456709 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-668769d4f5-p8fsw" event={"ID":"d377d56c-3df1-4e24-91c7-3832238b2ffe","Type":"ContainerStarted","Data":"a0db60bab250556a6b82f9a5d303b8a7ff6b05bd8cc3d6068801c4dcee42359a"}
Jan 28 17:13:19 crc kubenswrapper[4811]: I0128 17:13:19.790373 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554585dc-mbttd"]
Jan 28 17:13:19 crc kubenswrapper[4811]: W0128 17:13:19.799776 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbfd82619_c9bd_4c64_a3a4_dd6b85b48126.slice/crio-8632d91695c0cb987e8895d8160146f12a3fa7ec93273555e77b921773dc5527 WatchSource:0}: Error finding container 8632d91695c0cb987e8895d8160146f12a3fa7ec93273555e77b921773dc5527: Status 404 returned error can't find the container with id 8632d91695c0cb987e8895d8160146f12a3fa7ec93273555e77b921773dc5527
Jan 28 17:13:20 crc kubenswrapper[4811]: I0128 17:13:20.465649 4811 generic.go:334] "Generic (PLEG): container finished" podID="bfd82619-c9bd-4c64-a3a4-dd6b85b48126" containerID="4622ad9afdb33a29347f45c62674af5be3a37f9892085baf08f97403153ff8a1" exitCode=0
Jan 28 17:13:20 crc kubenswrapper[4811]: I0128 17:13:20.465937 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554585dc-mbttd" event={"ID":"bfd82619-c9bd-4c64-a3a4-dd6b85b48126","Type":"ContainerDied","Data":"4622ad9afdb33a29347f45c62674af5be3a37f9892085baf08f97403153ff8a1"}
Jan 28 17:13:20 crc kubenswrapper[4811]: I0128 17:13:20.465967 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554585dc-mbttd" event={"ID":"bfd82619-c9bd-4c64-a3a4-dd6b85b48126","Type":"ContainerStarted","Data":"8632d91695c0cb987e8895d8160146f12a3fa7ec93273555e77b921773dc5527"}
Jan 28 17:13:20 crc kubenswrapper[4811]: I0128 17:13:20.468789 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-668769d4f5-p8fsw" event={"ID":"d377d56c-3df1-4e24-91c7-3832238b2ffe","Type":"ContainerDied","Data":"bda5c980a39f02f63a7d73b86b8ba5eabd4341ae8454c7126dba36962bac03cb"}
Jan 28 17:13:20 crc kubenswrapper[4811]: I0128 17:13:20.468802 4811 generic.go:334] "Generic (PLEG): container finished" podID="d377d56c-3df1-4e24-91c7-3832238b2ffe" containerID="bda5c980a39f02f63a7d73b86b8ba5eabd4341ae8454c7126dba36962bac03cb" exitCode=0
Jan 28 17:13:20 crc kubenswrapper[4811]: I0128 17:13:20.728976 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-668769d4f5-p8fsw"
Jan 28 17:13:20 crc kubenswrapper[4811]: I0128 17:13:20.841772 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d377d56c-3df1-4e24-91c7-3832238b2ffe-ovsdbserver-nb\") pod \"d377d56c-3df1-4e24-91c7-3832238b2ffe\" (UID: \"d377d56c-3df1-4e24-91c7-3832238b2ffe\") "
Jan 28 17:13:20 crc kubenswrapper[4811]: I0128 17:13:20.841841 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d377d56c-3df1-4e24-91c7-3832238b2ffe-config\") pod \"d377d56c-3df1-4e24-91c7-3832238b2ffe\" (UID: \"d377d56c-3df1-4e24-91c7-3832238b2ffe\") "
Jan 28 17:13:20 crc kubenswrapper[4811]: I0128 17:13:20.841891 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-krdkg\" (UniqueName: \"kubernetes.io/projected/d377d56c-3df1-4e24-91c7-3832238b2ffe-kube-api-access-krdkg\") pod \"d377d56c-3df1-4e24-91c7-3832238b2ffe\" (UID: \"d377d56c-3df1-4e24-91c7-3832238b2ffe\") "
Jan 28 17:13:20 crc kubenswrapper[4811]: I0128 17:13:20.841912 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d377d56c-3df1-4e24-91c7-3832238b2ffe-dns-svc\") pod \"d377d56c-3df1-4e24-91c7-3832238b2ffe\" (UID: \"d377d56c-3df1-4e24-91c7-3832238b2ffe\") "
Jan 28 17:13:20 crc kubenswrapper[4811]: I0128 17:13:20.847421 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d377d56c-3df1-4e24-91c7-3832238b2ffe-kube-api-access-krdkg" (OuterVolumeSpecName: "kube-api-access-krdkg") pod "d377d56c-3df1-4e24-91c7-3832238b2ffe" (UID: "d377d56c-3df1-4e24-91c7-3832238b2ffe"). InnerVolumeSpecName "kube-api-access-krdkg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 17:13:20 crc kubenswrapper[4811]: I0128 17:13:20.861238 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d377d56c-3df1-4e24-91c7-3832238b2ffe-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d377d56c-3df1-4e24-91c7-3832238b2ffe" (UID: "d377d56c-3df1-4e24-91c7-3832238b2ffe"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 17:13:20 crc kubenswrapper[4811]: I0128 17:13:20.861845 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d377d56c-3df1-4e24-91c7-3832238b2ffe-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d377d56c-3df1-4e24-91c7-3832238b2ffe" (UID: "d377d56c-3df1-4e24-91c7-3832238b2ffe"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 17:13:20 crc kubenswrapper[4811]: I0128 17:13:20.862696 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d377d56c-3df1-4e24-91c7-3832238b2ffe-config" (OuterVolumeSpecName: "config") pod "d377d56c-3df1-4e24-91c7-3832238b2ffe" (UID: "d377d56c-3df1-4e24-91c7-3832238b2ffe"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 17:13:20 crc kubenswrapper[4811]: I0128 17:13:20.943554 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d377d56c-3df1-4e24-91c7-3832238b2ffe-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Jan 28 17:13:20 crc kubenswrapper[4811]: I0128 17:13:20.943623 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d377d56c-3df1-4e24-91c7-3832238b2ffe-config\") on node \"crc\" DevicePath \"\""
Jan 28 17:13:20 crc kubenswrapper[4811]: I0128 17:13:20.943635 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-krdkg\" (UniqueName: \"kubernetes.io/projected/d377d56c-3df1-4e24-91c7-3832238b2ffe-kube-api-access-krdkg\") on node \"crc\" DevicePath \"\""
Jan 28 17:13:20 crc kubenswrapper[4811]: I0128 17:13:20.943647 4811 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d377d56c-3df1-4e24-91c7-3832238b2ffe-dns-svc\") on node \"crc\" DevicePath \"\""
Jan 28 17:13:21 crc kubenswrapper[4811]: I0128 17:13:21.478831 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-668769d4f5-p8fsw"
Jan 28 17:13:21 crc kubenswrapper[4811]: I0128 17:13:21.478821 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-668769d4f5-p8fsw" event={"ID":"d377d56c-3df1-4e24-91c7-3832238b2ffe","Type":"ContainerDied","Data":"a0db60bab250556a6b82f9a5d303b8a7ff6b05bd8cc3d6068801c4dcee42359a"}
Jan 28 17:13:21 crc kubenswrapper[4811]: I0128 17:13:21.478975 4811 scope.go:117] "RemoveContainer" containerID="bda5c980a39f02f63a7d73b86b8ba5eabd4341ae8454c7126dba36962bac03cb"
Jan 28 17:13:21 crc kubenswrapper[4811]: I0128 17:13:21.482771 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554585dc-mbttd" event={"ID":"bfd82619-c9bd-4c64-a3a4-dd6b85b48126","Type":"ContainerStarted","Data":"d595342cfabbc9b91f4fdbf9dc72ea898825c6fd1e6f1419adb950fe2bedc8fd"}
Jan 28 17:13:21 crc kubenswrapper[4811]: I0128 17:13:21.483414 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8554585dc-mbttd"
Jan 28 17:13:21 crc kubenswrapper[4811]: I0128 17:13:21.508932 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8554585dc-mbttd" podStartSLOduration=3.508914072 podStartE2EDuration="3.508914072s" podCreationTimestamp="2026-01-28 17:13:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:13:21.505857189 +0000 UTC m=+5294.260220782" watchObservedRunningTime="2026-01-28 17:13:21.508914072 +0000 UTC m=+5294.263277655"
Jan 28 17:13:21 crc kubenswrapper[4811]: I0128 17:13:21.582306 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-668769d4f5-p8fsw"]
Jan 28 17:13:21 crc kubenswrapper[4811]: I0128 17:13:21.588900 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-668769d4f5-p8fsw"]
Jan 28 17:13:22 crc kubenswrapper[4811]: I0128 17:13:22.349375 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d377d56c-3df1-4e24-91c7-3832238b2ffe" path="/var/lib/kubelet/pods/d377d56c-3df1-4e24-91c7-3832238b2ffe/volumes"
Jan 28 17:13:22 crc kubenswrapper[4811]: I0128 17:13:22.350622 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-2"
Jan 28 17:13:22 crc kubenswrapper[4811]: I0128 17:13:22.521031 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-2"
Jan 28 17:13:22 crc kubenswrapper[4811]: I0128 17:13:22.523622 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-1"
Jan 28 17:13:24 crc kubenswrapper[4811]: I0128 17:13:24.929395 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-copy-data"]
Jan 28 17:13:24 crc kubenswrapper[4811]: E0128 17:13:24.930726 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d377d56c-3df1-4e24-91c7-3832238b2ffe" containerName="init"
Jan 28 17:13:24 crc kubenswrapper[4811]: I0128 17:13:24.930755 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d377d56c-3df1-4e24-91c7-3832238b2ffe" containerName="init"
Jan 28 17:13:24 crc kubenswrapper[4811]: I0128 17:13:24.931065 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d377d56c-3df1-4e24-91c7-3832238b2ffe" containerName="init"
Jan 28 17:13:24 crc kubenswrapper[4811]: I0128 17:13:24.932032 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data"
Jan 28 17:13:24 crc kubenswrapper[4811]: I0128 17:13:24.935923 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovn-data-cert"
Jan 28 17:13:24 crc kubenswrapper[4811]: I0128 17:13:24.956720 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"]
Jan 28 17:13:25 crc kubenswrapper[4811]: I0128 17:13:25.012271 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/3a0ba99c-11cb-4141-b009-c28e97e90203-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"3a0ba99c-11cb-4141-b009-c28e97e90203\") " pod="openstack/ovn-copy-data"
Jan 28 17:13:25 crc kubenswrapper[4811]: I0128 17:13:25.012359 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-91252e19-9c58-4a3e-b98f-6f9217fcfcfc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-91252e19-9c58-4a3e-b98f-6f9217fcfcfc\") pod \"ovn-copy-data\" (UID: \"3a0ba99c-11cb-4141-b009-c28e97e90203\") " pod="openstack/ovn-copy-data"
Jan 28 17:13:25 crc kubenswrapper[4811]: I0128 17:13:25.012408 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-685s2\" (UniqueName: \"kubernetes.io/projected/3a0ba99c-11cb-4141-b009-c28e97e90203-kube-api-access-685s2\") pod \"ovn-copy-data\" (UID: \"3a0ba99c-11cb-4141-b009-c28e97e90203\") " pod="openstack/ovn-copy-data"
Jan 28 17:13:25 crc kubenswrapper[4811]: I0128 17:13:25.113926 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/3a0ba99c-11cb-4141-b009-c28e97e90203-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"3a0ba99c-11cb-4141-b009-c28e97e90203\") " pod="openstack/ovn-copy-data"
Jan 28 17:13:25 crc kubenswrapper[4811]: I0128 17:13:25.114052 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-91252e19-9c58-4a3e-b98f-6f9217fcfcfc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-91252e19-9c58-4a3e-b98f-6f9217fcfcfc\") pod \"ovn-copy-data\" (UID: \"3a0ba99c-11cb-4141-b009-c28e97e90203\") " pod="openstack/ovn-copy-data"
Jan 28 17:13:25 crc kubenswrapper[4811]: I0128 17:13:25.114110 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-685s2\" (UniqueName: \"kubernetes.io/projected/3a0ba99c-11cb-4141-b009-c28e97e90203-kube-api-access-685s2\") pod \"ovn-copy-data\" (UID: \"3a0ba99c-11cb-4141-b009-c28e97e90203\") " pod="openstack/ovn-copy-data"
Jan 28 17:13:25 crc kubenswrapper[4811]: I0128 17:13:25.117767 4811 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Jan 28 17:13:25 crc kubenswrapper[4811]: I0128 17:13:25.117870 4811 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-91252e19-9c58-4a3e-b98f-6f9217fcfcfc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-91252e19-9c58-4a3e-b98f-6f9217fcfcfc\") pod \"ovn-copy-data\" (UID: \"3a0ba99c-11cb-4141-b009-c28e97e90203\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/2f580c444bcfc133ed241c8825864029847f1d5eed311df5f70281201421ec91/globalmount\"" pod="openstack/ovn-copy-data"
Jan 28 17:13:25 crc kubenswrapper[4811]: I0128 17:13:25.122169 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/3a0ba99c-11cb-4141-b009-c28e97e90203-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"3a0ba99c-11cb-4141-b009-c28e97e90203\") " pod="openstack/ovn-copy-data"
Jan 28 17:13:25 crc kubenswrapper[4811]: I0128 17:13:25.137402 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-685s2\" (UniqueName: \"kubernetes.io/projected/3a0ba99c-11cb-4141-b009-c28e97e90203-kube-api-access-685s2\") pod \"ovn-copy-data\" (UID: \"3a0ba99c-11cb-4141-b009-c28e97e90203\") " pod="openstack/ovn-copy-data"
Jan 28 17:13:25 crc kubenswrapper[4811]: I0128 17:13:25.152984 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-91252e19-9c58-4a3e-b98f-6f9217fcfcfc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-91252e19-9c58-4a3e-b98f-6f9217fcfcfc\") pod \"ovn-copy-data\" (UID: \"3a0ba99c-11cb-4141-b009-c28e97e90203\") " pod="openstack/ovn-copy-data"
Jan 28 17:13:25 crc kubenswrapper[4811]: I0128 17:13:25.263570 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data"
Jan 28 17:13:25 crc kubenswrapper[4811]: W0128 17:13:25.815639 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3a0ba99c_11cb_4141_b009_c28e97e90203.slice/crio-54a97e20035688afd859212e2305e91162aad27b48b08aba2af4038baebf32b4 WatchSource:0}: Error finding container 54a97e20035688afd859212e2305e91162aad27b48b08aba2af4038baebf32b4: Status 404 returned error can't find the container with id 54a97e20035688afd859212e2305e91162aad27b48b08aba2af4038baebf32b4
Jan 28 17:13:25 crc kubenswrapper[4811]: I0128 17:13:25.816417 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"]
Jan 28 17:13:25 crc kubenswrapper[4811]: I0128 17:13:25.818977 4811 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 28 17:13:26 crc kubenswrapper[4811]: I0128 17:13:26.523047 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"3a0ba99c-11cb-4141-b009-c28e97e90203","Type":"ContainerStarted","Data":"db6c7904814c87b1d1bc4df75340e3269e2f0ade2624542286a806e6cc2b5a4b"}
Jan 28 17:13:26 crc kubenswrapper[4811]: I0128 17:13:26.523493 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"3a0ba99c-11cb-4141-b009-c28e97e90203","Type":"ContainerStarted","Data":"54a97e20035688afd859212e2305e91162aad27b48b08aba2af4038baebf32b4"}
Jan 28 17:13:26 crc kubenswrapper[4811]: I0128 17:13:26.544385 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-copy-data" podStartSLOduration=3.054212603 podStartE2EDuration="3.544365302s" podCreationTimestamp="2026-01-28 17:13:23 +0000 UTC" firstStartedPulling="2026-01-28 17:13:25.818699349 +0000 UTC m=+5298.573062952" lastFinishedPulling="2026-01-28 17:13:26.308852068 +0000 UTC m=+5299.063215651" observedRunningTime="2026-01-28 17:13:26.536342213 +0000 UTC m=+5299.290705796" watchObservedRunningTime="2026-01-28 17:13:26.544365302 +0000 UTC m=+5299.298728875"
Jan 28 17:13:29 crc kubenswrapper[4811]: I0128 17:13:29.346860 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8554585dc-mbttd"
Jan 28 17:13:29 crc kubenswrapper[4811]: I0128 17:13:29.401998 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-699964fbc-r79rf"]
Jan 28 17:13:29 crc kubenswrapper[4811]: I0128 17:13:29.402343 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-699964fbc-r79rf" podUID="073b884f-1b51-425e-a2eb-5e7e137a58d1" containerName="dnsmasq-dns" containerID="cri-o://490d4c0fe896d9635f66c94e540357ca1dd9b2290afdfcdcade8a62a7d524aeb" gracePeriod=10
Jan 28 17:13:29 crc kubenswrapper[4811]: I0128 17:13:29.553311 4811 generic.go:334] "Generic (PLEG): container finished" podID="073b884f-1b51-425e-a2eb-5e7e137a58d1" containerID="490d4c0fe896d9635f66c94e540357ca1dd9b2290afdfcdcade8a62a7d524aeb" exitCode=0
Jan 28 17:13:29 crc kubenswrapper[4811]: I0128 17:13:29.553360 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-699964fbc-r79rf" event={"ID":"073b884f-1b51-425e-a2eb-5e7e137a58d1","Type":"ContainerDied","Data":"490d4c0fe896d9635f66c94e540357ca1dd9b2290afdfcdcade8a62a7d524aeb"}
Jan 28 17:13:29 crc kubenswrapper[4811]: I0128 17:13:29.871739 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-699964fbc-r79rf"
Jan 28 17:13:29 crc kubenswrapper[4811]: I0128 17:13:29.999099 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/073b884f-1b51-425e-a2eb-5e7e137a58d1-dns-svc\") pod \"073b884f-1b51-425e-a2eb-5e7e137a58d1\" (UID: \"073b884f-1b51-425e-a2eb-5e7e137a58d1\") "
Jan 28 17:13:29 crc kubenswrapper[4811]: I0128 17:13:29.999272 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sm4xw\" (UniqueName: \"kubernetes.io/projected/073b884f-1b51-425e-a2eb-5e7e137a58d1-kube-api-access-sm4xw\") pod \"073b884f-1b51-425e-a2eb-5e7e137a58d1\" (UID: \"073b884f-1b51-425e-a2eb-5e7e137a58d1\") "
Jan 28 17:13:29 crc kubenswrapper[4811]: I0128 17:13:29.999378 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/073b884f-1b51-425e-a2eb-5e7e137a58d1-config\") pod \"073b884f-1b51-425e-a2eb-5e7e137a58d1\" (UID: \"073b884f-1b51-425e-a2eb-5e7e137a58d1\") "
Jan 28 17:13:30 crc kubenswrapper[4811]: I0128 17:13:30.005800 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/073b884f-1b51-425e-a2eb-5e7e137a58d1-kube-api-access-sm4xw" (OuterVolumeSpecName: "kube-api-access-sm4xw") pod "073b884f-1b51-425e-a2eb-5e7e137a58d1" (UID: "073b884f-1b51-425e-a2eb-5e7e137a58d1"). InnerVolumeSpecName "kube-api-access-sm4xw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 17:13:30 crc kubenswrapper[4811]: I0128 17:13:30.055656 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/073b884f-1b51-425e-a2eb-5e7e137a58d1-config" (OuterVolumeSpecName: "config") pod "073b884f-1b51-425e-a2eb-5e7e137a58d1" (UID: "073b884f-1b51-425e-a2eb-5e7e137a58d1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 17:13:30 crc kubenswrapper[4811]: I0128 17:13:30.065502 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/073b884f-1b51-425e-a2eb-5e7e137a58d1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "073b884f-1b51-425e-a2eb-5e7e137a58d1" (UID: "073b884f-1b51-425e-a2eb-5e7e137a58d1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 17:13:30 crc kubenswrapper[4811]: I0128 17:13:30.101771 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sm4xw\" (UniqueName: \"kubernetes.io/projected/073b884f-1b51-425e-a2eb-5e7e137a58d1-kube-api-access-sm4xw\") on node \"crc\" DevicePath \"\""
Jan 28 17:13:30 crc kubenswrapper[4811]: I0128 17:13:30.102007 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/073b884f-1b51-425e-a2eb-5e7e137a58d1-config\") on node \"crc\" DevicePath \"\""
Jan 28 17:13:30 crc kubenswrapper[4811]: I0128 17:13:30.102021 4811 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/073b884f-1b51-425e-a2eb-5e7e137a58d1-dns-svc\") on node \"crc\" DevicePath \"\""
Jan 28 17:13:30 crc kubenswrapper[4811]: I0128 17:13:30.369494 4811 scope.go:117] "RemoveContainer" containerID="7b70eaff5d2cf822dc041050e95eda7d0e2b9c322f3a51f5919a91942370b5f6"
Jan 28 17:13:30 crc kubenswrapper[4811]: I0128 17:13:30.392588 4811 scope.go:117] "RemoveContainer" containerID="490d4c0fe896d9635f66c94e540357ca1dd9b2290afdfcdcade8a62a7d524aeb"
Jan 28 17:13:30 crc kubenswrapper[4811]: I0128 17:13:30.558605 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-699964fbc-r79rf"
Jan 28 17:13:30 crc kubenswrapper[4811]: I0128 17:13:30.559001 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-699964fbc-r79rf" event={"ID":"073b884f-1b51-425e-a2eb-5e7e137a58d1","Type":"ContainerDied","Data":"8c5f90a4f0a2cf04094eeabd88c420ded5a4051a937fdcb2b4af9fd251d0faf4"}
Jan 28 17:13:30 crc kubenswrapper[4811]: I0128 17:13:30.591333 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-699964fbc-r79rf"]
Jan 28 17:13:30 crc kubenswrapper[4811]: I0128 17:13:30.597313 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-699964fbc-r79rf"]
Jan 28 17:13:31 crc kubenswrapper[4811]: I0128 17:13:31.312949 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"]
Jan 28 17:13:31 crc kubenswrapper[4811]: E0128 17:13:31.313354 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="073b884f-1b51-425e-a2eb-5e7e137a58d1" containerName="dnsmasq-dns"
Jan 28 17:13:31 crc kubenswrapper[4811]: I0128 17:13:31.313381 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="073b884f-1b51-425e-a2eb-5e7e137a58d1" containerName="dnsmasq-dns"
Jan 28 17:13:31 crc kubenswrapper[4811]: E0128 17:13:31.313401 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="073b884f-1b51-425e-a2eb-5e7e137a58d1" containerName="init"
Jan 28 17:13:31 crc kubenswrapper[4811]: I0128 17:13:31.313411 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="073b884f-1b51-425e-a2eb-5e7e137a58d1" containerName="init"
Jan 28 17:13:31 crc kubenswrapper[4811]: I0128 17:13:31.313630 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="073b884f-1b51-425e-a2eb-5e7e137a58d1" containerName="dnsmasq-dns"
Jan 28 17:13:31 crc kubenswrapper[4811]: I0128 17:13:31.314616 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Jan 28 17:13:31 crc kubenswrapper[4811]: I0128 17:13:31.317364 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts"
Jan 28 17:13:31 crc kubenswrapper[4811]: I0128 17:13:31.321104 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-d67sd"
Jan 28 17:13:31 crc kubenswrapper[4811]: I0128 17:13:31.321198 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config"
Jan 28 17:13:31 crc kubenswrapper[4811]: I0128 17:13:31.333142 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Jan 28 17:13:31 crc kubenswrapper[4811]: I0128 17:13:31.422446 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqxrg\" (UniqueName: \"kubernetes.io/projected/5c66a38a-138b-4a1a-a44e-fe517ad7ae99-kube-api-access-vqxrg\") pod \"ovn-northd-0\" (UID: \"5c66a38a-138b-4a1a-a44e-fe517ad7ae99\") " pod="openstack/ovn-northd-0"
Jan 28 17:13:31 crc kubenswrapper[4811]: I0128 17:13:31.422763 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c66a38a-138b-4a1a-a44e-fe517ad7ae99-config\") pod \"ovn-northd-0\" (UID: \"5c66a38a-138b-4a1a-a44e-fe517ad7ae99\") " pod="openstack/ovn-northd-0"
Jan 28 17:13:31 crc kubenswrapper[4811]: I0128 17:13:31.422795 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5c66a38a-138b-4a1a-a44e-fe517ad7ae99-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"5c66a38a-138b-4a1a-a44e-fe517ad7ae99\") " pod="openstack/ovn-northd-0"
Jan 28 17:13:31 crc kubenswrapper[4811]: I0128 17:13:31.422932 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5c66a38a-138b-4a1a-a44e-fe517ad7ae99-scripts\") pod \"ovn-northd-0\" (UID: \"5c66a38a-138b-4a1a-a44e-fe517ad7ae99\") " pod="openstack/ovn-northd-0"
Jan 28 17:13:31 crc kubenswrapper[4811]: I0128 17:13:31.423024 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c66a38a-138b-4a1a-a44e-fe517ad7ae99-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"5c66a38a-138b-4a1a-a44e-fe517ad7ae99\") " pod="openstack/ovn-northd-0"
Jan 28 17:13:31 crc kubenswrapper[4811]: I0128 17:13:31.524606 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqxrg\" (UniqueName: \"kubernetes.io/projected/5c66a38a-138b-4a1a-a44e-fe517ad7ae99-kube-api-access-vqxrg\") pod \"ovn-northd-0\" (UID: \"5c66a38a-138b-4a1a-a44e-fe517ad7ae99\") " pod="openstack/ovn-northd-0"
Jan 28 17:13:31 crc kubenswrapper[4811]: I0128 17:13:31.524672 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c66a38a-138b-4a1a-a44e-fe517ad7ae99-config\") pod \"ovn-northd-0\" (UID: \"5c66a38a-138b-4a1a-a44e-fe517ad7ae99\") " pod="openstack/ovn-northd-0"
Jan 28 17:13:31 crc kubenswrapper[4811]: I0128 17:13:31.524712 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5c66a38a-138b-4a1a-a44e-fe517ad7ae99-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"5c66a38a-138b-4a1a-a44e-fe517ad7ae99\") " pod="openstack/ovn-northd-0"
Jan 28 17:13:31 crc kubenswrapper[4811]: I0128 17:13:31.524784 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5c66a38a-138b-4a1a-a44e-fe517ad7ae99-scripts\") pod \"ovn-northd-0\" (UID: \"5c66a38a-138b-4a1a-a44e-fe517ad7ae99\") " pod="openstack/ovn-northd-0"
Jan 28 17:13:31 crc kubenswrapper[4811]: I0128 17:13:31.524827 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c66a38a-138b-4a1a-a44e-fe517ad7ae99-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"5c66a38a-138b-4a1a-a44e-fe517ad7ae99\") " pod="openstack/ovn-northd-0"
Jan 28 17:13:31 crc kubenswrapper[4811]: I0128 17:13:31.525845 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c66a38a-138b-4a1a-a44e-fe517ad7ae99-config\") pod \"ovn-northd-0\" (UID: \"5c66a38a-138b-4a1a-a44e-fe517ad7ae99\") " pod="openstack/ovn-northd-0"
Jan 28 17:13:31 crc kubenswrapper[4811]: I0128 17:13:31.526189 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5c66a38a-138b-4a1a-a44e-fe517ad7ae99-scripts\") pod \"ovn-northd-0\" (UID: \"5c66a38a-138b-4a1a-a44e-fe517ad7ae99\") " pod="openstack/ovn-northd-0"
Jan 28 17:13:31 crc kubenswrapper[4811]: I0128 17:13:31.526770 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5c66a38a-138b-4a1a-a44e-fe517ad7ae99-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"5c66a38a-138b-4a1a-a44e-fe517ad7ae99\") " pod="openstack/ovn-northd-0"
Jan 28 17:13:31 crc kubenswrapper[4811]: I0128 17:13:31.532608 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c66a38a-138b-4a1a-a44e-fe517ad7ae99-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"5c66a38a-138b-4a1a-a44e-fe517ad7ae99\") " pod="openstack/ovn-northd-0"
Jan 28 17:13:31 crc kubenswrapper[4811]: I0128 17:13:31.545762 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqxrg\" (UniqueName: \"kubernetes.io/projected/5c66a38a-138b-4a1a-a44e-fe517ad7ae99-kube-api-access-vqxrg\") pod \"ovn-northd-0\" (UID: \"5c66a38a-138b-4a1a-a44e-fe517ad7ae99\") " pod="openstack/ovn-northd-0"
Jan 28 17:13:31 crc kubenswrapper[4811]: I0128 17:13:31.632631 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Jan 28 17:13:32 crc kubenswrapper[4811]: I0128 17:13:32.039688 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Jan 28 17:13:32 crc kubenswrapper[4811]: W0128 17:13:32.042844 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c66a38a_138b_4a1a_a44e_fe517ad7ae99.slice/crio-08b056ac8b78cd0772e512076a906d502e3b265f8a91e742ed80fd0ee0f83e1c WatchSource:0}: Error finding container 08b056ac8b78cd0772e512076a906d502e3b265f8a91e742ed80fd0ee0f83e1c: Status 404 returned error can't find the container with id 08b056ac8b78cd0772e512076a906d502e3b265f8a91e742ed80fd0ee0f83e1c
Jan 28 17:13:32 crc kubenswrapper[4811]: I0128 17:13:32.351584 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="073b884f-1b51-425e-a2eb-5e7e137a58d1" path="/var/lib/kubelet/pods/073b884f-1b51-425e-a2eb-5e7e137a58d1/volumes"
Jan 28 17:13:32 crc kubenswrapper[4811]: I0128 17:13:32.577914 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"5c66a38a-138b-4a1a-a44e-fe517ad7ae99","Type":"ContainerStarted","Data":"4a0da680748f97d9e99ffe44fb1406a44ccad1b81994d864ab7d07b2621e5fe3"}
Jan 28 17:13:32 crc kubenswrapper[4811]: I0128 17:13:32.577963 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"5c66a38a-138b-4a1a-a44e-fe517ad7ae99","Type":"ContainerStarted","Data":"dcb3c4636f7cd3ab55f0a2050320396a175afd22fb609741672308f60f4c1e68"}
Jan 28 17:13:32 crc kubenswrapper[4811]: I0128 17:13:32.577974 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"5c66a38a-138b-4a1a-a44e-fe517ad7ae99","Type":"ContainerStarted","Data":"08b056ac8b78cd0772e512076a906d502e3b265f8a91e742ed80fd0ee0f83e1c"}
Jan 28 17:13:32 crc kubenswrapper[4811]: I0128 17:13:32.578077 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0"
Jan 28 17:13:32 crc kubenswrapper[4811]: I0128 17:13:32.602077 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=1.602060144 podStartE2EDuration="1.602060144s" podCreationTimestamp="2026-01-28 17:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:13:32.601022826 +0000 UTC m=+5305.355386409" watchObservedRunningTime="2026-01-28 17:13:32.602060144 +0000 UTC m=+5305.356423727"
Jan 28 17:13:36 crc kubenswrapper[4811]: I0128 17:13:36.511239 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-b9qzk"]
Jan 28 17:13:36 crc kubenswrapper[4811]: I0128 17:13:36.513065 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-b9qzk"
Jan 28 17:13:36 crc kubenswrapper[4811]: I0128 17:13:36.539090 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-b9qzk"]
Jan 28 17:13:36 crc kubenswrapper[4811]: I0128 17:13:36.592810 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vx4mg\" (UniqueName: \"kubernetes.io/projected/ebe6770a-3eae-4a81-82fe-394dbf30b626-kube-api-access-vx4mg\") pod \"keystone-db-create-b9qzk\" (UID: \"ebe6770a-3eae-4a81-82fe-394dbf30b626\") " pod="openstack/keystone-db-create-b9qzk"
Jan 28 17:13:36 crc kubenswrapper[4811]: I0128 17:13:36.592977 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ebe6770a-3eae-4a81-82fe-394dbf30b626-operator-scripts\") pod \"keystone-db-create-b9qzk\" (UID: \"ebe6770a-3eae-4a81-82fe-394dbf30b626\") " pod="openstack/keystone-db-create-b9qzk"
Jan 28 17:13:36 crc kubenswrapper[4811]: I0128 17:13:36.612871 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bf7c-account-create-update-mtvzp"]
Jan 28 17:13:36 crc kubenswrapper[4811]: I0128 17:13:36.613940 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bf7c-account-create-update-mtvzp"
Jan 28 17:13:36 crc kubenswrapper[4811]: I0128 17:13:36.616532 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret"
Jan 28 17:13:36 crc kubenswrapper[4811]: I0128 17:13:36.622835 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bf7c-account-create-update-mtvzp"]
Jan 28 17:13:36 crc kubenswrapper[4811]: I0128 17:13:36.698555 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqkbw\" (UniqueName: \"kubernetes.io/projected/94b13d6f-c55f-4343-a54b-c40781c41409-kube-api-access-rqkbw\") pod \"keystone-bf7c-account-create-update-mtvzp\" (UID: \"94b13d6f-c55f-4343-a54b-c40781c41409\") " pod="openstack/keystone-bf7c-account-create-update-mtvzp"
Jan 28 17:13:36 crc kubenswrapper[4811]: I0128 17:13:36.698673 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ebe6770a-3eae-4a81-82fe-394dbf30b626-operator-scripts\") pod \"keystone-db-create-b9qzk\" (UID: \"ebe6770a-3eae-4a81-82fe-394dbf30b626\") " pod="openstack/keystone-db-create-b9qzk"
Jan 28 17:13:36 crc kubenswrapper[4811]: I0128 17:13:36.698914 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/94b13d6f-c55f-4343-a54b-c40781c41409-operator-scripts\") pod \"keystone-bf7c-account-create-update-mtvzp\" (UID: \"94b13d6f-c55f-4343-a54b-c40781c41409\") " pod="openstack/keystone-bf7c-account-create-update-mtvzp"
Jan 28 17:13:36 crc kubenswrapper[4811]: I0128 17:13:36.699016 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vx4mg\" (UniqueName: \"kubernetes.io/projected/ebe6770a-3eae-4a81-82fe-394dbf30b626-kube-api-access-vx4mg\") pod \"keystone-db-create-b9qzk\" (UID: \"ebe6770a-3eae-4a81-82fe-394dbf30b626\") " pod="openstack/keystone-db-create-b9qzk"
Jan 28 17:13:36 crc kubenswrapper[4811]: I0128 17:13:36.699396 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for
volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ebe6770a-3eae-4a81-82fe-394dbf30b626-operator-scripts\") pod \"keystone-db-create-b9qzk\" (UID: \"ebe6770a-3eae-4a81-82fe-394dbf30b626\") " pod="openstack/keystone-db-create-b9qzk" Jan 28 17:13:36 crc kubenswrapper[4811]: I0128 17:13:36.720560 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vx4mg\" (UniqueName: \"kubernetes.io/projected/ebe6770a-3eae-4a81-82fe-394dbf30b626-kube-api-access-vx4mg\") pod \"keystone-db-create-b9qzk\" (UID: \"ebe6770a-3eae-4a81-82fe-394dbf30b626\") " pod="openstack/keystone-db-create-b9qzk" Jan 28 17:13:36 crc kubenswrapper[4811]: I0128 17:13:36.800640 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/94b13d6f-c55f-4343-a54b-c40781c41409-operator-scripts\") pod \"keystone-bf7c-account-create-update-mtvzp\" (UID: \"94b13d6f-c55f-4343-a54b-c40781c41409\") " pod="openstack/keystone-bf7c-account-create-update-mtvzp" Jan 28 17:13:36 crc kubenswrapper[4811]: I0128 17:13:36.800721 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqkbw\" (UniqueName: \"kubernetes.io/projected/94b13d6f-c55f-4343-a54b-c40781c41409-kube-api-access-rqkbw\") pod \"keystone-bf7c-account-create-update-mtvzp\" (UID: \"94b13d6f-c55f-4343-a54b-c40781c41409\") " pod="openstack/keystone-bf7c-account-create-update-mtvzp" Jan 28 17:13:36 crc kubenswrapper[4811]: I0128 17:13:36.801727 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/94b13d6f-c55f-4343-a54b-c40781c41409-operator-scripts\") pod \"keystone-bf7c-account-create-update-mtvzp\" (UID: \"94b13d6f-c55f-4343-a54b-c40781c41409\") " pod="openstack/keystone-bf7c-account-create-update-mtvzp" Jan 28 17:13:36 crc kubenswrapper[4811]: I0128 17:13:36.818623 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqkbw\" (UniqueName: \"kubernetes.io/projected/94b13d6f-c55f-4343-a54b-c40781c41409-kube-api-access-rqkbw\") pod \"keystone-bf7c-account-create-update-mtvzp\" (UID: \"94b13d6f-c55f-4343-a54b-c40781c41409\") " pod="openstack/keystone-bf7c-account-create-update-mtvzp" Jan 28 17:13:36 crc kubenswrapper[4811]: I0128 17:13:36.851037 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-b9qzk" Jan 28 17:13:36 crc kubenswrapper[4811]: I0128 17:13:36.931814 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bf7c-account-create-update-mtvzp" Jan 28 17:13:37 crc kubenswrapper[4811]: I0128 17:13:37.377022 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-b9qzk"] Jan 28 17:13:37 crc kubenswrapper[4811]: W0128 17:13:37.383188 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podebe6770a_3eae_4a81_82fe_394dbf30b626.slice/crio-8734db365cebc40d24652b30f2573d3278ea02bbf47805f6b09bd9fe8bd855c2 WatchSource:0}: Error finding container 8734db365cebc40d24652b30f2573d3278ea02bbf47805f6b09bd9fe8bd855c2: Status 404 returned error can't find the container with id 8734db365cebc40d24652b30f2573d3278ea02bbf47805f6b09bd9fe8bd855c2 Jan 28 17:13:37 crc kubenswrapper[4811]: I0128 17:13:37.457217 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bf7c-account-create-update-mtvzp"] Jan 28 17:13:37 crc kubenswrapper[4811]: W0128 17:13:37.466734 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod94b13d6f_c55f_4343_a54b_c40781c41409.slice/crio-deb27a8ba3a4d9076b3cee526b45cb39bc9ccba873e327b67beaf05988970b30 WatchSource:0}: Error finding container deb27a8ba3a4d9076b3cee526b45cb39bc9ccba873e327b67beaf05988970b30: Status 404 returned error can't find the container with id deb27a8ba3a4d9076b3cee526b45cb39bc9ccba873e327b67beaf05988970b30 Jan 28 17:13:37 crc kubenswrapper[4811]: I0128 17:13:37.841343 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bf7c-account-create-update-mtvzp" event={"ID":"94b13d6f-c55f-4343-a54b-c40781c41409","Type":"ContainerStarted","Data":"deb27a8ba3a4d9076b3cee526b45cb39bc9ccba873e327b67beaf05988970b30"} Jan 28 17:13:37 crc kubenswrapper[4811]: I0128 17:13:37.842926 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-b9qzk" event={"ID":"ebe6770a-3eae-4a81-82fe-394dbf30b626","Type":"ContainerStarted","Data":"8734db365cebc40d24652b30f2573d3278ea02bbf47805f6b09bd9fe8bd855c2"} Jan 28 17:13:44 crc kubenswrapper[4811]: I0128 17:13:44.896800 4811 generic.go:334] "Generic (PLEG): container finished" podID="94b13d6f-c55f-4343-a54b-c40781c41409" containerID="77ff0980bc0f00ba861486350bd2b1ed7ad23137d7b7f6e986656c052ea6fbad" exitCode=0 Jan 28 17:13:44 crc kubenswrapper[4811]: I0128 17:13:44.896910 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bf7c-account-create-update-mtvzp" event={"ID":"94b13d6f-c55f-4343-a54b-c40781c41409","Type":"ContainerDied","Data":"77ff0980bc0f00ba861486350bd2b1ed7ad23137d7b7f6e986656c052ea6fbad"} Jan 28 17:13:44 crc kubenswrapper[4811]: I0128 17:13:44.899784 4811 generic.go:334] "Generic (PLEG): container finished" podID="ebe6770a-3eae-4a81-82fe-394dbf30b626" containerID="2e8d987cd6f83c706156d803de1081f5bfc5ec2df9fc7be106eded1131d5e417" exitCode=0 Jan 28 17:13:44 crc kubenswrapper[4811]: I0128 17:13:44.899830 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-b9qzk" event={"ID":"ebe6770a-3eae-4a81-82fe-394dbf30b626","Type":"ContainerDied","Data":"2e8d987cd6f83c706156d803de1081f5bfc5ec2df9fc7be106eded1131d5e417"} Jan 28 17:13:46 crc kubenswrapper[4811]: I0128 17:13:46.346786 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bf7c-account-create-update-mtvzp" Jan 28 17:13:46 crc kubenswrapper[4811]: I0128 17:13:46.361231 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-b9qzk" Jan 28 17:13:46 crc kubenswrapper[4811]: I0128 17:13:46.459146 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/94b13d6f-c55f-4343-a54b-c40781c41409-operator-scripts\") pod \"94b13d6f-c55f-4343-a54b-c40781c41409\" (UID: \"94b13d6f-c55f-4343-a54b-c40781c41409\") " Jan 28 17:13:46 crc kubenswrapper[4811]: I0128 17:13:46.459266 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rqkbw\" (UniqueName: \"kubernetes.io/projected/94b13d6f-c55f-4343-a54b-c40781c41409-kube-api-access-rqkbw\") pod \"94b13d6f-c55f-4343-a54b-c40781c41409\" (UID: \"94b13d6f-c55f-4343-a54b-c40781c41409\") " Jan 28 17:13:46 crc kubenswrapper[4811]: I0128 17:13:46.459331 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ebe6770a-3eae-4a81-82fe-394dbf30b626-operator-scripts\") pod \"ebe6770a-3eae-4a81-82fe-394dbf30b626\" (UID: \"ebe6770a-3eae-4a81-82fe-394dbf30b626\") " Jan 28 17:13:46 crc kubenswrapper[4811]: I0128 17:13:46.459480 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vx4mg\" (UniqueName: \"kubernetes.io/projected/ebe6770a-3eae-4a81-82fe-394dbf30b626-kube-api-access-vx4mg\") pod \"ebe6770a-3eae-4a81-82fe-394dbf30b626\" (UID: \"ebe6770a-3eae-4a81-82fe-394dbf30b626\") " Jan 28 17:13:46 crc kubenswrapper[4811]: I0128 17:13:46.460341 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ebe6770a-3eae-4a81-82fe-394dbf30b626-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ebe6770a-3eae-4a81-82fe-394dbf30b626" (UID: "ebe6770a-3eae-4a81-82fe-394dbf30b626"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:13:46 crc kubenswrapper[4811]: I0128 17:13:46.461272 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94b13d6f-c55f-4343-a54b-c40781c41409-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "94b13d6f-c55f-4343-a54b-c40781c41409" (UID: "94b13d6f-c55f-4343-a54b-c40781c41409"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:13:46 crc kubenswrapper[4811]: I0128 17:13:46.471034 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94b13d6f-c55f-4343-a54b-c40781c41409-kube-api-access-rqkbw" (OuterVolumeSpecName: "kube-api-access-rqkbw") pod "94b13d6f-c55f-4343-a54b-c40781c41409" (UID: "94b13d6f-c55f-4343-a54b-c40781c41409"). InnerVolumeSpecName "kube-api-access-rqkbw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:13:46 crc kubenswrapper[4811]: I0128 17:13:46.471146 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebe6770a-3eae-4a81-82fe-394dbf30b626-kube-api-access-vx4mg" (OuterVolumeSpecName: "kube-api-access-vx4mg") pod "ebe6770a-3eae-4a81-82fe-394dbf30b626" (UID: "ebe6770a-3eae-4a81-82fe-394dbf30b626"). InnerVolumeSpecName "kube-api-access-vx4mg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:13:46 crc kubenswrapper[4811]: I0128 17:13:46.560923 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ebe6770a-3eae-4a81-82fe-394dbf30b626-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:13:46 crc kubenswrapper[4811]: I0128 17:13:46.560961 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vx4mg\" (UniqueName: \"kubernetes.io/projected/ebe6770a-3eae-4a81-82fe-394dbf30b626-kube-api-access-vx4mg\") on node \"crc\" DevicePath \"\"" Jan 28 17:13:46 crc kubenswrapper[4811]: I0128 17:13:46.560972 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/94b13d6f-c55f-4343-a54b-c40781c41409-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:13:46 crc kubenswrapper[4811]: I0128 17:13:46.561169 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rqkbw\" (UniqueName: \"kubernetes.io/projected/94b13d6f-c55f-4343-a54b-c40781c41409-kube-api-access-rqkbw\") on node \"crc\" DevicePath \"\"" Jan 28 17:13:46 crc kubenswrapper[4811]: I0128 17:13:46.915337 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-b9qzk" event={"ID":"ebe6770a-3eae-4a81-82fe-394dbf30b626","Type":"ContainerDied","Data":"8734db365cebc40d24652b30f2573d3278ea02bbf47805f6b09bd9fe8bd855c2"} Jan 28 17:13:46 crc kubenswrapper[4811]: I0128 17:13:46.915385 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8734db365cebc40d24652b30f2573d3278ea02bbf47805f6b09bd9fe8bd855c2" Jan 28 17:13:46 crc kubenswrapper[4811]: I0128 17:13:46.915359 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-b9qzk" Jan 28 17:13:46 crc kubenswrapper[4811]: I0128 17:13:46.916663 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bf7c-account-create-update-mtvzp" event={"ID":"94b13d6f-c55f-4343-a54b-c40781c41409","Type":"ContainerDied","Data":"deb27a8ba3a4d9076b3cee526b45cb39bc9ccba873e327b67beaf05988970b30"} Jan 28 17:13:46 crc kubenswrapper[4811]: I0128 17:13:46.916692 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="deb27a8ba3a4d9076b3cee526b45cb39bc9ccba873e327b67beaf05988970b30" Jan 28 17:13:46 crc kubenswrapper[4811]: I0128 17:13:46.916702 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bf7c-account-create-update-mtvzp" Jan 28 17:13:51 crc kubenswrapper[4811]: I0128 17:13:51.692247 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Jan 28 17:13:52 crc kubenswrapper[4811]: I0128 17:13:52.145612 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-qzb7h"] Jan 28 17:13:52 crc kubenswrapper[4811]: E0128 17:13:52.145913 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94b13d6f-c55f-4343-a54b-c40781c41409" containerName="mariadb-account-create-update" Jan 28 17:13:52 crc kubenswrapper[4811]: I0128 17:13:52.145928 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="94b13d6f-c55f-4343-a54b-c40781c41409" containerName="mariadb-account-create-update" Jan 28 17:13:52 crc kubenswrapper[4811]: E0128 17:13:52.145958 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebe6770a-3eae-4a81-82fe-394dbf30b626" containerName="mariadb-database-create" Jan 28 17:13:52 crc kubenswrapper[4811]: I0128 17:13:52.145964 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebe6770a-3eae-4a81-82fe-394dbf30b626" containerName="mariadb-database-create" Jan 28 17:13:52 crc kubenswrapper[4811]: I0128 17:13:52.146099 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="94b13d6f-c55f-4343-a54b-c40781c41409" containerName="mariadb-account-create-update" Jan 28 17:13:52 crc kubenswrapper[4811]: I0128 17:13:52.146112 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebe6770a-3eae-4a81-82fe-394dbf30b626" containerName="mariadb-database-create" Jan 28 17:13:52 crc kubenswrapper[4811]: I0128 17:13:52.146673 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-qzb7h" Jan 28 17:13:52 crc kubenswrapper[4811]: I0128 17:13:52.150003 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 28 17:13:52 crc kubenswrapper[4811]: I0128 17:13:52.150187 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-n898w" Jan 28 17:13:52 crc kubenswrapper[4811]: I0128 17:13:52.150334 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 28 17:13:52 crc kubenswrapper[4811]: I0128 17:13:52.158717 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-qzb7h"] Jan 28 17:13:52 crc kubenswrapper[4811]: I0128 17:13:52.159050 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 28 17:13:52 crc kubenswrapper[4811]: I0128 17:13:52.261307 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/466b541a-ec7d-4826-947e-c57617810071-config-data\") pod \"keystone-db-sync-qzb7h\" (UID: \"466b541a-ec7d-4826-947e-c57617810071\") " pod="openstack/keystone-db-sync-qzb7h" Jan 28 17:13:52 crc kubenswrapper[4811]: I0128 17:13:52.261488 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grp9d\" (UniqueName: \"kubernetes.io/projected/466b541a-ec7d-4826-947e-c57617810071-kube-api-access-grp9d\") pod \"keystone-db-sync-qzb7h\" (UID: \"466b541a-ec7d-4826-947e-c57617810071\") " pod="openstack/keystone-db-sync-qzb7h" Jan 28 17:13:52 crc kubenswrapper[4811]: I0128 17:13:52.261651 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/466b541a-ec7d-4826-947e-c57617810071-combined-ca-bundle\") pod \"keystone-db-sync-qzb7h\" (UID: \"466b541a-ec7d-4826-947e-c57617810071\") " pod="openstack/keystone-db-sync-qzb7h" Jan 28 17:13:52 crc kubenswrapper[4811]: I0128 17:13:52.362730 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grp9d\" (UniqueName: \"kubernetes.io/projected/466b541a-ec7d-4826-947e-c57617810071-kube-api-access-grp9d\") pod \"keystone-db-sync-qzb7h\" (UID: \"466b541a-ec7d-4826-947e-c57617810071\") " pod="openstack/keystone-db-sync-qzb7h" Jan 28 17:13:52 crc kubenswrapper[4811]: I0128 17:13:52.362850 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/466b541a-ec7d-4826-947e-c57617810071-combined-ca-bundle\") pod \"keystone-db-sync-qzb7h\" (UID: \"466b541a-ec7d-4826-947e-c57617810071\") " pod="openstack/keystone-db-sync-qzb7h" Jan 28 17:13:52 crc kubenswrapper[4811]: I0128 17:13:52.362882 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/466b541a-ec7d-4826-947e-c57617810071-config-data\") pod \"keystone-db-sync-qzb7h\" (UID: \"466b541a-ec7d-4826-947e-c57617810071\") " pod="openstack/keystone-db-sync-qzb7h" Jan 28 17:13:52 crc kubenswrapper[4811]: I0128 17:13:52.368405 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/466b541a-ec7d-4826-947e-c57617810071-combined-ca-bundle\") pod \"keystone-db-sync-qzb7h\" (UID: \"466b541a-ec7d-4826-947e-c57617810071\") " 
pod="openstack/keystone-db-sync-qzb7h" Jan 28 17:13:52 crc kubenswrapper[4811]: I0128 17:13:52.368494 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/466b541a-ec7d-4826-947e-c57617810071-config-data\") pod \"keystone-db-sync-qzb7h\" (UID: \"466b541a-ec7d-4826-947e-c57617810071\") " pod="openstack/keystone-db-sync-qzb7h" Jan 28 17:13:52 crc kubenswrapper[4811]: I0128 17:13:52.382896 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grp9d\" (UniqueName: \"kubernetes.io/projected/466b541a-ec7d-4826-947e-c57617810071-kube-api-access-grp9d\") pod \"keystone-db-sync-qzb7h\" (UID: \"466b541a-ec7d-4826-947e-c57617810071\") " pod="openstack/keystone-db-sync-qzb7h" Jan 28 17:13:52 crc kubenswrapper[4811]: I0128 17:13:52.470811 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-qzb7h" Jan 28 17:13:53 crc kubenswrapper[4811]: I0128 17:13:53.001938 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-qzb7h"] Jan 28 17:13:53 crc kubenswrapper[4811]: I0128 17:13:53.965261 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-qzb7h" event={"ID":"466b541a-ec7d-4826-947e-c57617810071","Type":"ContainerStarted","Data":"624824b7668483f016b76d55e52975342301626cacae5e5277d127d1ef1dc302"} Jan 28 17:13:53 crc kubenswrapper[4811]: I0128 17:13:53.965623 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-qzb7h" event={"ID":"466b541a-ec7d-4826-947e-c57617810071","Type":"ContainerStarted","Data":"603113591e6d100214610284fa42bfbe7a0103752fa6e498e5858b029dd43045"} Jan 28 17:13:54 crc kubenswrapper[4811]: I0128 17:13:54.974391 4811 generic.go:334] "Generic (PLEG): container finished" podID="466b541a-ec7d-4826-947e-c57617810071" containerID="624824b7668483f016b76d55e52975342301626cacae5e5277d127d1ef1dc302" exitCode=0 Jan 28 17:13:54 crc kubenswrapper[4811]: I0128 17:13:54.974456 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-qzb7h" event={"ID":"466b541a-ec7d-4826-947e-c57617810071","Type":"ContainerDied","Data":"624824b7668483f016b76d55e52975342301626cacae5e5277d127d1ef1dc302"} Jan 28 17:13:56 crc kubenswrapper[4811]: I0128 17:13:56.333559 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-qzb7h" Jan 28 17:13:56 crc kubenswrapper[4811]: I0128 17:13:56.429587 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/466b541a-ec7d-4826-947e-c57617810071-combined-ca-bundle\") pod \"466b541a-ec7d-4826-947e-c57617810071\" (UID: \"466b541a-ec7d-4826-947e-c57617810071\") " Jan 28 17:13:56 crc kubenswrapper[4811]: I0128 17:13:56.429651 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/466b541a-ec7d-4826-947e-c57617810071-config-data\") pod \"466b541a-ec7d-4826-947e-c57617810071\" (UID: \"466b541a-ec7d-4826-947e-c57617810071\") " Jan 28 17:13:56 crc kubenswrapper[4811]: I0128 17:13:56.429780 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-grp9d\" (UniqueName: \"kubernetes.io/projected/466b541a-ec7d-4826-947e-c57617810071-kube-api-access-grp9d\") pod \"466b541a-ec7d-4826-947e-c57617810071\" (UID: \"466b541a-ec7d-4826-947e-c57617810071\") " Jan 28 17:13:56 crc kubenswrapper[4811]: I0128 17:13:56.436814 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/466b541a-ec7d-4826-947e-c57617810071-kube-api-access-grp9d" (OuterVolumeSpecName: "kube-api-access-grp9d") pod "466b541a-ec7d-4826-947e-c57617810071" (UID: "466b541a-ec7d-4826-947e-c57617810071"). InnerVolumeSpecName "kube-api-access-grp9d". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:13:56 crc kubenswrapper[4811]: I0128 17:13:56.455791 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/466b541a-ec7d-4826-947e-c57617810071-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "466b541a-ec7d-4826-947e-c57617810071" (UID: "466b541a-ec7d-4826-947e-c57617810071"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:13:56 crc kubenswrapper[4811]: I0128 17:13:56.479621 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/466b541a-ec7d-4826-947e-c57617810071-config-data" (OuterVolumeSpecName: "config-data") pod "466b541a-ec7d-4826-947e-c57617810071" (UID: "466b541a-ec7d-4826-947e-c57617810071"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:13:56 crc kubenswrapper[4811]: I0128 17:13:56.531359 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/466b541a-ec7d-4826-947e-c57617810071-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 17:13:56 crc kubenswrapper[4811]: I0128 17:13:56.531399 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/466b541a-ec7d-4826-947e-c57617810071-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 17:13:56 crc kubenswrapper[4811]: I0128 17:13:56.531408 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-grp9d\" (UniqueName: \"kubernetes.io/projected/466b541a-ec7d-4826-947e-c57617810071-kube-api-access-grp9d\") on node \"crc\" DevicePath \"\"" Jan 28 17:13:56 crc kubenswrapper[4811]: I0128 17:13:56.989713 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-qzb7h" event={"ID":"466b541a-ec7d-4826-947e-c57617810071","Type":"ContainerDied","Data":"603113591e6d100214610284fa42bfbe7a0103752fa6e498e5858b029dd43045"} Jan 28 17:13:56 crc kubenswrapper[4811]: I0128 17:13:56.990056 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="603113591e6d100214610284fa42bfbe7a0103752fa6e498e5858b029dd43045" Jan 28 17:13:56 crc kubenswrapper[4811]: I0128 17:13:56.989778 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-qzb7h" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.224070 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-79ddd5b787-8bg8x"] Jan 28 17:13:57 crc kubenswrapper[4811]: E0128 17:13:57.225683 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="466b541a-ec7d-4826-947e-c57617810071" containerName="keystone-db-sync" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.225709 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="466b541a-ec7d-4826-947e-c57617810071" containerName="keystone-db-sync" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.225907 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="466b541a-ec7d-4826-947e-c57617810071" containerName="keystone-db-sync" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.226936 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79ddd5b787-8bg8x" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.236189 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79ddd5b787-8bg8x"] Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.283052 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-vslmg"] Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.284177 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-vslmg" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.287373 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.287534 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.287657 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.287821 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.287940 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-n898w" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.303066 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-vslmg"] Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.347119 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cbc96676-d50e-495e-9109-d4db0c7f6af9-dns-svc\") pod \"dnsmasq-dns-79ddd5b787-8bg8x\" (UID: \"cbc96676-d50e-495e-9109-d4db0c7f6af9\") " pod="openstack/dnsmasq-dns-79ddd5b787-8bg8x" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.347192 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-combined-ca-bundle\") pod \"keystone-bootstrap-vslmg\" (UID: \"07a34720-79a1-402e-9e64-76cc4cb17d06\") " pod="openstack/keystone-bootstrap-vslmg" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.347229 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-credential-keys\") pod \"keystone-bootstrap-vslmg\" (UID: \"07a34720-79a1-402e-9e64-76cc4cb17d06\") " pod="openstack/keystone-bootstrap-vslmg" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.347257 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cbc96676-d50e-495e-9109-d4db0c7f6af9-config\") pod \"dnsmasq-dns-79ddd5b787-8bg8x\" (UID: \"cbc96676-d50e-495e-9109-d4db0c7f6af9\") " pod="openstack/dnsmasq-dns-79ddd5b787-8bg8x" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.347426 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-fernet-keys\") pod \"keystone-bootstrap-vslmg\" (UID: \"07a34720-79a1-402e-9e64-76cc4cb17d06\") " pod="openstack/keystone-bootstrap-vslmg" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.347524 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdjf7\" (UniqueName: \"kubernetes.io/projected/cbc96676-d50e-495e-9109-d4db0c7f6af9-kube-api-access-fdjf7\") pod \"dnsmasq-dns-79ddd5b787-8bg8x\" (UID: \"cbc96676-d50e-495e-9109-d4db0c7f6af9\") " pod="openstack/dnsmasq-dns-79ddd5b787-8bg8x" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.347564 4811 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-config-data\") pod \"keystone-bootstrap-vslmg\" (UID: \"07a34720-79a1-402e-9e64-76cc4cb17d06\") " pod="openstack/keystone-bootstrap-vslmg" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.347583 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cbc96676-d50e-495e-9109-d4db0c7f6af9-ovsdbserver-nb\") pod \"dnsmasq-dns-79ddd5b787-8bg8x\" (UID: \"cbc96676-d50e-495e-9109-d4db0c7f6af9\") " pod="openstack/dnsmasq-dns-79ddd5b787-8bg8x" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.347596 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cbc96676-d50e-495e-9109-d4db0c7f6af9-ovsdbserver-sb\") pod \"dnsmasq-dns-79ddd5b787-8bg8x\" (UID: \"cbc96676-d50e-495e-9109-d4db0c7f6af9\") " pod="openstack/dnsmasq-dns-79ddd5b787-8bg8x" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.347672 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dglbn\" (UniqueName: \"kubernetes.io/projected/07a34720-79a1-402e-9e64-76cc4cb17d06-kube-api-access-dglbn\") pod \"keystone-bootstrap-vslmg\" (UID: \"07a34720-79a1-402e-9e64-76cc4cb17d06\") " pod="openstack/keystone-bootstrap-vslmg" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.347712 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-scripts\") pod \"keystone-bootstrap-vslmg\" (UID: \"07a34720-79a1-402e-9e64-76cc4cb17d06\") " pod="openstack/keystone-bootstrap-vslmg" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.448896 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-fernet-keys\") pod \"keystone-bootstrap-vslmg\" (UID: \"07a34720-79a1-402e-9e64-76cc4cb17d06\") " pod="openstack/keystone-bootstrap-vslmg" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.449931 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdjf7\" (UniqueName: \"kubernetes.io/projected/cbc96676-d50e-495e-9109-d4db0c7f6af9-kube-api-access-fdjf7\") pod \"dnsmasq-dns-79ddd5b787-8bg8x\" (UID: \"cbc96676-d50e-495e-9109-d4db0c7f6af9\") " pod="openstack/dnsmasq-dns-79ddd5b787-8bg8x" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.449967 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cbc96676-d50e-495e-9109-d4db0c7f6af9-ovsdbserver-sb\") pod \"dnsmasq-dns-79ddd5b787-8bg8x\" (UID: \"cbc96676-d50e-495e-9109-d4db0c7f6af9\") " pod="openstack/dnsmasq-dns-79ddd5b787-8bg8x" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.449982 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-config-data\") pod \"keystone-bootstrap-vslmg\" (UID: \"07a34720-79a1-402e-9e64-76cc4cb17d06\") " pod="openstack/keystone-bootstrap-vslmg" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.449998 4811 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cbc96676-d50e-495e-9109-d4db0c7f6af9-ovsdbserver-nb\") pod \"dnsmasq-dns-79ddd5b787-8bg8x\" (UID: \"cbc96676-d50e-495e-9109-d4db0c7f6af9\") " pod="openstack/dnsmasq-dns-79ddd5b787-8bg8x" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.450044 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dglbn\" (UniqueName: \"kubernetes.io/projected/07a34720-79a1-402e-9e64-76cc4cb17d06-kube-api-access-dglbn\") pod \"keystone-bootstrap-vslmg\" (UID: \"07a34720-79a1-402e-9e64-76cc4cb17d06\") " pod="openstack/keystone-bootstrap-vslmg" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.450099 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-scripts\") pod \"keystone-bootstrap-vslmg\" (UID: \"07a34720-79a1-402e-9e64-76cc4cb17d06\") " pod="openstack/keystone-bootstrap-vslmg" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.450400 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cbc96676-d50e-495e-9109-d4db0c7f6af9-dns-svc\") pod \"dnsmasq-dns-79ddd5b787-8bg8x\" (UID: \"cbc96676-d50e-495e-9109-d4db0c7f6af9\") " pod="openstack/dnsmasq-dns-79ddd5b787-8bg8x" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.450447 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-combined-ca-bundle\") pod \"keystone-bootstrap-vslmg\" (UID: \"07a34720-79a1-402e-9e64-76cc4cb17d06\") " pod="openstack/keystone-bootstrap-vslmg" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.450473 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-credential-keys\") pod \"keystone-bootstrap-vslmg\" (UID: \"07a34720-79a1-402e-9e64-76cc4cb17d06\") " pod="openstack/keystone-bootstrap-vslmg" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.450494 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cbc96676-d50e-495e-9109-d4db0c7f6af9-config\") pod \"dnsmasq-dns-79ddd5b787-8bg8x\" (UID: \"cbc96676-d50e-495e-9109-d4db0c7f6af9\") " pod="openstack/dnsmasq-dns-79ddd5b787-8bg8x" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.451014 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cbc96676-d50e-495e-9109-d4db0c7f6af9-ovsdbserver-nb\") pod \"dnsmasq-dns-79ddd5b787-8bg8x\" (UID: \"cbc96676-d50e-495e-9109-d4db0c7f6af9\") " pod="openstack/dnsmasq-dns-79ddd5b787-8bg8x" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.457294 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cbc96676-d50e-495e-9109-d4db0c7f6af9-ovsdbserver-sb\") pod \"dnsmasq-dns-79ddd5b787-8bg8x\" (UID: \"cbc96676-d50e-495e-9109-d4db0c7f6af9\") " pod="openstack/dnsmasq-dns-79ddd5b787-8bg8x" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.459878 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/cbc96676-d50e-495e-9109-d4db0c7f6af9-dns-svc\") pod \"dnsmasq-dns-79ddd5b787-8bg8x\" (UID: \"cbc96676-d50e-495e-9109-d4db0c7f6af9\") " pod="openstack/dnsmasq-dns-79ddd5b787-8bg8x" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.462137 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cbc96676-d50e-495e-9109-d4db0c7f6af9-config\") pod \"dnsmasq-dns-79ddd5b787-8bg8x\" (UID: \"cbc96676-d50e-495e-9109-d4db0c7f6af9\") " pod="openstack/dnsmasq-dns-79ddd5b787-8bg8x" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.462464 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-combined-ca-bundle\") pod \"keystone-bootstrap-vslmg\" (UID: \"07a34720-79a1-402e-9e64-76cc4cb17d06\") " pod="openstack/keystone-bootstrap-vslmg" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.463991 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-credential-keys\") pod \"keystone-bootstrap-vslmg\" (UID: \"07a34720-79a1-402e-9e64-76cc4cb17d06\") " pod="openstack/keystone-bootstrap-vslmg" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.466217 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-config-data\") pod \"keystone-bootstrap-vslmg\" (UID: \"07a34720-79a1-402e-9e64-76cc4cb17d06\") " pod="openstack/keystone-bootstrap-vslmg" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.470060 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-fernet-keys\") pod \"keystone-bootstrap-vslmg\" (UID: \"07a34720-79a1-402e-9e64-76cc4cb17d06\") " pod="openstack/keystone-bootstrap-vslmg" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.483109 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dglbn\" (UniqueName: \"kubernetes.io/projected/07a34720-79a1-402e-9e64-76cc4cb17d06-kube-api-access-dglbn\") pod \"keystone-bootstrap-vslmg\" (UID: \"07a34720-79a1-402e-9e64-76cc4cb17d06\") " pod="openstack/keystone-bootstrap-vslmg" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.484881 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-scripts\") pod \"keystone-bootstrap-vslmg\" (UID: \"07a34720-79a1-402e-9e64-76cc4cb17d06\") " pod="openstack/keystone-bootstrap-vslmg" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.487658 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdjf7\" (UniqueName: \"kubernetes.io/projected/cbc96676-d50e-495e-9109-d4db0c7f6af9-kube-api-access-fdjf7\") pod \"dnsmasq-dns-79ddd5b787-8bg8x\" (UID: \"cbc96676-d50e-495e-9109-d4db0c7f6af9\") " pod="openstack/dnsmasq-dns-79ddd5b787-8bg8x" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.550897 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79ddd5b787-8bg8x" Jan 28 17:13:57 crc kubenswrapper[4811]: I0128 17:13:57.643314 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-vslmg" Jan 28 17:13:58 crc kubenswrapper[4811]: I0128 17:13:58.111699 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79ddd5b787-8bg8x"] Jan 28 17:13:58 crc kubenswrapper[4811]: I0128 17:13:58.202795 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-vslmg"] Jan 28 17:13:58 crc kubenswrapper[4811]: W0128 17:13:58.212664 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod07a34720_79a1_402e_9e64_76cc4cb17d06.slice/crio-9c2f0ee26f16a528e743336988fea202d1d7401bc4e2c7bd88d273df00c61669 WatchSource:0}: Error finding container 9c2f0ee26f16a528e743336988fea202d1d7401bc4e2c7bd88d273df00c61669: Status 404 returned error can't find the container with id 9c2f0ee26f16a528e743336988fea202d1d7401bc4e2c7bd88d273df00c61669 Jan 28 17:13:59 crc kubenswrapper[4811]: I0128 17:13:59.005715 4811 generic.go:334] "Generic (PLEG): container finished" podID="cbc96676-d50e-495e-9109-d4db0c7f6af9" containerID="9b1d4b77627a6fa549eb43af84e4e1be84c2789cbae50ef793fa72637506caa4" exitCode=0 Jan 28 17:13:59 crc kubenswrapper[4811]: I0128 17:13:59.005920 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79ddd5b787-8bg8x" event={"ID":"cbc96676-d50e-495e-9109-d4db0c7f6af9","Type":"ContainerDied","Data":"9b1d4b77627a6fa549eb43af84e4e1be84c2789cbae50ef793fa72637506caa4"} Jan 28 17:13:59 crc kubenswrapper[4811]: I0128 17:13:59.006114 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79ddd5b787-8bg8x" event={"ID":"cbc96676-d50e-495e-9109-d4db0c7f6af9","Type":"ContainerStarted","Data":"09c10a4e0369007fa254713383581cbcf3d75dd203fc684c63978615379324e0"} Jan 28 17:13:59 crc kubenswrapper[4811]: I0128 17:13:59.007936 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-vslmg" event={"ID":"07a34720-79a1-402e-9e64-76cc4cb17d06","Type":"ContainerStarted","Data":"db466615f30a2918bcfce8570921db9db4997054485fde2eefde214523fb962f"} Jan 28 17:13:59 crc kubenswrapper[4811]: I0128 17:13:59.007986 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-vslmg" event={"ID":"07a34720-79a1-402e-9e64-76cc4cb17d06","Type":"ContainerStarted","Data":"9c2f0ee26f16a528e743336988fea202d1d7401bc4e2c7bd88d273df00c61669"} Jan 28 17:13:59 crc kubenswrapper[4811]: I0128 17:13:59.057007 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-vslmg" podStartSLOduration=2.056986574 podStartE2EDuration="2.056986574s" podCreationTimestamp="2026-01-28 17:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:13:59.049670385 +0000 UTC m=+5331.804033968" watchObservedRunningTime="2026-01-28 17:13:59.056986574 +0000 UTC m=+5331.811350157" Jan 28 17:14:00 crc kubenswrapper[4811]: I0128 17:14:00.019716 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79ddd5b787-8bg8x" event={"ID":"cbc96676-d50e-495e-9109-d4db0c7f6af9","Type":"ContainerStarted","Data":"87e6965928cc872303d4d01c5837025a5bb31e2d0f9d3789d3680acafc5a787b"} Jan 28 17:14:00 crc kubenswrapper[4811]: I0128 17:14:00.021233 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-79ddd5b787-8bg8x" Jan 28 17:14:00 crc kubenswrapper[4811]: I0128 
17:14:00.044598 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-79ddd5b787-8bg8x" podStartSLOduration=3.044578721 podStartE2EDuration="3.044578721s" podCreationTimestamp="2026-01-28 17:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:14:00.035478004 +0000 UTC m=+5332.789841597" watchObservedRunningTime="2026-01-28 17:14:00.044578721 +0000 UTC m=+5332.798942304" Jan 28 17:14:02 crc kubenswrapper[4811]: I0128 17:14:02.035055 4811 generic.go:334] "Generic (PLEG): container finished" podID="07a34720-79a1-402e-9e64-76cc4cb17d06" containerID="db466615f30a2918bcfce8570921db9db4997054485fde2eefde214523fb962f" exitCode=0 Jan 28 17:14:02 crc kubenswrapper[4811]: I0128 17:14:02.035624 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-vslmg" event={"ID":"07a34720-79a1-402e-9e64-76cc4cb17d06","Type":"ContainerDied","Data":"db466615f30a2918bcfce8570921db9db4997054485fde2eefde214523fb962f"} Jan 28 17:14:03 crc kubenswrapper[4811]: I0128 17:14:03.375626 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-vslmg" Jan 28 17:14:03 crc kubenswrapper[4811]: I0128 17:14:03.545166 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-scripts\") pod \"07a34720-79a1-402e-9e64-76cc4cb17d06\" (UID: \"07a34720-79a1-402e-9e64-76cc4cb17d06\") " Jan 28 17:14:03 crc kubenswrapper[4811]: I0128 17:14:03.545251 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-fernet-keys\") pod \"07a34720-79a1-402e-9e64-76cc4cb17d06\" (UID: \"07a34720-79a1-402e-9e64-76cc4cb17d06\") " Jan 28 17:14:03 crc kubenswrapper[4811]: I0128 17:14:03.545316 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-combined-ca-bundle\") pod \"07a34720-79a1-402e-9e64-76cc4cb17d06\" (UID: \"07a34720-79a1-402e-9e64-76cc4cb17d06\") " Jan 28 17:14:03 crc kubenswrapper[4811]: I0128 17:14:03.545519 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dglbn\" (UniqueName: \"kubernetes.io/projected/07a34720-79a1-402e-9e64-76cc4cb17d06-kube-api-access-dglbn\") pod \"07a34720-79a1-402e-9e64-76cc4cb17d06\" (UID: \"07a34720-79a1-402e-9e64-76cc4cb17d06\") " Jan 28 17:14:03 crc kubenswrapper[4811]: I0128 17:14:03.545550 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-credential-keys\") pod \"07a34720-79a1-402e-9e64-76cc4cb17d06\" (UID: \"07a34720-79a1-402e-9e64-76cc4cb17d06\") " Jan 28 17:14:03 crc kubenswrapper[4811]: I0128 17:14:03.545615 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-config-data\") pod \"07a34720-79a1-402e-9e64-76cc4cb17d06\" (UID: \"07a34720-79a1-402e-9e64-76cc4cb17d06\") " Jan 28 17:14:03 crc kubenswrapper[4811]: I0128 17:14:03.551240 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-scripts" (OuterVolumeSpecName: "scripts") pod "07a34720-79a1-402e-9e64-76cc4cb17d06" (UID: "07a34720-79a1-402e-9e64-76cc4cb17d06"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:14:03 crc kubenswrapper[4811]: I0128 17:14:03.551237 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07a34720-79a1-402e-9e64-76cc4cb17d06-kube-api-access-dglbn" (OuterVolumeSpecName: "kube-api-access-dglbn") pod "07a34720-79a1-402e-9e64-76cc4cb17d06" (UID: "07a34720-79a1-402e-9e64-76cc4cb17d06"). InnerVolumeSpecName "kube-api-access-dglbn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:14:03 crc kubenswrapper[4811]: I0128 17:14:03.551397 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "07a34720-79a1-402e-9e64-76cc4cb17d06" (UID: "07a34720-79a1-402e-9e64-76cc4cb17d06"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:14:03 crc kubenswrapper[4811]: I0128 17:14:03.551815 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "07a34720-79a1-402e-9e64-76cc4cb17d06" (UID: "07a34720-79a1-402e-9e64-76cc4cb17d06"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:14:03 crc kubenswrapper[4811]: E0128 17:14:03.569479 4811 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-config-data podName:07a34720-79a1-402e-9e64-76cc4cb17d06 nodeName:}" failed. No retries permitted until 2026-01-28 17:14:04.06938864 +0000 UTC m=+5336.823752233 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "config-data" (UniqueName: "kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-config-data") pod "07a34720-79a1-402e-9e64-76cc4cb17d06" (UID: "07a34720-79a1-402e-9e64-76cc4cb17d06") : error deleting /var/lib/kubelet/pods/07a34720-79a1-402e-9e64-76cc4cb17d06/volume-subpaths: remove /var/lib/kubelet/pods/07a34720-79a1-402e-9e64-76cc4cb17d06/volume-subpaths: no such file or directory Jan 28 17:14:03 crc kubenswrapper[4811]: I0128 17:14:03.572515 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "07a34720-79a1-402e-9e64-76cc4cb17d06" (UID: "07a34720-79a1-402e-9e64-76cc4cb17d06"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:14:03 crc kubenswrapper[4811]: I0128 17:14:03.647381 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dglbn\" (UniqueName: \"kubernetes.io/projected/07a34720-79a1-402e-9e64-76cc4cb17d06-kube-api-access-dglbn\") on node \"crc\" DevicePath \"\"" Jan 28 17:14:03 crc kubenswrapper[4811]: I0128 17:14:03.647469 4811 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 28 17:14:03 crc kubenswrapper[4811]: I0128 17:14:03.647510 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:14:03 crc kubenswrapper[4811]: I0128 17:14:03.647541 4811 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 28 17:14:03 crc kubenswrapper[4811]: I0128 17:14:03.647566 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.054754 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-vslmg" event={"ID":"07a34720-79a1-402e-9e64-76cc4cb17d06","Type":"ContainerDied","Data":"9c2f0ee26f16a528e743336988fea202d1d7401bc4e2c7bd88d273df00c61669"} Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.055059 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9c2f0ee26f16a528e743336988fea202d1d7401bc4e2c7bd88d273df00c61669" Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.054866 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-vslmg" Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.133652 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-vslmg"] Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.139297 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-vslmg"] Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.155142 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-config-data\") pod \"07a34720-79a1-402e-9e64-76cc4cb17d06\" (UID: \"07a34720-79a1-402e-9e64-76cc4cb17d06\") " Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.159832 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-config-data" (OuterVolumeSpecName: "config-data") pod "07a34720-79a1-402e-9e64-76cc4cb17d06" (UID: "07a34720-79a1-402e-9e64-76cc4cb17d06"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.213921 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-h5pq8"] Jan 28 17:14:04 crc kubenswrapper[4811]: E0128 17:14:04.214298 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07a34720-79a1-402e-9e64-76cc4cb17d06" containerName="keystone-bootstrap" Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.214326 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="07a34720-79a1-402e-9e64-76cc4cb17d06" containerName="keystone-bootstrap" Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.214552 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="07a34720-79a1-402e-9e64-76cc4cb17d06" containerName="keystone-bootstrap" Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.215252 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-h5pq8" Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.223745 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-h5pq8"] Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.257334 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/07a34720-79a1-402e-9e64-76cc4cb17d06-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.349920 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07a34720-79a1-402e-9e64-76cc4cb17d06" path="/var/lib/kubelet/pods/07a34720-79a1-402e-9e64-76cc4cb17d06/volumes" Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.359337 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-credential-keys\") pod \"keystone-bootstrap-h5pq8\" (UID: \"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a\") " pod="openstack/keystone-bootstrap-h5pq8" Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.359390 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vq879\" (UniqueName: \"kubernetes.io/projected/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-kube-api-access-vq879\") pod \"keystone-bootstrap-h5pq8\" (UID: \"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a\") " pod="openstack/keystone-bootstrap-h5pq8" Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.359413 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-fernet-keys\") pod \"keystone-bootstrap-h5pq8\" (UID: \"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a\") " pod="openstack/keystone-bootstrap-h5pq8" Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.359475 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-combined-ca-bundle\") pod \"keystone-bootstrap-h5pq8\" (UID: \"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a\") " pod="openstack/keystone-bootstrap-h5pq8" Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.359677 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-scripts\") pod 
\"keystone-bootstrap-h5pq8\" (UID: \"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a\") " pod="openstack/keystone-bootstrap-h5pq8" Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.359745 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-config-data\") pod \"keystone-bootstrap-h5pq8\" (UID: \"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a\") " pod="openstack/keystone-bootstrap-h5pq8" Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.461097 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-credential-keys\") pod \"keystone-bootstrap-h5pq8\" (UID: \"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a\") " pod="openstack/keystone-bootstrap-h5pq8" Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.461195 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vq879\" (UniqueName: \"kubernetes.io/projected/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-kube-api-access-vq879\") pod \"keystone-bootstrap-h5pq8\" (UID: \"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a\") " pod="openstack/keystone-bootstrap-h5pq8" Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.461217 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-fernet-keys\") pod \"keystone-bootstrap-h5pq8\" (UID: \"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a\") " pod="openstack/keystone-bootstrap-h5pq8" Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.461247 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-combined-ca-bundle\") pod \"keystone-bootstrap-h5pq8\" (UID: \"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a\") " pod="openstack/keystone-bootstrap-h5pq8" Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.461313 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-scripts\") pod \"keystone-bootstrap-h5pq8\" (UID: \"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a\") " pod="openstack/keystone-bootstrap-h5pq8" Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.461331 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-config-data\") pod \"keystone-bootstrap-h5pq8\" (UID: \"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a\") " pod="openstack/keystone-bootstrap-h5pq8" Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.464810 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-credential-keys\") pod \"keystone-bootstrap-h5pq8\" (UID: \"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a\") " pod="openstack/keystone-bootstrap-h5pq8" Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.464957 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-fernet-keys\") pod \"keystone-bootstrap-h5pq8\" (UID: \"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a\") " pod="openstack/keystone-bootstrap-h5pq8" Jan 28 17:14:04 crc 
kubenswrapper[4811]: I0128 17:14:04.464982 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-config-data\") pod \"keystone-bootstrap-h5pq8\" (UID: \"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a\") " pod="openstack/keystone-bootstrap-h5pq8"
Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.466221 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-scripts\") pod \"keystone-bootstrap-h5pq8\" (UID: \"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a\") " pod="openstack/keystone-bootstrap-h5pq8"
Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.467132 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-combined-ca-bundle\") pod \"keystone-bootstrap-h5pq8\" (UID: \"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a\") " pod="openstack/keystone-bootstrap-h5pq8"
Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.478257 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vq879\" (UniqueName: \"kubernetes.io/projected/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-kube-api-access-vq879\") pod \"keystone-bootstrap-h5pq8\" (UID: \"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a\") " pod="openstack/keystone-bootstrap-h5pq8"
Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.540185 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-h5pq8"
Jan 28 17:14:04 crc kubenswrapper[4811]: I0128 17:14:04.952422 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-h5pq8"]
Jan 28 17:14:05 crc kubenswrapper[4811]: I0128 17:14:05.064621 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-h5pq8" event={"ID":"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a","Type":"ContainerStarted","Data":"53f786f099aad49f46917f226522fe0e5c438969488a25c83021cda31cd619c7"}
Jan 28 17:14:06 crc kubenswrapper[4811]: I0128 17:14:06.073774 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-h5pq8" event={"ID":"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a","Type":"ContainerStarted","Data":"cbe5f9bd21aecc43d58b2f68c963413df95f64c23f5ced8b8b29b403501cd12a"}
Jan 28 17:14:06 crc kubenswrapper[4811]: I0128 17:14:06.100942 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-h5pq8" podStartSLOduration=2.100921666 podStartE2EDuration="2.100921666s" podCreationTimestamp="2026-01-28 17:14:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:14:06.09189636 +0000 UTC m=+5338.846259953" watchObservedRunningTime="2026-01-28 17:14:06.100921666 +0000 UTC m=+5338.855285249"
Jan 28 17:14:07 crc kubenswrapper[4811]: I0128 17:14:07.553777 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-79ddd5b787-8bg8x"
Jan 28 17:14:07 crc kubenswrapper[4811]: I0128 17:14:07.625495 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554585dc-mbttd"]
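
The pod_startup_latency_tracker record above reports podStartSLOduration=2.100921666 for keystone-bootstrap-h5pq8. With firstStartedPulling and lastFinishedPulling zeroed (no image pull happened), no pull time is excluded and the figure is just watchObservedRunningTime minus podCreationTimestamp. A quick Go check of that arithmetic, using the timestamps quoted in the record:

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        // Timestamps quoted in the log record above.
        created, _ := time.Parse(time.RFC3339Nano, "2026-01-28T17:14:04Z")
        observed, _ := time.Parse(time.RFC3339Nano, "2026-01-28T17:14:06.100921666Z")
        // With the pull timestamps unset, the SLO duration is the plain difference.
        fmt.Println(observed.Sub(created)) // 2.100921666s, matching podStartSLOduration
    }
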
podUID="bfd82619-c9bd-4c64-a3a4-dd6b85b48126" containerName="dnsmasq-dns" containerID="cri-o://d595342cfabbc9b91f4fdbf9dc72ea898825c6fd1e6f1419adb950fe2bedc8fd" gracePeriod=10 Jan 28 17:14:08 crc kubenswrapper[4811]: I0128 17:14:08.093505 4811 generic.go:334] "Generic (PLEG): container finished" podID="bfd82619-c9bd-4c64-a3a4-dd6b85b48126" containerID="d595342cfabbc9b91f4fdbf9dc72ea898825c6fd1e6f1419adb950fe2bedc8fd" exitCode=0 Jan 28 17:14:08 crc kubenswrapper[4811]: I0128 17:14:08.093580 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554585dc-mbttd" event={"ID":"bfd82619-c9bd-4c64-a3a4-dd6b85b48126","Type":"ContainerDied","Data":"d595342cfabbc9b91f4fdbf9dc72ea898825c6fd1e6f1419adb950fe2bedc8fd"} Jan 28 17:14:08 crc kubenswrapper[4811]: I0128 17:14:08.093886 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554585dc-mbttd" event={"ID":"bfd82619-c9bd-4c64-a3a4-dd6b85b48126","Type":"ContainerDied","Data":"8632d91695c0cb987e8895d8160146f12a3fa7ec93273555e77b921773dc5527"} Jan 28 17:14:08 crc kubenswrapper[4811]: I0128 17:14:08.093906 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8632d91695c0cb987e8895d8160146f12a3fa7ec93273555e77b921773dc5527" Jan 28 17:14:08 crc kubenswrapper[4811]: I0128 17:14:08.120837 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554585dc-mbttd" Jan 28 17:14:08 crc kubenswrapper[4811]: I0128 17:14:08.223730 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bfd82619-c9bd-4c64-a3a4-dd6b85b48126-ovsdbserver-nb\") pod \"bfd82619-c9bd-4c64-a3a4-dd6b85b48126\" (UID: \"bfd82619-c9bd-4c64-a3a4-dd6b85b48126\") " Jan 28 17:14:08 crc kubenswrapper[4811]: I0128 17:14:08.223798 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bfd82619-c9bd-4c64-a3a4-dd6b85b48126-ovsdbserver-sb\") pod \"bfd82619-c9bd-4c64-a3a4-dd6b85b48126\" (UID: \"bfd82619-c9bd-4c64-a3a4-dd6b85b48126\") " Jan 28 17:14:08 crc kubenswrapper[4811]: I0128 17:14:08.223927 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z5fmv\" (UniqueName: \"kubernetes.io/projected/bfd82619-c9bd-4c64-a3a4-dd6b85b48126-kube-api-access-z5fmv\") pod \"bfd82619-c9bd-4c64-a3a4-dd6b85b48126\" (UID: \"bfd82619-c9bd-4c64-a3a4-dd6b85b48126\") " Jan 28 17:14:08 crc kubenswrapper[4811]: I0128 17:14:08.223958 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfd82619-c9bd-4c64-a3a4-dd6b85b48126-config\") pod \"bfd82619-c9bd-4c64-a3a4-dd6b85b48126\" (UID: \"bfd82619-c9bd-4c64-a3a4-dd6b85b48126\") " Jan 28 17:14:08 crc kubenswrapper[4811]: I0128 17:14:08.224042 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bfd82619-c9bd-4c64-a3a4-dd6b85b48126-dns-svc\") pod \"bfd82619-c9bd-4c64-a3a4-dd6b85b48126\" (UID: \"bfd82619-c9bd-4c64-a3a4-dd6b85b48126\") " Jan 28 17:14:08 crc kubenswrapper[4811]: I0128 17:14:08.231266 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bfd82619-c9bd-4c64-a3a4-dd6b85b48126-kube-api-access-z5fmv" (OuterVolumeSpecName: "kube-api-access-z5fmv") pod "bfd82619-c9bd-4c64-a3a4-dd6b85b48126" (UID: 
"bfd82619-c9bd-4c64-a3a4-dd6b85b48126"). InnerVolumeSpecName "kube-api-access-z5fmv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:14:08 crc kubenswrapper[4811]: I0128 17:14:08.260285 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bfd82619-c9bd-4c64-a3a4-dd6b85b48126-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bfd82619-c9bd-4c64-a3a4-dd6b85b48126" (UID: "bfd82619-c9bd-4c64-a3a4-dd6b85b48126"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:14:08 crc kubenswrapper[4811]: I0128 17:14:08.264084 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bfd82619-c9bd-4c64-a3a4-dd6b85b48126-config" (OuterVolumeSpecName: "config") pod "bfd82619-c9bd-4c64-a3a4-dd6b85b48126" (UID: "bfd82619-c9bd-4c64-a3a4-dd6b85b48126"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:14:08 crc kubenswrapper[4811]: I0128 17:14:08.267844 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bfd82619-c9bd-4c64-a3a4-dd6b85b48126-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "bfd82619-c9bd-4c64-a3a4-dd6b85b48126" (UID: "bfd82619-c9bd-4c64-a3a4-dd6b85b48126"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:14:08 crc kubenswrapper[4811]: I0128 17:14:08.270718 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bfd82619-c9bd-4c64-a3a4-dd6b85b48126-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "bfd82619-c9bd-4c64-a3a4-dd6b85b48126" (UID: "bfd82619-c9bd-4c64-a3a4-dd6b85b48126"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:14:08 crc kubenswrapper[4811]: I0128 17:14:08.325656 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bfd82619-c9bd-4c64-a3a4-dd6b85b48126-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 28 17:14:08 crc kubenswrapper[4811]: I0128 17:14:08.325694 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bfd82619-c9bd-4c64-a3a4-dd6b85b48126-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 28 17:14:08 crc kubenswrapper[4811]: I0128 17:14:08.325704 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z5fmv\" (UniqueName: \"kubernetes.io/projected/bfd82619-c9bd-4c64-a3a4-dd6b85b48126-kube-api-access-z5fmv\") on node \"crc\" DevicePath \"\"" Jan 28 17:14:08 crc kubenswrapper[4811]: I0128 17:14:08.325717 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfd82619-c9bd-4c64-a3a4-dd6b85b48126-config\") on node \"crc\" DevicePath \"\"" Jan 28 17:14:08 crc kubenswrapper[4811]: I0128 17:14:08.325726 4811 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bfd82619-c9bd-4c64-a3a4-dd6b85b48126-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 28 17:14:09 crc kubenswrapper[4811]: I0128 17:14:09.103168 4811 generic.go:334] "Generic (PLEG): container finished" podID="aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a" containerID="cbe5f9bd21aecc43d58b2f68c963413df95f64c23f5ced8b8b29b403501cd12a" exitCode=0 Jan 28 17:14:09 crc kubenswrapper[4811]: I0128 17:14:09.103254 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554585dc-mbttd" Jan 28 17:14:09 crc kubenswrapper[4811]: I0128 17:14:09.103245 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-h5pq8" event={"ID":"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a","Type":"ContainerDied","Data":"cbe5f9bd21aecc43d58b2f68c963413df95f64c23f5ced8b8b29b403501cd12a"} Jan 28 17:14:09 crc kubenswrapper[4811]: I0128 17:14:09.153580 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554585dc-mbttd"] Jan 28 17:14:09 crc kubenswrapper[4811]: I0128 17:14:09.160757 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8554585dc-mbttd"] Jan 28 17:14:10 crc kubenswrapper[4811]: I0128 17:14:10.349788 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bfd82619-c9bd-4c64-a3a4-dd6b85b48126" path="/var/lib/kubelet/pods/bfd82619-c9bd-4c64-a3a4-dd6b85b48126/volumes" Jan 28 17:14:10 crc kubenswrapper[4811]: I0128 17:14:10.430583 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-h5pq8" Jan 28 17:14:10 crc kubenswrapper[4811]: I0128 17:14:10.576238 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vq879\" (UniqueName: \"kubernetes.io/projected/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-kube-api-access-vq879\") pod \"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a\" (UID: \"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a\") " Jan 28 17:14:10 crc kubenswrapper[4811]: I0128 17:14:10.576298 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-credential-keys\") pod \"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a\" (UID: \"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a\") " Jan 28 17:14:10 crc kubenswrapper[4811]: I0128 17:14:10.576377 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-config-data\") pod \"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a\" (UID: \"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a\") " Jan 28 17:14:10 crc kubenswrapper[4811]: I0128 17:14:10.576422 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-combined-ca-bundle\") pod \"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a\" (UID: \"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a\") " Jan 28 17:14:10 crc kubenswrapper[4811]: I0128 17:14:10.576548 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-scripts\") pod \"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a\" (UID: \"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a\") " Jan 28 17:14:10 crc kubenswrapper[4811]: I0128 17:14:10.576680 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-fernet-keys\") pod \"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a\" (UID: \"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a\") " Jan 28 17:14:10 crc kubenswrapper[4811]: I0128 17:14:10.583586 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a" (UID: "aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:14:10 crc kubenswrapper[4811]: I0128 17:14:10.583794 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a" (UID: "aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:14:10 crc kubenswrapper[4811]: I0128 17:14:10.585060 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-scripts" (OuterVolumeSpecName: "scripts") pod "aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a" (UID: "aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:14:10 crc kubenswrapper[4811]: I0128 17:14:10.588054 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-kube-api-access-vq879" (OuterVolumeSpecName: "kube-api-access-vq879") pod "aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a" (UID: "aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a"). InnerVolumeSpecName "kube-api-access-vq879". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:14:10 crc kubenswrapper[4811]: I0128 17:14:10.604165 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a" (UID: "aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:14:10 crc kubenswrapper[4811]: I0128 17:14:10.604980 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-config-data" (OuterVolumeSpecName: "config-data") pod "aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a" (UID: "aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:14:10 crc kubenswrapper[4811]: I0128 17:14:10.678141 4811 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 28 17:14:10 crc kubenswrapper[4811]: I0128 17:14:10.678171 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 17:14:10 crc kubenswrapper[4811]: I0128 17:14:10.678181 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 17:14:10 crc kubenswrapper[4811]: I0128 17:14:10.678190 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:14:10 crc kubenswrapper[4811]: I0128 17:14:10.678199 4811 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 28 17:14:10 crc kubenswrapper[4811]: I0128 17:14:10.678207 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vq879\" (UniqueName: \"kubernetes.io/projected/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a-kube-api-access-vq879\") on node \"crc\" DevicePath \"\"" Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 17:14:11.118324 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-h5pq8" event={"ID":"aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a","Type":"ContainerDied","Data":"53f786f099aad49f46917f226522fe0e5c438969488a25c83021cda31cd619c7"} Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 17:14:11.118365 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="53f786f099aad49f46917f226522fe0e5c438969488a25c83021cda31cd619c7" Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 
17:14:11.118388 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-h5pq8" Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 17:14:11.227458 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-56f69fc7cc-s5j8w"] Jan 28 17:14:11 crc kubenswrapper[4811]: E0128 17:14:11.227770 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfd82619-c9bd-4c64-a3a4-dd6b85b48126" containerName="init" Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 17:14:11.227786 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfd82619-c9bd-4c64-a3a4-dd6b85b48126" containerName="init" Jan 28 17:14:11 crc kubenswrapper[4811]: E0128 17:14:11.227810 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfd82619-c9bd-4c64-a3a4-dd6b85b48126" containerName="dnsmasq-dns" Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 17:14:11.227817 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfd82619-c9bd-4c64-a3a4-dd6b85b48126" containerName="dnsmasq-dns" Jan 28 17:14:11 crc kubenswrapper[4811]: E0128 17:14:11.227829 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a" containerName="keystone-bootstrap" Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 17:14:11.227836 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a" containerName="keystone-bootstrap" Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 17:14:11.227982 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a" containerName="keystone-bootstrap" Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 17:14:11.228003 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfd82619-c9bd-4c64-a3a4-dd6b85b48126" containerName="dnsmasq-dns" Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 17:14:11.228556 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-56f69fc7cc-s5j8w" Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 17:14:11.232236 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-n898w" Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 17:14:11.232559 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 17:14:11.232764 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 17:14:11.233882 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 17:14:11.245120 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-56f69fc7cc-s5j8w"] Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 17:14:11.389470 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/44877e6c-40f0-4f95-93f3-18bfd195a9bb-credential-keys\") pod \"keystone-56f69fc7cc-s5j8w\" (UID: \"44877e6c-40f0-4f95-93f3-18bfd195a9bb\") " pod="openstack/keystone-56f69fc7cc-s5j8w" Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 17:14:11.389589 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44877e6c-40f0-4f95-93f3-18bfd195a9bb-config-data\") pod \"keystone-56f69fc7cc-s5j8w\" (UID: \"44877e6c-40f0-4f95-93f3-18bfd195a9bb\") " pod="openstack/keystone-56f69fc7cc-s5j8w" Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 17:14:11.389724 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/44877e6c-40f0-4f95-93f3-18bfd195a9bb-scripts\") pod \"keystone-56f69fc7cc-s5j8w\" (UID: \"44877e6c-40f0-4f95-93f3-18bfd195a9bb\") " pod="openstack/keystone-56f69fc7cc-s5j8w" Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 17:14:11.389803 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sj9gs\" (UniqueName: \"kubernetes.io/projected/44877e6c-40f0-4f95-93f3-18bfd195a9bb-kube-api-access-sj9gs\") pod \"keystone-56f69fc7cc-s5j8w\" (UID: \"44877e6c-40f0-4f95-93f3-18bfd195a9bb\") " pod="openstack/keystone-56f69fc7cc-s5j8w" Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 17:14:11.389851 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/44877e6c-40f0-4f95-93f3-18bfd195a9bb-fernet-keys\") pod \"keystone-56f69fc7cc-s5j8w\" (UID: \"44877e6c-40f0-4f95-93f3-18bfd195a9bb\") " pod="openstack/keystone-56f69fc7cc-s5j8w" Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 17:14:11.389927 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44877e6c-40f0-4f95-93f3-18bfd195a9bb-combined-ca-bundle\") pod \"keystone-56f69fc7cc-s5j8w\" (UID: \"44877e6c-40f0-4f95-93f3-18bfd195a9bb\") " pod="openstack/keystone-56f69fc7cc-s5j8w" Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 17:14:11.491451 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/44877e6c-40f0-4f95-93f3-18bfd195a9bb-credential-keys\") pod 
\"keystone-56f69fc7cc-s5j8w\" (UID: \"44877e6c-40f0-4f95-93f3-18bfd195a9bb\") " pod="openstack/keystone-56f69fc7cc-s5j8w" Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 17:14:11.491546 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44877e6c-40f0-4f95-93f3-18bfd195a9bb-config-data\") pod \"keystone-56f69fc7cc-s5j8w\" (UID: \"44877e6c-40f0-4f95-93f3-18bfd195a9bb\") " pod="openstack/keystone-56f69fc7cc-s5j8w" Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 17:14:11.491572 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/44877e6c-40f0-4f95-93f3-18bfd195a9bb-scripts\") pod \"keystone-56f69fc7cc-s5j8w\" (UID: \"44877e6c-40f0-4f95-93f3-18bfd195a9bb\") " pod="openstack/keystone-56f69fc7cc-s5j8w" Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 17:14:11.491594 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sj9gs\" (UniqueName: \"kubernetes.io/projected/44877e6c-40f0-4f95-93f3-18bfd195a9bb-kube-api-access-sj9gs\") pod \"keystone-56f69fc7cc-s5j8w\" (UID: \"44877e6c-40f0-4f95-93f3-18bfd195a9bb\") " pod="openstack/keystone-56f69fc7cc-s5j8w" Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 17:14:11.491614 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/44877e6c-40f0-4f95-93f3-18bfd195a9bb-fernet-keys\") pod \"keystone-56f69fc7cc-s5j8w\" (UID: \"44877e6c-40f0-4f95-93f3-18bfd195a9bb\") " pod="openstack/keystone-56f69fc7cc-s5j8w" Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 17:14:11.491657 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44877e6c-40f0-4f95-93f3-18bfd195a9bb-combined-ca-bundle\") pod \"keystone-56f69fc7cc-s5j8w\" (UID: \"44877e6c-40f0-4f95-93f3-18bfd195a9bb\") " pod="openstack/keystone-56f69fc7cc-s5j8w" Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 17:14:11.495531 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/44877e6c-40f0-4f95-93f3-18bfd195a9bb-scripts\") pod \"keystone-56f69fc7cc-s5j8w\" (UID: \"44877e6c-40f0-4f95-93f3-18bfd195a9bb\") " pod="openstack/keystone-56f69fc7cc-s5j8w" Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 17:14:11.495788 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44877e6c-40f0-4f95-93f3-18bfd195a9bb-combined-ca-bundle\") pod \"keystone-56f69fc7cc-s5j8w\" (UID: \"44877e6c-40f0-4f95-93f3-18bfd195a9bb\") " pod="openstack/keystone-56f69fc7cc-s5j8w" Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 17:14:11.497090 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44877e6c-40f0-4f95-93f3-18bfd195a9bb-config-data\") pod \"keystone-56f69fc7cc-s5j8w\" (UID: \"44877e6c-40f0-4f95-93f3-18bfd195a9bb\") " pod="openstack/keystone-56f69fc7cc-s5j8w" Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 17:14:11.503591 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/44877e6c-40f0-4f95-93f3-18bfd195a9bb-credential-keys\") pod \"keystone-56f69fc7cc-s5j8w\" (UID: \"44877e6c-40f0-4f95-93f3-18bfd195a9bb\") " pod="openstack/keystone-56f69fc7cc-s5j8w" Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 
17:14:11.509060 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/44877e6c-40f0-4f95-93f3-18bfd195a9bb-fernet-keys\") pod \"keystone-56f69fc7cc-s5j8w\" (UID: \"44877e6c-40f0-4f95-93f3-18bfd195a9bb\") " pod="openstack/keystone-56f69fc7cc-s5j8w" Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 17:14:11.511070 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sj9gs\" (UniqueName: \"kubernetes.io/projected/44877e6c-40f0-4f95-93f3-18bfd195a9bb-kube-api-access-sj9gs\") pod \"keystone-56f69fc7cc-s5j8w\" (UID: \"44877e6c-40f0-4f95-93f3-18bfd195a9bb\") " pod="openstack/keystone-56f69fc7cc-s5j8w" Jan 28 17:14:11 crc kubenswrapper[4811]: I0128 17:14:11.547029 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-56f69fc7cc-s5j8w" Jan 28 17:14:12 crc kubenswrapper[4811]: I0128 17:14:12.037331 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-56f69fc7cc-s5j8w"] Jan 28 17:14:12 crc kubenswrapper[4811]: W0128 17:14:12.043656 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod44877e6c_40f0_4f95_93f3_18bfd195a9bb.slice/crio-a2581e434a47eaff711e16c1f5e6ff317ade2b744c97af34aab00f825ef7a2a3 WatchSource:0}: Error finding container a2581e434a47eaff711e16c1f5e6ff317ade2b744c97af34aab00f825ef7a2a3: Status 404 returned error can't find the container with id a2581e434a47eaff711e16c1f5e6ff317ade2b744c97af34aab00f825ef7a2a3 Jan 28 17:14:12 crc kubenswrapper[4811]: I0128 17:14:12.127556 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-56f69fc7cc-s5j8w" event={"ID":"44877e6c-40f0-4f95-93f3-18bfd195a9bb","Type":"ContainerStarted","Data":"a2581e434a47eaff711e16c1f5e6ff317ade2b744c97af34aab00f825ef7a2a3"} Jan 28 17:14:13 crc kubenswrapper[4811]: I0128 17:14:13.136605 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-56f69fc7cc-s5j8w" event={"ID":"44877e6c-40f0-4f95-93f3-18bfd195a9bb","Type":"ContainerStarted","Data":"baec2eac2e51a601f33873d9e6d22610878c7ac795a19d7c6c4a0c2bb826e361"} Jan 28 17:14:13 crc kubenswrapper[4811]: I0128 17:14:13.136935 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-56f69fc7cc-s5j8w" Jan 28 17:14:13 crc kubenswrapper[4811]: I0128 17:14:13.158275 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-56f69fc7cc-s5j8w" podStartSLOduration=2.158253202 podStartE2EDuration="2.158253202s" podCreationTimestamp="2026-01-28 17:14:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:14:13.151660833 +0000 UTC m=+5345.906024426" watchObservedRunningTime="2026-01-28 17:14:13.158253202 +0000 UTC m=+5345.912616785" Jan 28 17:14:33 crc kubenswrapper[4811]: I0128 17:14:33.087349 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 17:14:33 crc kubenswrapper[4811]: I0128 17:14:33.087931 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 17:14:43 crc kubenswrapper[4811]: I0128 17:14:43.080622 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-56f69fc7cc-s5j8w" Jan 28 17:14:46 crc kubenswrapper[4811]: I0128 17:14:46.542183 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Jan 28 17:14:46 crc kubenswrapper[4811]: I0128 17:14:46.544035 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 28 17:14:46 crc kubenswrapper[4811]: I0128 17:14:46.546045 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-rzrsc" Jan 28 17:14:46 crc kubenswrapper[4811]: I0128 17:14:46.547590 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Jan 28 17:14:46 crc kubenswrapper[4811]: I0128 17:14:46.555777 4811 status_manager.go:875] "Failed to update status for pod" pod="openstack/openstackclient" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bb3ec9a-6dcf-4a50-b14b-3e408307ea1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T17:14:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T17:14:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T17:14:46Z\\\",\\\"message\\\":\\\"containers with unready status: [openstackclient]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-28T17:14:46Z\\\",\\\"message\\\":\\\"containers with unready status: [openstackclient]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/podified-antelope-centos9/openstack-openstackclient@sha256:2b4f8494513a3af102066fec5868ab167ac8664aceb2f0c639d7a0b60260a944\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"openstackclient\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/home/cloud-admin/.config/openstack/clouds.yaml\\\",\\\"name\\\":\\\"openstack-config\\\"},{\\\"mountPath\\\":\\\"/home/cloud-admin/.config/openstack/secure.yaml\\\",\\\"name\\\":\\\"openstack-config-secret\\\"},{\\\"mountPath\\\":\\\"/home/cloud-admin/cloudrc\\\",\\\"name\\\":\\\"openstack-config-secret\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-28T17:14:46Z\\\"}}\" for pod \"openstack\"/\"openstackclient\": pods \"openstackclient\" not found" Jan 28 
17:14:46 crc kubenswrapper[4811]: W0128 17:14:46.558032 4811 reflector.go:561] object-"openstack"/"openstack-config": failed to list *v1.ConfigMap: configmaps "openstack-config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Jan 28 17:14:46 crc kubenswrapper[4811]: E0128 17:14:46.558100 4811 reflector.go:158] "Unhandled Error" err="object-\"openstack\"/\"openstack-config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openstack-config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openstack\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 28 17:14:46 crc kubenswrapper[4811]: I0128 17:14:46.559527 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jan 28 17:14:46 crc kubenswrapper[4811]: I0128 17:14:46.572491 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Jan 28 17:14:46 crc kubenswrapper[4811]: E0128 17:14:46.579985 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-httfg openstack-config openstack-config-secret], unattached volumes=[], failed to process volumes=[kube-api-access-httfg openstack-config openstack-config-secret]: context canceled" pod="openstack/openstackclient" podUID="3bb3ec9a-6dcf-4a50-b14b-3e408307ea1b" Jan 28 17:14:46 crc kubenswrapper[4811]: I0128 17:14:46.585450 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Jan 28 17:14:46 crc kubenswrapper[4811]: I0128 17:14:46.594406 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Jan 28 17:14:46 crc kubenswrapper[4811]: I0128 17:14:46.595739 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Jan 28 17:14:46 crc kubenswrapper[4811]: I0128 17:14:46.602733 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jan 28 17:14:46 crc kubenswrapper[4811]: I0128 17:14:46.619215 4811 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="3bb3ec9a-6dcf-4a50-b14b-3e408307ea1b" podUID="08cfa5b2-641d-4b3e-89d2-81a5af8f3834" Jan 28 17:14:46 crc kubenswrapper[4811]: I0128 17:14:46.753749 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f927p\" (UniqueName: \"kubernetes.io/projected/08cfa5b2-641d-4b3e-89d2-81a5af8f3834-kube-api-access-f927p\") pod \"openstackclient\" (UID: \"08cfa5b2-641d-4b3e-89d2-81a5af8f3834\") " pod="openstack/openstackclient" Jan 28 17:14:46 crc kubenswrapper[4811]: I0128 17:14:46.753842 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/08cfa5b2-641d-4b3e-89d2-81a5af8f3834-openstack-config\") pod \"openstackclient\" (UID: \"08cfa5b2-641d-4b3e-89d2-81a5af8f3834\") " pod="openstack/openstackclient" Jan 28 17:14:46 crc kubenswrapper[4811]: I0128 17:14:46.754077 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/08cfa5b2-641d-4b3e-89d2-81a5af8f3834-openstack-config-secret\") pod \"openstackclient\" (UID: \"08cfa5b2-641d-4b3e-89d2-81a5af8f3834\") " pod="openstack/openstackclient" Jan 28 17:14:46 crc kubenswrapper[4811]: I0128 17:14:46.856214 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/08cfa5b2-641d-4b3e-89d2-81a5af8f3834-openstack-config-secret\") pod \"openstackclient\" (UID: \"08cfa5b2-641d-4b3e-89d2-81a5af8f3834\") " pod="openstack/openstackclient" Jan 28 17:14:46 crc kubenswrapper[4811]: I0128 17:14:46.856347 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f927p\" (UniqueName: \"kubernetes.io/projected/08cfa5b2-641d-4b3e-89d2-81a5af8f3834-kube-api-access-f927p\") pod \"openstackclient\" (UID: \"08cfa5b2-641d-4b3e-89d2-81a5af8f3834\") " pod="openstack/openstackclient" Jan 28 17:14:46 crc kubenswrapper[4811]: I0128 17:14:46.856398 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/08cfa5b2-641d-4b3e-89d2-81a5af8f3834-openstack-config\") pod \"openstackclient\" (UID: \"08cfa5b2-641d-4b3e-89d2-81a5af8f3834\") " pod="openstack/openstackclient" Jan 28 17:14:46 crc kubenswrapper[4811]: I0128 17:14:46.862570 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/08cfa5b2-641d-4b3e-89d2-81a5af8f3834-openstack-config-secret\") pod \"openstackclient\" (UID: \"08cfa5b2-641d-4b3e-89d2-81a5af8f3834\") " pod="openstack/openstackclient" Jan 28 17:14:46 crc kubenswrapper[4811]: I0128 17:14:46.875359 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f927p\" (UniqueName: \"kubernetes.io/projected/08cfa5b2-641d-4b3e-89d2-81a5af8f3834-kube-api-access-f927p\") pod \"openstackclient\" (UID: \"08cfa5b2-641d-4b3e-89d2-81a5af8f3834\") " pod="openstack/openstackclient" Jan 28 17:14:47 crc 
kubenswrapper[4811]: I0128 17:14:47.423311 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 28 17:14:47 crc kubenswrapper[4811]: I0128 17:14:47.427690 4811 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="3bb3ec9a-6dcf-4a50-b14b-3e408307ea1b" podUID="08cfa5b2-641d-4b3e-89d2-81a5af8f3834" Jan 28 17:14:47 crc kubenswrapper[4811]: I0128 17:14:47.434712 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 28 17:14:47 crc kubenswrapper[4811]: I0128 17:14:47.437566 4811 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="3bb3ec9a-6dcf-4a50-b14b-3e408307ea1b" podUID="08cfa5b2-641d-4b3e-89d2-81a5af8f3834" Jan 28 17:14:47 crc kubenswrapper[4811]: I0128 17:14:47.836034 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Jan 28 17:14:47 crc kubenswrapper[4811]: I0128 17:14:47.838027 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/08cfa5b2-641d-4b3e-89d2-81a5af8f3834-openstack-config\") pod \"openstackclient\" (UID: \"08cfa5b2-641d-4b3e-89d2-81a5af8f3834\") " pod="openstack/openstackclient" Jan 28 17:14:48 crc kubenswrapper[4811]: I0128 17:14:48.120601 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 28 17:14:48 crc kubenswrapper[4811]: I0128 17:14:48.349866 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3bb3ec9a-6dcf-4a50-b14b-3e408307ea1b" path="/var/lib/kubelet/pods/3bb3ec9a-6dcf-4a50-b14b-3e408307ea1b/volumes" Jan 28 17:14:48 crc kubenswrapper[4811]: I0128 17:14:48.429999 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Jan 28 17:14:48 crc kubenswrapper[4811]: I0128 17:14:48.434080 4811 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="3bb3ec9a-6dcf-4a50-b14b-3e408307ea1b" podUID="08cfa5b2-641d-4b3e-89d2-81a5af8f3834" Jan 28 17:14:48 crc kubenswrapper[4811]: I0128 17:14:48.437115 4811 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="3bb3ec9a-6dcf-4a50-b14b-3e408307ea1b" podUID="08cfa5b2-641d-4b3e-89d2-81a5af8f3834" Jan 28 17:14:48 crc kubenswrapper[4811]: I0128 17:14:48.631949 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jan 28 17:14:49 crc kubenswrapper[4811]: I0128 17:14:49.437374 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"08cfa5b2-641d-4b3e-89d2-81a5af8f3834","Type":"ContainerStarted","Data":"eac79f1b01bdef15ac09ff6ed35630e6acd06463265746d75bb314588a785b46"} Jan 28 17:14:49 crc kubenswrapper[4811]: I0128 17:14:49.437709 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"08cfa5b2-641d-4b3e-89d2-81a5af8f3834","Type":"ContainerStarted","Data":"669bfb1229f7704e4db70553fee7295cbe1af54cf356d0a8033aa83087b25253"} Jan 28 17:14:49 crc kubenswrapper[4811]: I0128 17:14:49.461000 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=3.460973989 podStartE2EDuration="3.460973989s" podCreationTimestamp="2026-01-28 17:14:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:14:49.451271275 +0000 UTC m=+5382.205634868" watchObservedRunningTime="2026-01-28 17:14:49.460973989 +0000 UTC m=+5382.215337592" Jan 28 17:15:00 crc kubenswrapper[4811]: I0128 17:15:00.152300 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493675-dv2kn"] Jan 28 17:15:00 crc kubenswrapper[4811]: I0128 17:15:00.154556 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493675-dv2kn" Jan 28 17:15:00 crc kubenswrapper[4811]: I0128 17:15:00.159536 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 28 17:15:00 crc kubenswrapper[4811]: I0128 17:15:00.163232 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 28 17:15:00 crc kubenswrapper[4811]: I0128 17:15:00.174787 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493675-dv2kn"] Jan 28 17:15:00 crc kubenswrapper[4811]: I0128 17:15:00.283925 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbnch\" (UniqueName: \"kubernetes.io/projected/c1357af3-97dc-4364-8c0f-88e491dd605d-kube-api-access-jbnch\") pod \"collect-profiles-29493675-dv2kn\" (UID: \"c1357af3-97dc-4364-8c0f-88e491dd605d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493675-dv2kn" Jan 28 17:15:00 crc kubenswrapper[4811]: I0128 17:15:00.284004 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c1357af3-97dc-4364-8c0f-88e491dd605d-secret-volume\") pod \"collect-profiles-29493675-dv2kn\" (UID: \"c1357af3-97dc-4364-8c0f-88e491dd605d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493675-dv2kn" Jan 28 17:15:00 crc kubenswrapper[4811]: I0128 17:15:00.284136 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c1357af3-97dc-4364-8c0f-88e491dd605d-config-volume\") pod \"collect-profiles-29493675-dv2kn\" (UID: \"c1357af3-97dc-4364-8c0f-88e491dd605d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493675-dv2kn" Jan 28 17:15:00 crc kubenswrapper[4811]: I0128 17:15:00.385481 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c1357af3-97dc-4364-8c0f-88e491dd605d-config-volume\") pod \"collect-profiles-29493675-dv2kn\" (UID: \"c1357af3-97dc-4364-8c0f-88e491dd605d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493675-dv2kn" Jan 28 17:15:00 crc kubenswrapper[4811]: I0128 17:15:00.385536 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbnch\" (UniqueName: \"kubernetes.io/projected/c1357af3-97dc-4364-8c0f-88e491dd605d-kube-api-access-jbnch\") pod \"collect-profiles-29493675-dv2kn\" (UID: \"c1357af3-97dc-4364-8c0f-88e491dd605d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493675-dv2kn" Jan 28 17:15:00 crc kubenswrapper[4811]: I0128 17:15:00.385576 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c1357af3-97dc-4364-8c0f-88e491dd605d-secret-volume\") pod \"collect-profiles-29493675-dv2kn\" (UID: \"c1357af3-97dc-4364-8c0f-88e491dd605d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493675-dv2kn" Jan 28 17:15:00 crc kubenswrapper[4811]: I0128 17:15:00.386506 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c1357af3-97dc-4364-8c0f-88e491dd605d-config-volume\") pod 
\"collect-profiles-29493675-dv2kn\" (UID: \"c1357af3-97dc-4364-8c0f-88e491dd605d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493675-dv2kn" Jan 28 17:15:00 crc kubenswrapper[4811]: I0128 17:15:00.393549 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c1357af3-97dc-4364-8c0f-88e491dd605d-secret-volume\") pod \"collect-profiles-29493675-dv2kn\" (UID: \"c1357af3-97dc-4364-8c0f-88e491dd605d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493675-dv2kn" Jan 28 17:15:00 crc kubenswrapper[4811]: I0128 17:15:00.407857 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbnch\" (UniqueName: \"kubernetes.io/projected/c1357af3-97dc-4364-8c0f-88e491dd605d-kube-api-access-jbnch\") pod \"collect-profiles-29493675-dv2kn\" (UID: \"c1357af3-97dc-4364-8c0f-88e491dd605d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493675-dv2kn" Jan 28 17:15:00 crc kubenswrapper[4811]: I0128 17:15:00.479915 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493675-dv2kn" Jan 28 17:15:00 crc kubenswrapper[4811]: I0128 17:15:00.924352 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493675-dv2kn"] Jan 28 17:15:01 crc kubenswrapper[4811]: I0128 17:15:01.536255 4811 generic.go:334] "Generic (PLEG): container finished" podID="c1357af3-97dc-4364-8c0f-88e491dd605d" containerID="24f4d1bbb82b607e8f4c22f7a51f59946c92afba512f147a7c68d6cc3793d82f" exitCode=0 Jan 28 17:15:01 crc kubenswrapper[4811]: I0128 17:15:01.536303 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493675-dv2kn" event={"ID":"c1357af3-97dc-4364-8c0f-88e491dd605d","Type":"ContainerDied","Data":"24f4d1bbb82b607e8f4c22f7a51f59946c92afba512f147a7c68d6cc3793d82f"} Jan 28 17:15:01 crc kubenswrapper[4811]: I0128 17:15:01.537417 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493675-dv2kn" event={"ID":"c1357af3-97dc-4364-8c0f-88e491dd605d","Type":"ContainerStarted","Data":"18b55e8bb80f0e69960cced919115d0fb38a1482781e5a547737ff753ee61e70"} Jan 28 17:15:02 crc kubenswrapper[4811]: I0128 17:15:02.879380 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493675-dv2kn" Jan 28 17:15:02 crc kubenswrapper[4811]: I0128 17:15:02.934524 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jbnch\" (UniqueName: \"kubernetes.io/projected/c1357af3-97dc-4364-8c0f-88e491dd605d-kube-api-access-jbnch\") pod \"c1357af3-97dc-4364-8c0f-88e491dd605d\" (UID: \"c1357af3-97dc-4364-8c0f-88e491dd605d\") " Jan 28 17:15:02 crc kubenswrapper[4811]: I0128 17:15:02.934640 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c1357af3-97dc-4364-8c0f-88e491dd605d-config-volume\") pod \"c1357af3-97dc-4364-8c0f-88e491dd605d\" (UID: \"c1357af3-97dc-4364-8c0f-88e491dd605d\") " Jan 28 17:15:02 crc kubenswrapper[4811]: I0128 17:15:02.934738 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c1357af3-97dc-4364-8c0f-88e491dd605d-secret-volume\") pod \"c1357af3-97dc-4364-8c0f-88e491dd605d\" (UID: \"c1357af3-97dc-4364-8c0f-88e491dd605d\") " Jan 28 17:15:02 crc kubenswrapper[4811]: I0128 17:15:02.935622 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1357af3-97dc-4364-8c0f-88e491dd605d-config-volume" (OuterVolumeSpecName: "config-volume") pod "c1357af3-97dc-4364-8c0f-88e491dd605d" (UID: "c1357af3-97dc-4364-8c0f-88e491dd605d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:15:02 crc kubenswrapper[4811]: I0128 17:15:02.941903 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1357af3-97dc-4364-8c0f-88e491dd605d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c1357af3-97dc-4364-8c0f-88e491dd605d" (UID: "c1357af3-97dc-4364-8c0f-88e491dd605d"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:15:02 crc kubenswrapper[4811]: I0128 17:15:02.942048 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1357af3-97dc-4364-8c0f-88e491dd605d-kube-api-access-jbnch" (OuterVolumeSpecName: "kube-api-access-jbnch") pod "c1357af3-97dc-4364-8c0f-88e491dd605d" (UID: "c1357af3-97dc-4364-8c0f-88e491dd605d"). InnerVolumeSpecName "kube-api-access-jbnch". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:15:03 crc kubenswrapper[4811]: I0128 17:15:03.036920 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jbnch\" (UniqueName: \"kubernetes.io/projected/c1357af3-97dc-4364-8c0f-88e491dd605d-kube-api-access-jbnch\") on node \"crc\" DevicePath \"\"" Jan 28 17:15:03 crc kubenswrapper[4811]: I0128 17:15:03.037206 4811 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c1357af3-97dc-4364-8c0f-88e491dd605d-config-volume\") on node \"crc\" DevicePath \"\"" Jan 28 17:15:03 crc kubenswrapper[4811]: I0128 17:15:03.037216 4811 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c1357af3-97dc-4364-8c0f-88e491dd605d-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 28 17:15:03 crc kubenswrapper[4811]: I0128 17:15:03.088268 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 17:15:03 crc kubenswrapper[4811]: I0128 17:15:03.088415 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 17:15:03 crc kubenswrapper[4811]: I0128 17:15:03.257361 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2shwm"] Jan 28 17:15:03 crc kubenswrapper[4811]: E0128 17:15:03.257772 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1357af3-97dc-4364-8c0f-88e491dd605d" containerName="collect-profiles" Jan 28 17:15:03 crc kubenswrapper[4811]: I0128 17:15:03.257791 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1357af3-97dc-4364-8c0f-88e491dd605d" containerName="collect-profiles" Jan 28 17:15:03 crc kubenswrapper[4811]: I0128 17:15:03.257963 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1357af3-97dc-4364-8c0f-88e491dd605d" containerName="collect-profiles" Jan 28 17:15:03 crc kubenswrapper[4811]: I0128 17:15:03.259134 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2shwm" Jan 28 17:15:03 crc kubenswrapper[4811]: I0128 17:15:03.276466 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2shwm"] Jan 28 17:15:03 crc kubenswrapper[4811]: I0128 17:15:03.342081 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18ae189c-f7c8-421c-958e-44dd946461a6-utilities\") pod \"certified-operators-2shwm\" (UID: \"18ae189c-f7c8-421c-958e-44dd946461a6\") " pod="openshift-marketplace/certified-operators-2shwm" Jan 28 17:15:03 crc kubenswrapper[4811]: I0128 17:15:03.342206 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fz5jb\" (UniqueName: \"kubernetes.io/projected/18ae189c-f7c8-421c-958e-44dd946461a6-kube-api-access-fz5jb\") pod \"certified-operators-2shwm\" (UID: \"18ae189c-f7c8-421c-958e-44dd946461a6\") " pod="openshift-marketplace/certified-operators-2shwm" Jan 28 17:15:03 crc kubenswrapper[4811]: I0128 17:15:03.342267 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18ae189c-f7c8-421c-958e-44dd946461a6-catalog-content\") pod \"certified-operators-2shwm\" (UID: \"18ae189c-f7c8-421c-958e-44dd946461a6\") " pod="openshift-marketplace/certified-operators-2shwm" Jan 28 17:15:03 crc kubenswrapper[4811]: I0128 17:15:03.443722 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18ae189c-f7c8-421c-958e-44dd946461a6-catalog-content\") pod \"certified-operators-2shwm\" (UID: \"18ae189c-f7c8-421c-958e-44dd946461a6\") " pod="openshift-marketplace/certified-operators-2shwm" Jan 28 17:15:03 crc kubenswrapper[4811]: I0128 17:15:03.444150 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18ae189c-f7c8-421c-958e-44dd946461a6-catalog-content\") pod \"certified-operators-2shwm\" (UID: \"18ae189c-f7c8-421c-958e-44dd946461a6\") " pod="openshift-marketplace/certified-operators-2shwm" Jan 28 17:15:03 crc kubenswrapper[4811]: I0128 17:15:03.444331 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18ae189c-f7c8-421c-958e-44dd946461a6-utilities\") pod \"certified-operators-2shwm\" (UID: \"18ae189c-f7c8-421c-958e-44dd946461a6\") " pod="openshift-marketplace/certified-operators-2shwm" Jan 28 17:15:03 crc kubenswrapper[4811]: I0128 17:15:03.444774 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18ae189c-f7c8-421c-958e-44dd946461a6-utilities\") pod \"certified-operators-2shwm\" (UID: \"18ae189c-f7c8-421c-958e-44dd946461a6\") " pod="openshift-marketplace/certified-operators-2shwm" Jan 28 17:15:03 crc kubenswrapper[4811]: I0128 17:15:03.445759 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fz5jb\" (UniqueName: \"kubernetes.io/projected/18ae189c-f7c8-421c-958e-44dd946461a6-kube-api-access-fz5jb\") pod \"certified-operators-2shwm\" (UID: \"18ae189c-f7c8-421c-958e-44dd946461a6\") " pod="openshift-marketplace/certified-operators-2shwm" Jan 28 17:15:03 crc kubenswrapper[4811]: I0128 17:15:03.463283 4811 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-fz5jb\" (UniqueName: \"kubernetes.io/projected/18ae189c-f7c8-421c-958e-44dd946461a6-kube-api-access-fz5jb\") pod \"certified-operators-2shwm\" (UID: \"18ae189c-f7c8-421c-958e-44dd946461a6\") " pod="openshift-marketplace/certified-operators-2shwm" Jan 28 17:15:03 crc kubenswrapper[4811]: I0128 17:15:03.555673 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493675-dv2kn" event={"ID":"c1357af3-97dc-4364-8c0f-88e491dd605d","Type":"ContainerDied","Data":"18b55e8bb80f0e69960cced919115d0fb38a1482781e5a547737ff753ee61e70"} Jan 28 17:15:03 crc kubenswrapper[4811]: I0128 17:15:03.555742 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="18b55e8bb80f0e69960cced919115d0fb38a1482781e5a547737ff753ee61e70" Jan 28 17:15:03 crc kubenswrapper[4811]: I0128 17:15:03.555835 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493675-dv2kn" Jan 28 17:15:03 crc kubenswrapper[4811]: I0128 17:15:03.596460 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2shwm" Jan 28 17:15:03 crc kubenswrapper[4811]: I0128 17:15:03.875700 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2shwm"] Jan 28 17:15:03 crc kubenswrapper[4811]: I0128 17:15:03.965951 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493630-njk86"] Jan 28 17:15:03 crc kubenswrapper[4811]: I0128 17:15:03.972287 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493630-njk86"] Jan 28 17:15:04 crc kubenswrapper[4811]: I0128 17:15:04.058761 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-pfcb5"] Jan 28 17:15:04 crc kubenswrapper[4811]: I0128 17:15:04.060363 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pfcb5" Jan 28 17:15:04 crc kubenswrapper[4811]: I0128 17:15:04.083902 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pfcb5"] Jan 28 17:15:04 crc kubenswrapper[4811]: I0128 17:15:04.155756 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hr2s9\" (UniqueName: \"kubernetes.io/projected/ecae9ce4-9369-4774-aada-5233f835250c-kube-api-access-hr2s9\") pod \"community-operators-pfcb5\" (UID: \"ecae9ce4-9369-4774-aada-5233f835250c\") " pod="openshift-marketplace/community-operators-pfcb5" Jan 28 17:15:04 crc kubenswrapper[4811]: I0128 17:15:04.155836 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecae9ce4-9369-4774-aada-5233f835250c-catalog-content\") pod \"community-operators-pfcb5\" (UID: \"ecae9ce4-9369-4774-aada-5233f835250c\") " pod="openshift-marketplace/community-operators-pfcb5" Jan 28 17:15:04 crc kubenswrapper[4811]: I0128 17:15:04.156085 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecae9ce4-9369-4774-aada-5233f835250c-utilities\") pod \"community-operators-pfcb5\" (UID: \"ecae9ce4-9369-4774-aada-5233f835250c\") " pod="openshift-marketplace/community-operators-pfcb5" Jan 28 17:15:04 crc kubenswrapper[4811]: I0128 17:15:04.258342 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hr2s9\" (UniqueName: \"kubernetes.io/projected/ecae9ce4-9369-4774-aada-5233f835250c-kube-api-access-hr2s9\") pod \"community-operators-pfcb5\" (UID: \"ecae9ce4-9369-4774-aada-5233f835250c\") " pod="openshift-marketplace/community-operators-pfcb5" Jan 28 17:15:04 crc kubenswrapper[4811]: I0128 17:15:04.258445 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecae9ce4-9369-4774-aada-5233f835250c-catalog-content\") pod \"community-operators-pfcb5\" (UID: \"ecae9ce4-9369-4774-aada-5233f835250c\") " pod="openshift-marketplace/community-operators-pfcb5" Jan 28 17:15:04 crc kubenswrapper[4811]: I0128 17:15:04.258514 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecae9ce4-9369-4774-aada-5233f835250c-utilities\") pod \"community-operators-pfcb5\" (UID: \"ecae9ce4-9369-4774-aada-5233f835250c\") " pod="openshift-marketplace/community-operators-pfcb5" Jan 28 17:15:04 crc kubenswrapper[4811]: I0128 17:15:04.259164 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecae9ce4-9369-4774-aada-5233f835250c-utilities\") pod \"community-operators-pfcb5\" (UID: \"ecae9ce4-9369-4774-aada-5233f835250c\") " pod="openshift-marketplace/community-operators-pfcb5" Jan 28 17:15:04 crc kubenswrapper[4811]: I0128 17:15:04.259298 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecae9ce4-9369-4774-aada-5233f835250c-catalog-content\") pod \"community-operators-pfcb5\" (UID: \"ecae9ce4-9369-4774-aada-5233f835250c\") " pod="openshift-marketplace/community-operators-pfcb5" Jan 28 17:15:04 crc kubenswrapper[4811]: I0128 17:15:04.279259 4811 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-hr2s9\" (UniqueName: \"kubernetes.io/projected/ecae9ce4-9369-4774-aada-5233f835250c-kube-api-access-hr2s9\") pod \"community-operators-pfcb5\" (UID: \"ecae9ce4-9369-4774-aada-5233f835250c\") " pod="openshift-marketplace/community-operators-pfcb5" Jan 28 17:15:04 crc kubenswrapper[4811]: I0128 17:15:04.350153 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6cdc2d6-b33b-45b4-89ee-5cc3bb745442" path="/var/lib/kubelet/pods/e6cdc2d6-b33b-45b4-89ee-5cc3bb745442/volumes" Jan 28 17:15:04 crc kubenswrapper[4811]: I0128 17:15:04.380888 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pfcb5" Jan 28 17:15:04 crc kubenswrapper[4811]: I0128 17:15:04.573247 4811 generic.go:334] "Generic (PLEG): container finished" podID="18ae189c-f7c8-421c-958e-44dd946461a6" containerID="d49df256ddbff794db5ac9fe3738a2db4ecceb177c7b1849258b07c2554a47ec" exitCode=0 Jan 28 17:15:04 crc kubenswrapper[4811]: I0128 17:15:04.573310 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2shwm" event={"ID":"18ae189c-f7c8-421c-958e-44dd946461a6","Type":"ContainerDied","Data":"d49df256ddbff794db5ac9fe3738a2db4ecceb177c7b1849258b07c2554a47ec"} Jan 28 17:15:04 crc kubenswrapper[4811]: I0128 17:15:04.573352 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2shwm" event={"ID":"18ae189c-f7c8-421c-958e-44dd946461a6","Type":"ContainerStarted","Data":"3d84de0ecd458e23308e0ae265f80e466786742a7a2ffa4e4fdbaab96168447a"} Jan 28 17:15:04 crc kubenswrapper[4811]: I0128 17:15:04.744056 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pfcb5"] Jan 28 17:15:05 crc kubenswrapper[4811]: I0128 17:15:05.585571 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2shwm" event={"ID":"18ae189c-f7c8-421c-958e-44dd946461a6","Type":"ContainerStarted","Data":"a689cce826186b3aea76b40f1af2f05dcc4dfe41bf0a218835739ad0159ed5e9"} Jan 28 17:15:05 crc kubenswrapper[4811]: I0128 17:15:05.589049 4811 generic.go:334] "Generic (PLEG): container finished" podID="ecae9ce4-9369-4774-aada-5233f835250c" containerID="0a089b815327d33a5f664087e6a0a3aee29916cbfdc2a7485569c746a537a3c0" exitCode=0 Jan 28 17:15:05 crc kubenswrapper[4811]: I0128 17:15:05.589081 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pfcb5" event={"ID":"ecae9ce4-9369-4774-aada-5233f835250c","Type":"ContainerDied","Data":"0a089b815327d33a5f664087e6a0a3aee29916cbfdc2a7485569c746a537a3c0"} Jan 28 17:15:05 crc kubenswrapper[4811]: I0128 17:15:05.589100 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pfcb5" event={"ID":"ecae9ce4-9369-4774-aada-5233f835250c","Type":"ContainerStarted","Data":"f2ea20d8fd5163db72e620d64fa63ecbd679d4ec8aa51c4d973b9d8e1ff9e387"} Jan 28 17:15:06 crc kubenswrapper[4811]: I0128 17:15:06.598571 4811 generic.go:334] "Generic (PLEG): container finished" podID="18ae189c-f7c8-421c-958e-44dd946461a6" containerID="a689cce826186b3aea76b40f1af2f05dcc4dfe41bf0a218835739ad0159ed5e9" exitCode=0 Jan 28 17:15:06 crc kubenswrapper[4811]: I0128 17:15:06.598627 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2shwm" 
event={"ID":"18ae189c-f7c8-421c-958e-44dd946461a6","Type":"ContainerDied","Data":"a689cce826186b3aea76b40f1af2f05dcc4dfe41bf0a218835739ad0159ed5e9"} Jan 28 17:15:07 crc kubenswrapper[4811]: I0128 17:15:07.608552 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2shwm" event={"ID":"18ae189c-f7c8-421c-958e-44dd946461a6","Type":"ContainerStarted","Data":"d4be119d6a9765d01e7b83239a7edce5ffa2738c76a20fcf27d134a2cf693d35"} Jan 28 17:15:07 crc kubenswrapper[4811]: I0128 17:15:07.612551 4811 generic.go:334] "Generic (PLEG): container finished" podID="ecae9ce4-9369-4774-aada-5233f835250c" containerID="b02dab9dfa95d720a2c44271e9ada20d8bf1db86ee22c163e05f02d33b1bfd6a" exitCode=0 Jan 28 17:15:07 crc kubenswrapper[4811]: I0128 17:15:07.612611 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pfcb5" event={"ID":"ecae9ce4-9369-4774-aada-5233f835250c","Type":"ContainerDied","Data":"b02dab9dfa95d720a2c44271e9ada20d8bf1db86ee22c163e05f02d33b1bfd6a"} Jan 28 17:15:07 crc kubenswrapper[4811]: I0128 17:15:07.638835 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2shwm" podStartSLOduration=1.863663103 podStartE2EDuration="4.638813094s" podCreationTimestamp="2026-01-28 17:15:03 +0000 UTC" firstStartedPulling="2026-01-28 17:15:04.578470596 +0000 UTC m=+5397.332834179" lastFinishedPulling="2026-01-28 17:15:07.353620587 +0000 UTC m=+5400.107984170" observedRunningTime="2026-01-28 17:15:07.630535889 +0000 UTC m=+5400.384899472" watchObservedRunningTime="2026-01-28 17:15:07.638813094 +0000 UTC m=+5400.393176677" Jan 28 17:15:08 crc kubenswrapper[4811]: I0128 17:15:08.622073 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pfcb5" event={"ID":"ecae9ce4-9369-4774-aada-5233f835250c","Type":"ContainerStarted","Data":"8deb0140fa6469416a4c18c24d8749f012dc607847994d8fce45512098cefde2"} Jan 28 17:15:08 crc kubenswrapper[4811]: I0128 17:15:08.645010 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-pfcb5" podStartSLOduration=2.247665087 podStartE2EDuration="4.644986248s" podCreationTimestamp="2026-01-28 17:15:04 +0000 UTC" firstStartedPulling="2026-01-28 17:15:05.591597489 +0000 UTC m=+5398.345961072" lastFinishedPulling="2026-01-28 17:15:07.98891863 +0000 UTC m=+5400.743282233" observedRunningTime="2026-01-28 17:15:08.640200267 +0000 UTC m=+5401.394563840" watchObservedRunningTime="2026-01-28 17:15:08.644986248 +0000 UTC m=+5401.399349831" Jan 28 17:15:13 crc kubenswrapper[4811]: I0128 17:15:13.597537 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2shwm" Jan 28 17:15:13 crc kubenswrapper[4811]: I0128 17:15:13.598424 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2shwm" Jan 28 17:15:13 crc kubenswrapper[4811]: I0128 17:15:13.668635 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2shwm" Jan 28 17:15:14 crc kubenswrapper[4811]: I0128 17:15:14.381870 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-pfcb5" Jan 28 17:15:14 crc kubenswrapper[4811]: I0128 17:15:14.382423 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/community-operators-pfcb5" Jan 28 17:15:14 crc kubenswrapper[4811]: I0128 17:15:14.430287 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-pfcb5" Jan 28 17:15:14 crc kubenswrapper[4811]: I0128 17:15:14.714314 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2shwm" Jan 28 17:15:14 crc kubenswrapper[4811]: I0128 17:15:14.715202 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-pfcb5" Jan 28 17:15:14 crc kubenswrapper[4811]: I0128 17:15:14.907570 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2shwm"] Jan 28 17:15:16 crc kubenswrapper[4811]: I0128 17:15:16.681483 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2shwm" podUID="18ae189c-f7c8-421c-958e-44dd946461a6" containerName="registry-server" containerID="cri-o://d4be119d6a9765d01e7b83239a7edce5ffa2738c76a20fcf27d134a2cf693d35" gracePeriod=2 Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.101196 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pfcb5"] Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.101501 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-pfcb5" podUID="ecae9ce4-9369-4774-aada-5233f835250c" containerName="registry-server" containerID="cri-o://8deb0140fa6469416a4c18c24d8749f012dc607847994d8fce45512098cefde2" gracePeriod=2 Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.526678 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pfcb5" Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.599854 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecae9ce4-9369-4774-aada-5233f835250c-utilities\") pod \"ecae9ce4-9369-4774-aada-5233f835250c\" (UID: \"ecae9ce4-9369-4774-aada-5233f835250c\") " Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.600018 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecae9ce4-9369-4774-aada-5233f835250c-catalog-content\") pod \"ecae9ce4-9369-4774-aada-5233f835250c\" (UID: \"ecae9ce4-9369-4774-aada-5233f835250c\") " Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.600049 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hr2s9\" (UniqueName: \"kubernetes.io/projected/ecae9ce4-9369-4774-aada-5233f835250c-kube-api-access-hr2s9\") pod \"ecae9ce4-9369-4774-aada-5233f835250c\" (UID: \"ecae9ce4-9369-4774-aada-5233f835250c\") " Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.601890 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ecae9ce4-9369-4774-aada-5233f835250c-utilities" (OuterVolumeSpecName: "utilities") pod "ecae9ce4-9369-4774-aada-5233f835250c" (UID: "ecae9ce4-9369-4774-aada-5233f835250c"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.610318 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecae9ce4-9369-4774-aada-5233f835250c-kube-api-access-hr2s9" (OuterVolumeSpecName: "kube-api-access-hr2s9") pod "ecae9ce4-9369-4774-aada-5233f835250c" (UID: "ecae9ce4-9369-4774-aada-5233f835250c"). InnerVolumeSpecName "kube-api-access-hr2s9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.653266 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2shwm" Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.693526 4811 generic.go:334] "Generic (PLEG): container finished" podID="18ae189c-f7c8-421c-958e-44dd946461a6" containerID="d4be119d6a9765d01e7b83239a7edce5ffa2738c76a20fcf27d134a2cf693d35" exitCode=0 Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.693588 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2shwm" event={"ID":"18ae189c-f7c8-421c-958e-44dd946461a6","Type":"ContainerDied","Data":"d4be119d6a9765d01e7b83239a7edce5ffa2738c76a20fcf27d134a2cf693d35"} Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.693624 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2shwm" event={"ID":"18ae189c-f7c8-421c-958e-44dd946461a6","Type":"ContainerDied","Data":"3d84de0ecd458e23308e0ae265f80e466786742a7a2ffa4e4fdbaab96168447a"} Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.693642 4811 scope.go:117] "RemoveContainer" containerID="d4be119d6a9765d01e7b83239a7edce5ffa2738c76a20fcf27d134a2cf693d35" Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.693679 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2shwm" Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.697471 4811 generic.go:334] "Generic (PLEG): container finished" podID="ecae9ce4-9369-4774-aada-5233f835250c" containerID="8deb0140fa6469416a4c18c24d8749f012dc607847994d8fce45512098cefde2" exitCode=0 Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.697527 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pfcb5" event={"ID":"ecae9ce4-9369-4774-aada-5233f835250c","Type":"ContainerDied","Data":"8deb0140fa6469416a4c18c24d8749f012dc607847994d8fce45512098cefde2"} Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.697565 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pfcb5" event={"ID":"ecae9ce4-9369-4774-aada-5233f835250c","Type":"ContainerDied","Data":"f2ea20d8fd5163db72e620d64fa63ecbd679d4ec8aa51c4d973b9d8e1ff9e387"} Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.697668 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pfcb5" Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.702237 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hr2s9\" (UniqueName: \"kubernetes.io/projected/ecae9ce4-9369-4774-aada-5233f835250c-kube-api-access-hr2s9\") on node \"crc\" DevicePath \"\"" Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.702277 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecae9ce4-9369-4774-aada-5233f835250c-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.716896 4811 scope.go:117] "RemoveContainer" containerID="a689cce826186b3aea76b40f1af2f05dcc4dfe41bf0a218835739ad0159ed5e9" Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.737647 4811 scope.go:117] "RemoveContainer" containerID="d49df256ddbff794db5ac9fe3738a2db4ecceb177c7b1849258b07c2554a47ec" Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.760847 4811 scope.go:117] "RemoveContainer" containerID="d4be119d6a9765d01e7b83239a7edce5ffa2738c76a20fcf27d134a2cf693d35" Jan 28 17:15:17 crc kubenswrapper[4811]: E0128 17:15:17.761505 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4be119d6a9765d01e7b83239a7edce5ffa2738c76a20fcf27d134a2cf693d35\": container with ID starting with d4be119d6a9765d01e7b83239a7edce5ffa2738c76a20fcf27d134a2cf693d35 not found: ID does not exist" containerID="d4be119d6a9765d01e7b83239a7edce5ffa2738c76a20fcf27d134a2cf693d35" Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.761787 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4be119d6a9765d01e7b83239a7edce5ffa2738c76a20fcf27d134a2cf693d35"} err="failed to get container status \"d4be119d6a9765d01e7b83239a7edce5ffa2738c76a20fcf27d134a2cf693d35\": rpc error: code = NotFound desc = could not find container \"d4be119d6a9765d01e7b83239a7edce5ffa2738c76a20fcf27d134a2cf693d35\": container with ID starting with d4be119d6a9765d01e7b83239a7edce5ffa2738c76a20fcf27d134a2cf693d35 not found: ID does not exist" Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.761832 4811 scope.go:117] "RemoveContainer" containerID="a689cce826186b3aea76b40f1af2f05dcc4dfe41bf0a218835739ad0159ed5e9" Jan 28 17:15:17 crc kubenswrapper[4811]: E0128 17:15:17.762362 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a689cce826186b3aea76b40f1af2f05dcc4dfe41bf0a218835739ad0159ed5e9\": container with ID starting with a689cce826186b3aea76b40f1af2f05dcc4dfe41bf0a218835739ad0159ed5e9 not found: ID does not exist" containerID="a689cce826186b3aea76b40f1af2f05dcc4dfe41bf0a218835739ad0159ed5e9" Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.762394 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a689cce826186b3aea76b40f1af2f05dcc4dfe41bf0a218835739ad0159ed5e9"} err="failed to get container status \"a689cce826186b3aea76b40f1af2f05dcc4dfe41bf0a218835739ad0159ed5e9\": rpc error: code = NotFound desc = could not find container \"a689cce826186b3aea76b40f1af2f05dcc4dfe41bf0a218835739ad0159ed5e9\": container with ID starting with a689cce826186b3aea76b40f1af2f05dcc4dfe41bf0a218835739ad0159ed5e9 not found: ID does not exist" Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.762418 4811 scope.go:117] "RemoveContainer" 
containerID="d49df256ddbff794db5ac9fe3738a2db4ecceb177c7b1849258b07c2554a47ec" Jan 28 17:15:17 crc kubenswrapper[4811]: E0128 17:15:17.762700 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d49df256ddbff794db5ac9fe3738a2db4ecceb177c7b1849258b07c2554a47ec\": container with ID starting with d49df256ddbff794db5ac9fe3738a2db4ecceb177c7b1849258b07c2554a47ec not found: ID does not exist" containerID="d49df256ddbff794db5ac9fe3738a2db4ecceb177c7b1849258b07c2554a47ec" Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.762729 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d49df256ddbff794db5ac9fe3738a2db4ecceb177c7b1849258b07c2554a47ec"} err="failed to get container status \"d49df256ddbff794db5ac9fe3738a2db4ecceb177c7b1849258b07c2554a47ec\": rpc error: code = NotFound desc = could not find container \"d49df256ddbff794db5ac9fe3738a2db4ecceb177c7b1849258b07c2554a47ec\": container with ID starting with d49df256ddbff794db5ac9fe3738a2db4ecceb177c7b1849258b07c2554a47ec not found: ID does not exist" Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.762750 4811 scope.go:117] "RemoveContainer" containerID="8deb0140fa6469416a4c18c24d8749f012dc607847994d8fce45512098cefde2" Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.781309 4811 scope.go:117] "RemoveContainer" containerID="b02dab9dfa95d720a2c44271e9ada20d8bf1db86ee22c163e05f02d33b1bfd6a" Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.800349 4811 scope.go:117] "RemoveContainer" containerID="0a089b815327d33a5f664087e6a0a3aee29916cbfdc2a7485569c746a537a3c0" Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.804846 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fz5jb\" (UniqueName: \"kubernetes.io/projected/18ae189c-f7c8-421c-958e-44dd946461a6-kube-api-access-fz5jb\") pod \"18ae189c-f7c8-421c-958e-44dd946461a6\" (UID: \"18ae189c-f7c8-421c-958e-44dd946461a6\") " Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.805034 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18ae189c-f7c8-421c-958e-44dd946461a6-utilities\") pod \"18ae189c-f7c8-421c-958e-44dd946461a6\" (UID: \"18ae189c-f7c8-421c-958e-44dd946461a6\") " Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.806154 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18ae189c-f7c8-421c-958e-44dd946461a6-catalog-content\") pod \"18ae189c-f7c8-421c-958e-44dd946461a6\" (UID: \"18ae189c-f7c8-421c-958e-44dd946461a6\") " Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.809887 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18ae189c-f7c8-421c-958e-44dd946461a6-kube-api-access-fz5jb" (OuterVolumeSpecName: "kube-api-access-fz5jb") pod "18ae189c-f7c8-421c-958e-44dd946461a6" (UID: "18ae189c-f7c8-421c-958e-44dd946461a6"). InnerVolumeSpecName "kube-api-access-fz5jb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.814612 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/18ae189c-f7c8-421c-958e-44dd946461a6-utilities" (OuterVolumeSpecName: "utilities") pod "18ae189c-f7c8-421c-958e-44dd946461a6" (UID: "18ae189c-f7c8-421c-958e-44dd946461a6"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.854016 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/18ae189c-f7c8-421c-958e-44dd946461a6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "18ae189c-f7c8-421c-958e-44dd946461a6" (UID: "18ae189c-f7c8-421c-958e-44dd946461a6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.876313 4811 scope.go:117] "RemoveContainer" containerID="8deb0140fa6469416a4c18c24d8749f012dc607847994d8fce45512098cefde2" Jan 28 17:15:17 crc kubenswrapper[4811]: E0128 17:15:17.876865 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8deb0140fa6469416a4c18c24d8749f012dc607847994d8fce45512098cefde2\": container with ID starting with 8deb0140fa6469416a4c18c24d8749f012dc607847994d8fce45512098cefde2 not found: ID does not exist" containerID="8deb0140fa6469416a4c18c24d8749f012dc607847994d8fce45512098cefde2" Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.876922 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8deb0140fa6469416a4c18c24d8749f012dc607847994d8fce45512098cefde2"} err="failed to get container status \"8deb0140fa6469416a4c18c24d8749f012dc607847994d8fce45512098cefde2\": rpc error: code = NotFound desc = could not find container \"8deb0140fa6469416a4c18c24d8749f012dc607847994d8fce45512098cefde2\": container with ID starting with 8deb0140fa6469416a4c18c24d8749f012dc607847994d8fce45512098cefde2 not found: ID does not exist" Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.876954 4811 scope.go:117] "RemoveContainer" containerID="b02dab9dfa95d720a2c44271e9ada20d8bf1db86ee22c163e05f02d33b1bfd6a" Jan 28 17:15:17 crc kubenswrapper[4811]: E0128 17:15:17.877342 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b02dab9dfa95d720a2c44271e9ada20d8bf1db86ee22c163e05f02d33b1bfd6a\": container with ID starting with b02dab9dfa95d720a2c44271e9ada20d8bf1db86ee22c163e05f02d33b1bfd6a not found: ID does not exist" containerID="b02dab9dfa95d720a2c44271e9ada20d8bf1db86ee22c163e05f02d33b1bfd6a" Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.877378 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b02dab9dfa95d720a2c44271e9ada20d8bf1db86ee22c163e05f02d33b1bfd6a"} err="failed to get container status \"b02dab9dfa95d720a2c44271e9ada20d8bf1db86ee22c163e05f02d33b1bfd6a\": rpc error: code = NotFound desc = could not find container \"b02dab9dfa95d720a2c44271e9ada20d8bf1db86ee22c163e05f02d33b1bfd6a\": container with ID starting with b02dab9dfa95d720a2c44271e9ada20d8bf1db86ee22c163e05f02d33b1bfd6a not found: ID does not exist" Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.877403 4811 scope.go:117] "RemoveContainer" containerID="0a089b815327d33a5f664087e6a0a3aee29916cbfdc2a7485569c746a537a3c0" Jan 28 17:15:17 crc kubenswrapper[4811]: E0128 17:15:17.877850 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a089b815327d33a5f664087e6a0a3aee29916cbfdc2a7485569c746a537a3c0\": container with ID starting with 0a089b815327d33a5f664087e6a0a3aee29916cbfdc2a7485569c746a537a3c0 not found: 
ID does not exist" containerID="0a089b815327d33a5f664087e6a0a3aee29916cbfdc2a7485569c746a537a3c0" Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.877918 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a089b815327d33a5f664087e6a0a3aee29916cbfdc2a7485569c746a537a3c0"} err="failed to get container status \"0a089b815327d33a5f664087e6a0a3aee29916cbfdc2a7485569c746a537a3c0\": rpc error: code = NotFound desc = could not find container \"0a089b815327d33a5f664087e6a0a3aee29916cbfdc2a7485569c746a537a3c0\": container with ID starting with 0a089b815327d33a5f664087e6a0a3aee29916cbfdc2a7485569c746a537a3c0 not found: ID does not exist" Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.908644 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18ae189c-f7c8-421c-958e-44dd946461a6-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.908680 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fz5jb\" (UniqueName: \"kubernetes.io/projected/18ae189c-f7c8-421c-958e-44dd946461a6-kube-api-access-fz5jb\") on node \"crc\" DevicePath \"\"" Jan 28 17:15:17 crc kubenswrapper[4811]: I0128 17:15:17.908695 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18ae189c-f7c8-421c-958e-44dd946461a6-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 17:15:18 crc kubenswrapper[4811]: I0128 17:15:18.026900 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2shwm"] Jan 28 17:15:18 crc kubenswrapper[4811]: I0128 17:15:18.034221 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2shwm"] Jan 28 17:15:18 crc kubenswrapper[4811]: I0128 17:15:18.354887 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="18ae189c-f7c8-421c-958e-44dd946461a6" path="/var/lib/kubelet/pods/18ae189c-f7c8-421c-958e-44dd946461a6/volumes" Jan 28 17:15:18 crc kubenswrapper[4811]: I0128 17:15:18.364011 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ecae9ce4-9369-4774-aada-5233f835250c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ecae9ce4-9369-4774-aada-5233f835250c" (UID: "ecae9ce4-9369-4774-aada-5233f835250c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:15:18 crc kubenswrapper[4811]: I0128 17:15:18.417771 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecae9ce4-9369-4774-aada-5233f835250c-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 17:15:18 crc kubenswrapper[4811]: I0128 17:15:18.639061 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pfcb5"] Jan 28 17:15:18 crc kubenswrapper[4811]: I0128 17:15:18.646247 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-pfcb5"] Jan 28 17:15:20 crc kubenswrapper[4811]: I0128 17:15:20.350284 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ecae9ce4-9369-4774-aada-5233f835250c" path="/var/lib/kubelet/pods/ecae9ce4-9369-4774-aada-5233f835250c/volumes" Jan 28 17:15:30 crc kubenswrapper[4811]: I0128 17:15:30.468370 4811 scope.go:117] "RemoveContainer" containerID="854068093eb5667b6e160cae55ea3c917e1ef54848975ac41a508e0ada8ac6c0" Jan 28 17:15:33 crc kubenswrapper[4811]: I0128 17:15:33.088176 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 17:15:33 crc kubenswrapper[4811]: I0128 17:15:33.088774 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 17:15:33 crc kubenswrapper[4811]: I0128 17:15:33.088831 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" Jan 28 17:15:33 crc kubenswrapper[4811]: I0128 17:15:33.090336 4811 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"aa24462d3859954f62e518a2517851c0125e8860de36202094bb80d1adc65f98"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 28 17:15:33 crc kubenswrapper[4811]: I0128 17:15:33.090452 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://aa24462d3859954f62e518a2517851c0125e8860de36202094bb80d1adc65f98" gracePeriod=600 Jan 28 17:15:33 crc kubenswrapper[4811]: I0128 17:15:33.829924 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="aa24462d3859954f62e518a2517851c0125e8860de36202094bb80d1adc65f98" exitCode=0 Jan 28 17:15:33 crc kubenswrapper[4811]: I0128 17:15:33.829969 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"aa24462d3859954f62e518a2517851c0125e8860de36202094bb80d1adc65f98"} Jan 28 17:15:33 crc kubenswrapper[4811]: I0128 17:15:33.830003 4811 scope.go:117] 
"RemoveContainer" containerID="81a84787cf417a50710a20c50547d9e6d7f6adc8c98070a304ddbd77fa4c5ebc" Jan 28 17:15:34 crc kubenswrapper[4811]: I0128 17:15:34.844886 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"3abd2ca52dcea4f0bad2eccd9e9279be836bcb0c1530a2433f35225ded0da091"} Jan 28 17:15:51 crc kubenswrapper[4811]: E0128 17:15:51.508625 4811 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.233:54830->38.102.83.233:37263: write tcp 38.102.83.233:54830->38.102.83.233:37263: write: broken pipe Jan 28 17:16:29 crc kubenswrapper[4811]: I0128 17:16:29.054807 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-rr6sj"] Jan 28 17:16:29 crc kubenswrapper[4811]: I0128 17:16:29.063040 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-rr6sj"] Jan 28 17:16:30 crc kubenswrapper[4811]: I0128 17:16:30.380647 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d95d45b-5089-4861-8542-d2a8e3839028" path="/var/lib/kubelet/pods/4d95d45b-5089-4861-8542-d2a8e3839028/volumes" Jan 28 17:16:30 crc kubenswrapper[4811]: I0128 17:16:30.512738 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-m9pwb"] Jan 28 17:16:30 crc kubenswrapper[4811]: E0128 17:16:30.513065 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecae9ce4-9369-4774-aada-5233f835250c" containerName="registry-server" Jan 28 17:16:30 crc kubenswrapper[4811]: I0128 17:16:30.513084 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecae9ce4-9369-4774-aada-5233f835250c" containerName="registry-server" Jan 28 17:16:30 crc kubenswrapper[4811]: E0128 17:16:30.513100 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18ae189c-f7c8-421c-958e-44dd946461a6" containerName="extract-utilities" Jan 28 17:16:30 crc kubenswrapper[4811]: I0128 17:16:30.513106 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="18ae189c-f7c8-421c-958e-44dd946461a6" containerName="extract-utilities" Jan 28 17:16:30 crc kubenswrapper[4811]: E0128 17:16:30.513122 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecae9ce4-9369-4774-aada-5233f835250c" containerName="extract-utilities" Jan 28 17:16:30 crc kubenswrapper[4811]: I0128 17:16:30.513128 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecae9ce4-9369-4774-aada-5233f835250c" containerName="extract-utilities" Jan 28 17:16:30 crc kubenswrapper[4811]: E0128 17:16:30.513139 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecae9ce4-9369-4774-aada-5233f835250c" containerName="extract-content" Jan 28 17:16:30 crc kubenswrapper[4811]: I0128 17:16:30.513144 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecae9ce4-9369-4774-aada-5233f835250c" containerName="extract-content" Jan 28 17:16:30 crc kubenswrapper[4811]: E0128 17:16:30.513151 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18ae189c-f7c8-421c-958e-44dd946461a6" containerName="registry-server" Jan 28 17:16:30 crc kubenswrapper[4811]: I0128 17:16:30.513157 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="18ae189c-f7c8-421c-958e-44dd946461a6" containerName="registry-server" Jan 28 17:16:30 crc kubenswrapper[4811]: E0128 17:16:30.513208 4811 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="18ae189c-f7c8-421c-958e-44dd946461a6" containerName="extract-content" Jan 28 17:16:30 crc kubenswrapper[4811]: I0128 17:16:30.513214 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="18ae189c-f7c8-421c-958e-44dd946461a6" containerName="extract-content" Jan 28 17:16:30 crc kubenswrapper[4811]: I0128 17:16:30.513380 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="18ae189c-f7c8-421c-958e-44dd946461a6" containerName="registry-server" Jan 28 17:16:30 crc kubenswrapper[4811]: I0128 17:16:30.513392 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecae9ce4-9369-4774-aada-5233f835250c" containerName="registry-server" Jan 28 17:16:30 crc kubenswrapper[4811]: I0128 17:16:30.513971 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-m9pwb" Jan 28 17:16:30 crc kubenswrapper[4811]: I0128 17:16:30.524637 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-9b88-account-create-update-8r9wc"] Jan 28 17:16:30 crc kubenswrapper[4811]: I0128 17:16:30.526743 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-9b88-account-create-update-8r9wc" Jan 28 17:16:30 crc kubenswrapper[4811]: I0128 17:16:30.532633 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Jan 28 17:16:30 crc kubenswrapper[4811]: I0128 17:16:30.538164 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-m9pwb"] Jan 28 17:16:30 crc kubenswrapper[4811]: I0128 17:16:30.548245 4811 scope.go:117] "RemoveContainer" containerID="579bb675436dc02c9317d78f33a666aa7bb9f251259cb48a9fd03e14eb0b8f82" Jan 28 17:16:30 crc kubenswrapper[4811]: I0128 17:16:30.554778 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-9b88-account-create-update-8r9wc"] Jan 28 17:16:30 crc kubenswrapper[4811]: I0128 17:16:30.640866 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-487gz\" (UniqueName: \"kubernetes.io/projected/15d20415-1194-4077-9c8c-29ecf1c7e286-kube-api-access-487gz\") pod \"barbican-9b88-account-create-update-8r9wc\" (UID: \"15d20415-1194-4077-9c8c-29ecf1c7e286\") " pod="openstack/barbican-9b88-account-create-update-8r9wc" Jan 28 17:16:30 crc kubenswrapper[4811]: I0128 17:16:30.641136 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/15d20415-1194-4077-9c8c-29ecf1c7e286-operator-scripts\") pod \"barbican-9b88-account-create-update-8r9wc\" (UID: \"15d20415-1194-4077-9c8c-29ecf1c7e286\") " pod="openstack/barbican-9b88-account-create-update-8r9wc" Jan 28 17:16:30 crc kubenswrapper[4811]: I0128 17:16:30.641266 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/439a5db7-3e8b-4871-b500-3ba8c6039691-operator-scripts\") pod \"barbican-db-create-m9pwb\" (UID: \"439a5db7-3e8b-4871-b500-3ba8c6039691\") " pod="openstack/barbican-db-create-m9pwb" Jan 28 17:16:30 crc kubenswrapper[4811]: I0128 17:16:30.641341 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xkg2\" (UniqueName: \"kubernetes.io/projected/439a5db7-3e8b-4871-b500-3ba8c6039691-kube-api-access-9xkg2\") pod \"barbican-db-create-m9pwb\" (UID: 
\"439a5db7-3e8b-4871-b500-3ba8c6039691\") " pod="openstack/barbican-db-create-m9pwb" Jan 28 17:16:30 crc kubenswrapper[4811]: I0128 17:16:30.742546 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/439a5db7-3e8b-4871-b500-3ba8c6039691-operator-scripts\") pod \"barbican-db-create-m9pwb\" (UID: \"439a5db7-3e8b-4871-b500-3ba8c6039691\") " pod="openstack/barbican-db-create-m9pwb" Jan 28 17:16:30 crc kubenswrapper[4811]: I0128 17:16:30.742943 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xkg2\" (UniqueName: \"kubernetes.io/projected/439a5db7-3e8b-4871-b500-3ba8c6039691-kube-api-access-9xkg2\") pod \"barbican-db-create-m9pwb\" (UID: \"439a5db7-3e8b-4871-b500-3ba8c6039691\") " pod="openstack/barbican-db-create-m9pwb" Jan 28 17:16:30 crc kubenswrapper[4811]: I0128 17:16:30.743090 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-487gz\" (UniqueName: \"kubernetes.io/projected/15d20415-1194-4077-9c8c-29ecf1c7e286-kube-api-access-487gz\") pod \"barbican-9b88-account-create-update-8r9wc\" (UID: \"15d20415-1194-4077-9c8c-29ecf1c7e286\") " pod="openstack/barbican-9b88-account-create-update-8r9wc" Jan 28 17:16:30 crc kubenswrapper[4811]: I0128 17:16:30.743217 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/15d20415-1194-4077-9c8c-29ecf1c7e286-operator-scripts\") pod \"barbican-9b88-account-create-update-8r9wc\" (UID: \"15d20415-1194-4077-9c8c-29ecf1c7e286\") " pod="openstack/barbican-9b88-account-create-update-8r9wc" Jan 28 17:16:30 crc kubenswrapper[4811]: I0128 17:16:30.743764 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/439a5db7-3e8b-4871-b500-3ba8c6039691-operator-scripts\") pod \"barbican-db-create-m9pwb\" (UID: \"439a5db7-3e8b-4871-b500-3ba8c6039691\") " pod="openstack/barbican-db-create-m9pwb" Jan 28 17:16:30 crc kubenswrapper[4811]: I0128 17:16:30.744106 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/15d20415-1194-4077-9c8c-29ecf1c7e286-operator-scripts\") pod \"barbican-9b88-account-create-update-8r9wc\" (UID: \"15d20415-1194-4077-9c8c-29ecf1c7e286\") " pod="openstack/barbican-9b88-account-create-update-8r9wc" Jan 28 17:16:30 crc kubenswrapper[4811]: I0128 17:16:30.763781 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xkg2\" (UniqueName: \"kubernetes.io/projected/439a5db7-3e8b-4871-b500-3ba8c6039691-kube-api-access-9xkg2\") pod \"barbican-db-create-m9pwb\" (UID: \"439a5db7-3e8b-4871-b500-3ba8c6039691\") " pod="openstack/barbican-db-create-m9pwb" Jan 28 17:16:30 crc kubenswrapper[4811]: I0128 17:16:30.763781 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-487gz\" (UniqueName: \"kubernetes.io/projected/15d20415-1194-4077-9c8c-29ecf1c7e286-kube-api-access-487gz\") pod \"barbican-9b88-account-create-update-8r9wc\" (UID: \"15d20415-1194-4077-9c8c-29ecf1c7e286\") " pod="openstack/barbican-9b88-account-create-update-8r9wc" Jan 28 17:16:30 crc kubenswrapper[4811]: I0128 17:16:30.847279 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-m9pwb" Jan 28 17:16:30 crc kubenswrapper[4811]: I0128 17:16:30.862388 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-9b88-account-create-update-8r9wc" Jan 28 17:16:31 crc kubenswrapper[4811]: I0128 17:16:31.288148 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-9b88-account-create-update-8r9wc"] Jan 28 17:16:31 crc kubenswrapper[4811]: I0128 17:16:31.308577 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-m9pwb"] Jan 28 17:16:31 crc kubenswrapper[4811]: W0128 17:16:31.311490 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod439a5db7_3e8b_4871_b500_3ba8c6039691.slice/crio-2c89c35a0fcee6fb2db3177b32cb06cef772cb61b1db8700d37a8b0353cb28ed WatchSource:0}: Error finding container 2c89c35a0fcee6fb2db3177b32cb06cef772cb61b1db8700d37a8b0353cb28ed: Status 404 returned error can't find the container with id 2c89c35a0fcee6fb2db3177b32cb06cef772cb61b1db8700d37a8b0353cb28ed Jan 28 17:16:32 crc kubenswrapper[4811]: I0128 17:16:32.274982 4811 generic.go:334] "Generic (PLEG): container finished" podID="439a5db7-3e8b-4871-b500-3ba8c6039691" containerID="7a2f5307b315effa4abf4fcaa14875ca9250487f7bafb81d5a763989c38cc450" exitCode=0 Jan 28 17:16:32 crc kubenswrapper[4811]: I0128 17:16:32.275050 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-m9pwb" event={"ID":"439a5db7-3e8b-4871-b500-3ba8c6039691","Type":"ContainerDied","Data":"7a2f5307b315effa4abf4fcaa14875ca9250487f7bafb81d5a763989c38cc450"} Jan 28 17:16:32 crc kubenswrapper[4811]: I0128 17:16:32.275077 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-m9pwb" event={"ID":"439a5db7-3e8b-4871-b500-3ba8c6039691","Type":"ContainerStarted","Data":"2c89c35a0fcee6fb2db3177b32cb06cef772cb61b1db8700d37a8b0353cb28ed"} Jan 28 17:16:32 crc kubenswrapper[4811]: I0128 17:16:32.276745 4811 generic.go:334] "Generic (PLEG): container finished" podID="15d20415-1194-4077-9c8c-29ecf1c7e286" containerID="8294bd68150353c60aa38d7aed0faf462a832b2e09ba418fb5fe5cda05326494" exitCode=0 Jan 28 17:16:32 crc kubenswrapper[4811]: I0128 17:16:32.276804 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-9b88-account-create-update-8r9wc" event={"ID":"15d20415-1194-4077-9c8c-29ecf1c7e286","Type":"ContainerDied","Data":"8294bd68150353c60aa38d7aed0faf462a832b2e09ba418fb5fe5cda05326494"} Jan 28 17:16:32 crc kubenswrapper[4811]: I0128 17:16:32.276831 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-9b88-account-create-update-8r9wc" event={"ID":"15d20415-1194-4077-9c8c-29ecf1c7e286","Type":"ContainerStarted","Data":"adb229eaeb56032669524ab873746b33cd4d34bbf9d5e3373d2e311506901455"} Jan 28 17:16:33 crc kubenswrapper[4811]: I0128 17:16:33.668541 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-9b88-account-create-update-8r9wc" Jan 28 17:16:33 crc kubenswrapper[4811]: I0128 17:16:33.682183 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-m9pwb" Jan 28 17:16:33 crc kubenswrapper[4811]: I0128 17:16:33.798788 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/439a5db7-3e8b-4871-b500-3ba8c6039691-operator-scripts\") pod \"439a5db7-3e8b-4871-b500-3ba8c6039691\" (UID: \"439a5db7-3e8b-4871-b500-3ba8c6039691\") " Jan 28 17:16:33 crc kubenswrapper[4811]: I0128 17:16:33.798943 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-487gz\" (UniqueName: \"kubernetes.io/projected/15d20415-1194-4077-9c8c-29ecf1c7e286-kube-api-access-487gz\") pod \"15d20415-1194-4077-9c8c-29ecf1c7e286\" (UID: \"15d20415-1194-4077-9c8c-29ecf1c7e286\") " Jan 28 17:16:33 crc kubenswrapper[4811]: I0128 17:16:33.799042 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/15d20415-1194-4077-9c8c-29ecf1c7e286-operator-scripts\") pod \"15d20415-1194-4077-9c8c-29ecf1c7e286\" (UID: \"15d20415-1194-4077-9c8c-29ecf1c7e286\") " Jan 28 17:16:33 crc kubenswrapper[4811]: I0128 17:16:33.799153 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xkg2\" (UniqueName: \"kubernetes.io/projected/439a5db7-3e8b-4871-b500-3ba8c6039691-kube-api-access-9xkg2\") pod \"439a5db7-3e8b-4871-b500-3ba8c6039691\" (UID: \"439a5db7-3e8b-4871-b500-3ba8c6039691\") " Jan 28 17:16:33 crc kubenswrapper[4811]: I0128 17:16:33.799600 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/439a5db7-3e8b-4871-b500-3ba8c6039691-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "439a5db7-3e8b-4871-b500-3ba8c6039691" (UID: "439a5db7-3e8b-4871-b500-3ba8c6039691"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:16:33 crc kubenswrapper[4811]: I0128 17:16:33.799728 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/439a5db7-3e8b-4871-b500-3ba8c6039691-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:16:33 crc kubenswrapper[4811]: I0128 17:16:33.799918 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/15d20415-1194-4077-9c8c-29ecf1c7e286-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "15d20415-1194-4077-9c8c-29ecf1c7e286" (UID: "15d20415-1194-4077-9c8c-29ecf1c7e286"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:16:33 crc kubenswrapper[4811]: I0128 17:16:33.805860 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15d20415-1194-4077-9c8c-29ecf1c7e286-kube-api-access-487gz" (OuterVolumeSpecName: "kube-api-access-487gz") pod "15d20415-1194-4077-9c8c-29ecf1c7e286" (UID: "15d20415-1194-4077-9c8c-29ecf1c7e286"). InnerVolumeSpecName "kube-api-access-487gz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:16:33 crc kubenswrapper[4811]: I0128 17:16:33.806535 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/439a5db7-3e8b-4871-b500-3ba8c6039691-kube-api-access-9xkg2" (OuterVolumeSpecName: "kube-api-access-9xkg2") pod "439a5db7-3e8b-4871-b500-3ba8c6039691" (UID: "439a5db7-3e8b-4871-b500-3ba8c6039691"). 
InnerVolumeSpecName "kube-api-access-9xkg2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:16:33 crc kubenswrapper[4811]: I0128 17:16:33.903100 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-487gz\" (UniqueName: \"kubernetes.io/projected/15d20415-1194-4077-9c8c-29ecf1c7e286-kube-api-access-487gz\") on node \"crc\" DevicePath \"\"" Jan 28 17:16:33 crc kubenswrapper[4811]: I0128 17:16:33.903133 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/15d20415-1194-4077-9c8c-29ecf1c7e286-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:16:33 crc kubenswrapper[4811]: I0128 17:16:33.903142 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xkg2\" (UniqueName: \"kubernetes.io/projected/439a5db7-3e8b-4871-b500-3ba8c6039691-kube-api-access-9xkg2\") on node \"crc\" DevicePath \"\"" Jan 28 17:16:34 crc kubenswrapper[4811]: I0128 17:16:34.294450 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-9b88-account-create-update-8r9wc" Jan 28 17:16:34 crc kubenswrapper[4811]: I0128 17:16:34.294714 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-9b88-account-create-update-8r9wc" event={"ID":"15d20415-1194-4077-9c8c-29ecf1c7e286","Type":"ContainerDied","Data":"adb229eaeb56032669524ab873746b33cd4d34bbf9d5e3373d2e311506901455"} Jan 28 17:16:34 crc kubenswrapper[4811]: I0128 17:16:34.294776 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="adb229eaeb56032669524ab873746b33cd4d34bbf9d5e3373d2e311506901455" Jan 28 17:16:34 crc kubenswrapper[4811]: I0128 17:16:34.295725 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-m9pwb" event={"ID":"439a5db7-3e8b-4871-b500-3ba8c6039691","Type":"ContainerDied","Data":"2c89c35a0fcee6fb2db3177b32cb06cef772cb61b1db8700d37a8b0353cb28ed"} Jan 28 17:16:34 crc kubenswrapper[4811]: I0128 17:16:34.295754 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2c89c35a0fcee6fb2db3177b32cb06cef772cb61b1db8700d37a8b0353cb28ed" Jan 28 17:16:34 crc kubenswrapper[4811]: I0128 17:16:34.295795 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-m9pwb" Jan 28 17:16:36 crc kubenswrapper[4811]: I0128 17:16:36.756300 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-jjg5c"] Jan 28 17:16:36 crc kubenswrapper[4811]: E0128 17:16:36.757059 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="439a5db7-3e8b-4871-b500-3ba8c6039691" containerName="mariadb-database-create" Jan 28 17:16:36 crc kubenswrapper[4811]: I0128 17:16:36.757071 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="439a5db7-3e8b-4871-b500-3ba8c6039691" containerName="mariadb-database-create" Jan 28 17:16:36 crc kubenswrapper[4811]: E0128 17:16:36.757115 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15d20415-1194-4077-9c8c-29ecf1c7e286" containerName="mariadb-account-create-update" Jan 28 17:16:36 crc kubenswrapper[4811]: I0128 17:16:36.757125 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="15d20415-1194-4077-9c8c-29ecf1c7e286" containerName="mariadb-account-create-update" Jan 28 17:16:36 crc kubenswrapper[4811]: I0128 17:16:36.757279 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="439a5db7-3e8b-4871-b500-3ba8c6039691" containerName="mariadb-database-create" Jan 28 17:16:36 crc kubenswrapper[4811]: I0128 17:16:36.757295 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="15d20415-1194-4077-9c8c-29ecf1c7e286" containerName="mariadb-account-create-update" Jan 28 17:16:36 crc kubenswrapper[4811]: I0128 17:16:36.757819 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-jjg5c" Jan 28 17:16:36 crc kubenswrapper[4811]: I0128 17:16:36.759983 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Jan 28 17:16:36 crc kubenswrapper[4811]: I0128 17:16:36.761401 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-fqmjv" Jan 28 17:16:36 crc kubenswrapper[4811]: I0128 17:16:36.765136 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-jjg5c"] Jan 28 17:16:36 crc kubenswrapper[4811]: I0128 17:16:36.862413 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1558ba0e-ed1b-48ab-9004-8f6caedf79fd-db-sync-config-data\") pod \"barbican-db-sync-jjg5c\" (UID: \"1558ba0e-ed1b-48ab-9004-8f6caedf79fd\") " pod="openstack/barbican-db-sync-jjg5c" Jan 28 17:16:36 crc kubenswrapper[4811]: I0128 17:16:36.862584 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wpnnm\" (UniqueName: \"kubernetes.io/projected/1558ba0e-ed1b-48ab-9004-8f6caedf79fd-kube-api-access-wpnnm\") pod \"barbican-db-sync-jjg5c\" (UID: \"1558ba0e-ed1b-48ab-9004-8f6caedf79fd\") " pod="openstack/barbican-db-sync-jjg5c" Jan 28 17:16:36 crc kubenswrapper[4811]: I0128 17:16:36.862727 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1558ba0e-ed1b-48ab-9004-8f6caedf79fd-combined-ca-bundle\") pod \"barbican-db-sync-jjg5c\" (UID: \"1558ba0e-ed1b-48ab-9004-8f6caedf79fd\") " pod="openstack/barbican-db-sync-jjg5c" Jan 28 17:16:36 crc kubenswrapper[4811]: I0128 17:16:36.964290 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" 
(UniqueName: \"kubernetes.io/secret/1558ba0e-ed1b-48ab-9004-8f6caedf79fd-db-sync-config-data\") pod \"barbican-db-sync-jjg5c\" (UID: \"1558ba0e-ed1b-48ab-9004-8f6caedf79fd\") " pod="openstack/barbican-db-sync-jjg5c" Jan 28 17:16:36 crc kubenswrapper[4811]: I0128 17:16:36.964387 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wpnnm\" (UniqueName: \"kubernetes.io/projected/1558ba0e-ed1b-48ab-9004-8f6caedf79fd-kube-api-access-wpnnm\") pod \"barbican-db-sync-jjg5c\" (UID: \"1558ba0e-ed1b-48ab-9004-8f6caedf79fd\") " pod="openstack/barbican-db-sync-jjg5c" Jan 28 17:16:36 crc kubenswrapper[4811]: I0128 17:16:36.964507 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1558ba0e-ed1b-48ab-9004-8f6caedf79fd-combined-ca-bundle\") pod \"barbican-db-sync-jjg5c\" (UID: \"1558ba0e-ed1b-48ab-9004-8f6caedf79fd\") " pod="openstack/barbican-db-sync-jjg5c" Jan 28 17:16:36 crc kubenswrapper[4811]: I0128 17:16:36.978424 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1558ba0e-ed1b-48ab-9004-8f6caedf79fd-combined-ca-bundle\") pod \"barbican-db-sync-jjg5c\" (UID: \"1558ba0e-ed1b-48ab-9004-8f6caedf79fd\") " pod="openstack/barbican-db-sync-jjg5c" Jan 28 17:16:36 crc kubenswrapper[4811]: I0128 17:16:36.979817 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1558ba0e-ed1b-48ab-9004-8f6caedf79fd-db-sync-config-data\") pod \"barbican-db-sync-jjg5c\" (UID: \"1558ba0e-ed1b-48ab-9004-8f6caedf79fd\") " pod="openstack/barbican-db-sync-jjg5c" Jan 28 17:16:37 crc kubenswrapper[4811]: I0128 17:16:37.005805 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wpnnm\" (UniqueName: \"kubernetes.io/projected/1558ba0e-ed1b-48ab-9004-8f6caedf79fd-kube-api-access-wpnnm\") pod \"barbican-db-sync-jjg5c\" (UID: \"1558ba0e-ed1b-48ab-9004-8f6caedf79fd\") " pod="openstack/barbican-db-sync-jjg5c" Jan 28 17:16:37 crc kubenswrapper[4811]: I0128 17:16:37.119353 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-jjg5c" Jan 28 17:16:37 crc kubenswrapper[4811]: I0128 17:16:37.570719 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-jjg5c"] Jan 28 17:16:38 crc kubenswrapper[4811]: I0128 17:16:38.326154 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jjg5c" event={"ID":"1558ba0e-ed1b-48ab-9004-8f6caedf79fd","Type":"ContainerStarted","Data":"c65204501b8c7681f4ac497f191a551dc77d6e53d042e6d7ac6e4d829a8e263a"} Jan 28 17:16:38 crc kubenswrapper[4811]: I0128 17:16:38.326484 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jjg5c" event={"ID":"1558ba0e-ed1b-48ab-9004-8f6caedf79fd","Type":"ContainerStarted","Data":"0ec5c4fa3a8b140a7bebd011afd6d5380c6914f044c109f972382e13ce08a48e"} Jan 28 17:16:38 crc kubenswrapper[4811]: I0128 17:16:38.347760 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-jjg5c" podStartSLOduration=2.347740732 podStartE2EDuration="2.347740732s" podCreationTimestamp="2026-01-28 17:16:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:16:38.344575436 +0000 UTC m=+5491.098939019" watchObservedRunningTime="2026-01-28 17:16:38.347740732 +0000 UTC m=+5491.102104315" Jan 28 17:16:39 crc kubenswrapper[4811]: I0128 17:16:39.349490 4811 generic.go:334] "Generic (PLEG): container finished" podID="1558ba0e-ed1b-48ab-9004-8f6caedf79fd" containerID="c65204501b8c7681f4ac497f191a551dc77d6e53d042e6d7ac6e4d829a8e263a" exitCode=0 Jan 28 17:16:39 crc kubenswrapper[4811]: I0128 17:16:39.349525 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jjg5c" event={"ID":"1558ba0e-ed1b-48ab-9004-8f6caedf79fd","Type":"ContainerDied","Data":"c65204501b8c7681f4ac497f191a551dc77d6e53d042e6d7ac6e4d829a8e263a"} Jan 28 17:16:40 crc kubenswrapper[4811]: I0128 17:16:40.638716 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-jjg5c" Jan 28 17:16:40 crc kubenswrapper[4811]: I0128 17:16:40.737848 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1558ba0e-ed1b-48ab-9004-8f6caedf79fd-combined-ca-bundle\") pod \"1558ba0e-ed1b-48ab-9004-8f6caedf79fd\" (UID: \"1558ba0e-ed1b-48ab-9004-8f6caedf79fd\") " Jan 28 17:16:40 crc kubenswrapper[4811]: I0128 17:16:40.738108 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wpnnm\" (UniqueName: \"kubernetes.io/projected/1558ba0e-ed1b-48ab-9004-8f6caedf79fd-kube-api-access-wpnnm\") pod \"1558ba0e-ed1b-48ab-9004-8f6caedf79fd\" (UID: \"1558ba0e-ed1b-48ab-9004-8f6caedf79fd\") " Jan 28 17:16:40 crc kubenswrapper[4811]: I0128 17:16:40.738135 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1558ba0e-ed1b-48ab-9004-8f6caedf79fd-db-sync-config-data\") pod \"1558ba0e-ed1b-48ab-9004-8f6caedf79fd\" (UID: \"1558ba0e-ed1b-48ab-9004-8f6caedf79fd\") " Jan 28 17:16:40 crc kubenswrapper[4811]: I0128 17:16:40.745655 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1558ba0e-ed1b-48ab-9004-8f6caedf79fd-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "1558ba0e-ed1b-48ab-9004-8f6caedf79fd" (UID: "1558ba0e-ed1b-48ab-9004-8f6caedf79fd"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:16:40 crc kubenswrapper[4811]: I0128 17:16:40.745826 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1558ba0e-ed1b-48ab-9004-8f6caedf79fd-kube-api-access-wpnnm" (OuterVolumeSpecName: "kube-api-access-wpnnm") pod "1558ba0e-ed1b-48ab-9004-8f6caedf79fd" (UID: "1558ba0e-ed1b-48ab-9004-8f6caedf79fd"). InnerVolumeSpecName "kube-api-access-wpnnm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:16:40 crc kubenswrapper[4811]: I0128 17:16:40.762711 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1558ba0e-ed1b-48ab-9004-8f6caedf79fd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1558ba0e-ed1b-48ab-9004-8f6caedf79fd" (UID: "1558ba0e-ed1b-48ab-9004-8f6caedf79fd"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:16:40 crc kubenswrapper[4811]: I0128 17:16:40.840534 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1558ba0e-ed1b-48ab-9004-8f6caedf79fd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 17:16:40 crc kubenswrapper[4811]: I0128 17:16:40.840786 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wpnnm\" (UniqueName: \"kubernetes.io/projected/1558ba0e-ed1b-48ab-9004-8f6caedf79fd-kube-api-access-wpnnm\") on node \"crc\" DevicePath \"\"" Jan 28 17:16:40 crc kubenswrapper[4811]: I0128 17:16:40.840863 4811 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1558ba0e-ed1b-48ab-9004-8f6caedf79fd-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.367912 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jjg5c" event={"ID":"1558ba0e-ed1b-48ab-9004-8f6caedf79fd","Type":"ContainerDied","Data":"0ec5c4fa3a8b140a7bebd011afd6d5380c6914f044c109f972382e13ce08a48e"} Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.367960 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0ec5c4fa3a8b140a7bebd011afd6d5380c6914f044c109f972382e13ce08a48e" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.367983 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-jjg5c" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.497153 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-6fb878c849-8qtsg"] Jan 28 17:16:41 crc kubenswrapper[4811]: E0128 17:16:41.497542 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1558ba0e-ed1b-48ab-9004-8f6caedf79fd" containerName="barbican-db-sync" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.497563 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="1558ba0e-ed1b-48ab-9004-8f6caedf79fd" containerName="barbican-db-sync" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.497742 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="1558ba0e-ed1b-48ab-9004-8f6caedf79fd" containerName="barbican-db-sync" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.498786 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6fb878c849-8qtsg" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.507795 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.508198 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.509047 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-fqmjv" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.523409 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-854cc966bb-g5fl8"] Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.524697 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-854cc966bb-g5fl8" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.532044 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.542007 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6fb878c849-8qtsg"] Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.551959 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-854cc966bb-g5fl8"] Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.552245 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/061ac65f-6ccc-4efc-bc6c-75cf3b355e8d-combined-ca-bundle\") pod \"barbican-worker-6fb878c849-8qtsg\" (UID: \"061ac65f-6ccc-4efc-bc6c-75cf3b355e8d\") " pod="openstack/barbican-worker-6fb878c849-8qtsg" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.552351 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/061ac65f-6ccc-4efc-bc6c-75cf3b355e8d-config-data\") pod \"barbican-worker-6fb878c849-8qtsg\" (UID: \"061ac65f-6ccc-4efc-bc6c-75cf3b355e8d\") " pod="openstack/barbican-worker-6fb878c849-8qtsg" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.552443 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/061ac65f-6ccc-4efc-bc6c-75cf3b355e8d-logs\") pod \"barbican-worker-6fb878c849-8qtsg\" (UID: \"061ac65f-6ccc-4efc-bc6c-75cf3b355e8d\") " pod="openstack/barbican-worker-6fb878c849-8qtsg" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.552554 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8fbv\" (UniqueName: \"kubernetes.io/projected/061ac65f-6ccc-4efc-bc6c-75cf3b355e8d-kube-api-access-x8fbv\") pod \"barbican-worker-6fb878c849-8qtsg\" (UID: \"061ac65f-6ccc-4efc-bc6c-75cf3b355e8d\") " pod="openstack/barbican-worker-6fb878c849-8qtsg" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.552673 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/061ac65f-6ccc-4efc-bc6c-75cf3b355e8d-config-data-custom\") pod \"barbican-worker-6fb878c849-8qtsg\" (UID: \"061ac65f-6ccc-4efc-bc6c-75cf3b355e8d\") " pod="openstack/barbican-worker-6fb878c849-8qtsg" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.615898 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8f8875695-cf5c9"] Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.617901 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8f8875695-cf5c9" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.643558 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8f8875695-cf5c9"] Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.654379 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ed15b45-fa16-49de-b10c-3a363f820d2e-config-data\") pod \"barbican-keystone-listener-854cc966bb-g5fl8\" (UID: \"8ed15b45-fa16-49de-b10c-3a363f820d2e\") " pod="openstack/barbican-keystone-listener-854cc966bb-g5fl8" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.654457 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8fbv\" (UniqueName: \"kubernetes.io/projected/061ac65f-6ccc-4efc-bc6c-75cf3b355e8d-kube-api-access-x8fbv\") pod \"barbican-worker-6fb878c849-8qtsg\" (UID: \"061ac65f-6ccc-4efc-bc6c-75cf3b355e8d\") " pod="openstack/barbican-worker-6fb878c849-8qtsg" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.654502 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ed15b45-fa16-49de-b10c-3a363f820d2e-logs\") pod \"barbican-keystone-listener-854cc966bb-g5fl8\" (UID: \"8ed15b45-fa16-49de-b10c-3a363f820d2e\") " pod="openstack/barbican-keystone-listener-854cc966bb-g5fl8" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.654534 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8ed15b45-fa16-49de-b10c-3a363f820d2e-config-data-custom\") pod \"barbican-keystone-listener-854cc966bb-g5fl8\" (UID: \"8ed15b45-fa16-49de-b10c-3a363f820d2e\") " pod="openstack/barbican-keystone-listener-854cc966bb-g5fl8" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.654574 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ed15b45-fa16-49de-b10c-3a363f820d2e-combined-ca-bundle\") pod \"barbican-keystone-listener-854cc966bb-g5fl8\" (UID: \"8ed15b45-fa16-49de-b10c-3a363f820d2e\") " pod="openstack/barbican-keystone-listener-854cc966bb-g5fl8" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.654613 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/061ac65f-6ccc-4efc-bc6c-75cf3b355e8d-config-data-custom\") pod \"barbican-worker-6fb878c849-8qtsg\" (UID: \"061ac65f-6ccc-4efc-bc6c-75cf3b355e8d\") " pod="openstack/barbican-worker-6fb878c849-8qtsg" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.654630 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t42zj\" (UniqueName: \"kubernetes.io/projected/8ed15b45-fa16-49de-b10c-3a363f820d2e-kube-api-access-t42zj\") pod \"barbican-keystone-listener-854cc966bb-g5fl8\" (UID: \"8ed15b45-fa16-49de-b10c-3a363f820d2e\") " pod="openstack/barbican-keystone-listener-854cc966bb-g5fl8" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.654668 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/061ac65f-6ccc-4efc-bc6c-75cf3b355e8d-combined-ca-bundle\") pod \"barbican-worker-6fb878c849-8qtsg\" (UID: 
\"061ac65f-6ccc-4efc-bc6c-75cf3b355e8d\") " pod="openstack/barbican-worker-6fb878c849-8qtsg" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.654688 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/061ac65f-6ccc-4efc-bc6c-75cf3b355e8d-config-data\") pod \"barbican-worker-6fb878c849-8qtsg\" (UID: \"061ac65f-6ccc-4efc-bc6c-75cf3b355e8d\") " pod="openstack/barbican-worker-6fb878c849-8qtsg" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.654719 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/061ac65f-6ccc-4efc-bc6c-75cf3b355e8d-logs\") pod \"barbican-worker-6fb878c849-8qtsg\" (UID: \"061ac65f-6ccc-4efc-bc6c-75cf3b355e8d\") " pod="openstack/barbican-worker-6fb878c849-8qtsg" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.655200 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/061ac65f-6ccc-4efc-bc6c-75cf3b355e8d-logs\") pod \"barbican-worker-6fb878c849-8qtsg\" (UID: \"061ac65f-6ccc-4efc-bc6c-75cf3b355e8d\") " pod="openstack/barbican-worker-6fb878c849-8qtsg" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.664556 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/061ac65f-6ccc-4efc-bc6c-75cf3b355e8d-config-data-custom\") pod \"barbican-worker-6fb878c849-8qtsg\" (UID: \"061ac65f-6ccc-4efc-bc6c-75cf3b355e8d\") " pod="openstack/barbican-worker-6fb878c849-8qtsg" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.665089 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/061ac65f-6ccc-4efc-bc6c-75cf3b355e8d-config-data\") pod \"barbican-worker-6fb878c849-8qtsg\" (UID: \"061ac65f-6ccc-4efc-bc6c-75cf3b355e8d\") " pod="openstack/barbican-worker-6fb878c849-8qtsg" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.667617 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/061ac65f-6ccc-4efc-bc6c-75cf3b355e8d-combined-ca-bundle\") pod \"barbican-worker-6fb878c849-8qtsg\" (UID: \"061ac65f-6ccc-4efc-bc6c-75cf3b355e8d\") " pod="openstack/barbican-worker-6fb878c849-8qtsg" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.674350 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8fbv\" (UniqueName: \"kubernetes.io/projected/061ac65f-6ccc-4efc-bc6c-75cf3b355e8d-kube-api-access-x8fbv\") pod \"barbican-worker-6fb878c849-8qtsg\" (UID: \"061ac65f-6ccc-4efc-bc6c-75cf3b355e8d\") " pod="openstack/barbican-worker-6fb878c849-8qtsg" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.720887 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-57f9b55cfb-vwvv2"] Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.722550 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-57f9b55cfb-vwvv2" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.727799 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.746479 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-57f9b55cfb-vwvv2"] Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.755839 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ed15b45-fa16-49de-b10c-3a363f820d2e-config-data\") pod \"barbican-keystone-listener-854cc966bb-g5fl8\" (UID: \"8ed15b45-fa16-49de-b10c-3a363f820d2e\") " pod="openstack/barbican-keystone-listener-854cc966bb-g5fl8" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.755918 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ed15b45-fa16-49de-b10c-3a363f820d2e-logs\") pod \"barbican-keystone-listener-854cc966bb-g5fl8\" (UID: \"8ed15b45-fa16-49de-b10c-3a363f820d2e\") " pod="openstack/barbican-keystone-listener-854cc966bb-g5fl8" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.755950 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a3c3123-f412-424c-a561-b50d836f5f54-config\") pod \"dnsmasq-dns-8f8875695-cf5c9\" (UID: \"1a3c3123-f412-424c-a561-b50d836f5f54\") " pod="openstack/dnsmasq-dns-8f8875695-cf5c9" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.755992 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ed15b45-fa16-49de-b10c-3a363f820d2e-combined-ca-bundle\") pod \"barbican-keystone-listener-854cc966bb-g5fl8\" (UID: \"8ed15b45-fa16-49de-b10c-3a363f820d2e\") " pod="openstack/barbican-keystone-listener-854cc966bb-g5fl8" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.756020 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8ed15b45-fa16-49de-b10c-3a363f820d2e-config-data-custom\") pod \"barbican-keystone-listener-854cc966bb-g5fl8\" (UID: \"8ed15b45-fa16-49de-b10c-3a363f820d2e\") " pod="openstack/barbican-keystone-listener-854cc966bb-g5fl8" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.756045 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a3c3123-f412-424c-a561-b50d836f5f54-ovsdbserver-sb\") pod \"dnsmasq-dns-8f8875695-cf5c9\" (UID: \"1a3c3123-f412-424c-a561-b50d836f5f54\") " pod="openstack/dnsmasq-dns-8f8875695-cf5c9" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.756092 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t42zj\" (UniqueName: \"kubernetes.io/projected/8ed15b45-fa16-49de-b10c-3a363f820d2e-kube-api-access-t42zj\") pod \"barbican-keystone-listener-854cc966bb-g5fl8\" (UID: \"8ed15b45-fa16-49de-b10c-3a363f820d2e\") " pod="openstack/barbican-keystone-listener-854cc966bb-g5fl8" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.756126 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a3c3123-f412-424c-a561-b50d836f5f54-dns-svc\") 
pod \"dnsmasq-dns-8f8875695-cf5c9\" (UID: \"1a3c3123-f412-424c-a561-b50d836f5f54\") " pod="openstack/dnsmasq-dns-8f8875695-cf5c9" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.756147 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrvph\" (UniqueName: \"kubernetes.io/projected/1a3c3123-f412-424c-a561-b50d836f5f54-kube-api-access-nrvph\") pod \"dnsmasq-dns-8f8875695-cf5c9\" (UID: \"1a3c3123-f412-424c-a561-b50d836f5f54\") " pod="openstack/dnsmasq-dns-8f8875695-cf5c9" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.756170 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a3c3123-f412-424c-a561-b50d836f5f54-ovsdbserver-nb\") pod \"dnsmasq-dns-8f8875695-cf5c9\" (UID: \"1a3c3123-f412-424c-a561-b50d836f5f54\") " pod="openstack/dnsmasq-dns-8f8875695-cf5c9" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.756728 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ed15b45-fa16-49de-b10c-3a363f820d2e-logs\") pod \"barbican-keystone-listener-854cc966bb-g5fl8\" (UID: \"8ed15b45-fa16-49de-b10c-3a363f820d2e\") " pod="openstack/barbican-keystone-listener-854cc966bb-g5fl8" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.761168 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8ed15b45-fa16-49de-b10c-3a363f820d2e-config-data-custom\") pod \"barbican-keystone-listener-854cc966bb-g5fl8\" (UID: \"8ed15b45-fa16-49de-b10c-3a363f820d2e\") " pod="openstack/barbican-keystone-listener-854cc966bb-g5fl8" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.761497 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ed15b45-fa16-49de-b10c-3a363f820d2e-combined-ca-bundle\") pod \"barbican-keystone-listener-854cc966bb-g5fl8\" (UID: \"8ed15b45-fa16-49de-b10c-3a363f820d2e\") " pod="openstack/barbican-keystone-listener-854cc966bb-g5fl8" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.774584 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ed15b45-fa16-49de-b10c-3a363f820d2e-config-data\") pod \"barbican-keystone-listener-854cc966bb-g5fl8\" (UID: \"8ed15b45-fa16-49de-b10c-3a363f820d2e\") " pod="openstack/barbican-keystone-listener-854cc966bb-g5fl8" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.780453 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t42zj\" (UniqueName: \"kubernetes.io/projected/8ed15b45-fa16-49de-b10c-3a363f820d2e-kube-api-access-t42zj\") pod \"barbican-keystone-listener-854cc966bb-g5fl8\" (UID: \"8ed15b45-fa16-49de-b10c-3a363f820d2e\") " pod="openstack/barbican-keystone-listener-854cc966bb-g5fl8" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.825028 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6fb878c849-8qtsg" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.842564 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-854cc966bb-g5fl8" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.857807 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0d2592a-6e6a-4415-a1ef-5b026f8306ef-config-data\") pod \"barbican-api-57f9b55cfb-vwvv2\" (UID: \"b0d2592a-6e6a-4415-a1ef-5b026f8306ef\") " pod="openstack/barbican-api-57f9b55cfb-vwvv2" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.857900 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mbfqf\" (UniqueName: \"kubernetes.io/projected/b0d2592a-6e6a-4415-a1ef-5b026f8306ef-kube-api-access-mbfqf\") pod \"barbican-api-57f9b55cfb-vwvv2\" (UID: \"b0d2592a-6e6a-4415-a1ef-5b026f8306ef\") " pod="openstack/barbican-api-57f9b55cfb-vwvv2" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.857943 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a3c3123-f412-424c-a561-b50d836f5f54-config\") pod \"dnsmasq-dns-8f8875695-cf5c9\" (UID: \"1a3c3123-f412-424c-a561-b50d836f5f54\") " pod="openstack/dnsmasq-dns-8f8875695-cf5c9" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.857980 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a3c3123-f412-424c-a561-b50d836f5f54-ovsdbserver-sb\") pod \"dnsmasq-dns-8f8875695-cf5c9\" (UID: \"1a3c3123-f412-424c-a561-b50d836f5f54\") " pod="openstack/dnsmasq-dns-8f8875695-cf5c9" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.857997 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b0d2592a-6e6a-4415-a1ef-5b026f8306ef-config-data-custom\") pod \"barbican-api-57f9b55cfb-vwvv2\" (UID: \"b0d2592a-6e6a-4415-a1ef-5b026f8306ef\") " pod="openstack/barbican-api-57f9b55cfb-vwvv2" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.858031 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a3c3123-f412-424c-a561-b50d836f5f54-dns-svc\") pod \"dnsmasq-dns-8f8875695-cf5c9\" (UID: \"1a3c3123-f412-424c-a561-b50d836f5f54\") " pod="openstack/dnsmasq-dns-8f8875695-cf5c9" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.858047 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrvph\" (UniqueName: \"kubernetes.io/projected/1a3c3123-f412-424c-a561-b50d836f5f54-kube-api-access-nrvph\") pod \"dnsmasq-dns-8f8875695-cf5c9\" (UID: \"1a3c3123-f412-424c-a561-b50d836f5f54\") " pod="openstack/dnsmasq-dns-8f8875695-cf5c9" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.858064 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a3c3123-f412-424c-a561-b50d836f5f54-ovsdbserver-nb\") pod \"dnsmasq-dns-8f8875695-cf5c9\" (UID: \"1a3c3123-f412-424c-a561-b50d836f5f54\") " pod="openstack/dnsmasq-dns-8f8875695-cf5c9" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.858082 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0d2592a-6e6a-4415-a1ef-5b026f8306ef-combined-ca-bundle\") pod 
\"barbican-api-57f9b55cfb-vwvv2\" (UID: \"b0d2592a-6e6a-4415-a1ef-5b026f8306ef\") " pod="openstack/barbican-api-57f9b55cfb-vwvv2" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.858107 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b0d2592a-6e6a-4415-a1ef-5b026f8306ef-logs\") pod \"barbican-api-57f9b55cfb-vwvv2\" (UID: \"b0d2592a-6e6a-4415-a1ef-5b026f8306ef\") " pod="openstack/barbican-api-57f9b55cfb-vwvv2" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.859000 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a3c3123-f412-424c-a561-b50d836f5f54-config\") pod \"dnsmasq-dns-8f8875695-cf5c9\" (UID: \"1a3c3123-f412-424c-a561-b50d836f5f54\") " pod="openstack/dnsmasq-dns-8f8875695-cf5c9" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.859537 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a3c3123-f412-424c-a561-b50d836f5f54-ovsdbserver-sb\") pod \"dnsmasq-dns-8f8875695-cf5c9\" (UID: \"1a3c3123-f412-424c-a561-b50d836f5f54\") " pod="openstack/dnsmasq-dns-8f8875695-cf5c9" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.860040 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a3c3123-f412-424c-a561-b50d836f5f54-dns-svc\") pod \"dnsmasq-dns-8f8875695-cf5c9\" (UID: \"1a3c3123-f412-424c-a561-b50d836f5f54\") " pod="openstack/dnsmasq-dns-8f8875695-cf5c9" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.861222 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a3c3123-f412-424c-a561-b50d836f5f54-ovsdbserver-nb\") pod \"dnsmasq-dns-8f8875695-cf5c9\" (UID: \"1a3c3123-f412-424c-a561-b50d836f5f54\") " pod="openstack/dnsmasq-dns-8f8875695-cf5c9" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.879986 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nrvph\" (UniqueName: \"kubernetes.io/projected/1a3c3123-f412-424c-a561-b50d836f5f54-kube-api-access-nrvph\") pod \"dnsmasq-dns-8f8875695-cf5c9\" (UID: \"1a3c3123-f412-424c-a561-b50d836f5f54\") " pod="openstack/dnsmasq-dns-8f8875695-cf5c9" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.949964 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8f8875695-cf5c9" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.959941 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b0d2592a-6e6a-4415-a1ef-5b026f8306ef-config-data-custom\") pod \"barbican-api-57f9b55cfb-vwvv2\" (UID: \"b0d2592a-6e6a-4415-a1ef-5b026f8306ef\") " pod="openstack/barbican-api-57f9b55cfb-vwvv2" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.960029 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0d2592a-6e6a-4415-a1ef-5b026f8306ef-combined-ca-bundle\") pod \"barbican-api-57f9b55cfb-vwvv2\" (UID: \"b0d2592a-6e6a-4415-a1ef-5b026f8306ef\") " pod="openstack/barbican-api-57f9b55cfb-vwvv2" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.960068 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b0d2592a-6e6a-4415-a1ef-5b026f8306ef-logs\") pod \"barbican-api-57f9b55cfb-vwvv2\" (UID: \"b0d2592a-6e6a-4415-a1ef-5b026f8306ef\") " pod="openstack/barbican-api-57f9b55cfb-vwvv2" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.960102 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0d2592a-6e6a-4415-a1ef-5b026f8306ef-config-data\") pod \"barbican-api-57f9b55cfb-vwvv2\" (UID: \"b0d2592a-6e6a-4415-a1ef-5b026f8306ef\") " pod="openstack/barbican-api-57f9b55cfb-vwvv2" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.960171 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mbfqf\" (UniqueName: \"kubernetes.io/projected/b0d2592a-6e6a-4415-a1ef-5b026f8306ef-kube-api-access-mbfqf\") pod \"barbican-api-57f9b55cfb-vwvv2\" (UID: \"b0d2592a-6e6a-4415-a1ef-5b026f8306ef\") " pod="openstack/barbican-api-57f9b55cfb-vwvv2" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.964387 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b0d2592a-6e6a-4415-a1ef-5b026f8306ef-logs\") pod \"barbican-api-57f9b55cfb-vwvv2\" (UID: \"b0d2592a-6e6a-4415-a1ef-5b026f8306ef\") " pod="openstack/barbican-api-57f9b55cfb-vwvv2" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.972677 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b0d2592a-6e6a-4415-a1ef-5b026f8306ef-config-data-custom\") pod \"barbican-api-57f9b55cfb-vwvv2\" (UID: \"b0d2592a-6e6a-4415-a1ef-5b026f8306ef\") " pod="openstack/barbican-api-57f9b55cfb-vwvv2" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.985886 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0d2592a-6e6a-4415-a1ef-5b026f8306ef-combined-ca-bundle\") pod \"barbican-api-57f9b55cfb-vwvv2\" (UID: \"b0d2592a-6e6a-4415-a1ef-5b026f8306ef\") " pod="openstack/barbican-api-57f9b55cfb-vwvv2" Jan 28 17:16:41 crc kubenswrapper[4811]: I0128 17:16:41.986891 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0d2592a-6e6a-4415-a1ef-5b026f8306ef-config-data\") pod \"barbican-api-57f9b55cfb-vwvv2\" (UID: \"b0d2592a-6e6a-4415-a1ef-5b026f8306ef\") " pod="openstack/barbican-api-57f9b55cfb-vwvv2" Jan 28 17:16:41 crc 
kubenswrapper[4811]: I0128 17:16:41.994849 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mbfqf\" (UniqueName: \"kubernetes.io/projected/b0d2592a-6e6a-4415-a1ef-5b026f8306ef-kube-api-access-mbfqf\") pod \"barbican-api-57f9b55cfb-vwvv2\" (UID: \"b0d2592a-6e6a-4415-a1ef-5b026f8306ef\") " pod="openstack/barbican-api-57f9b55cfb-vwvv2" Jan 28 17:16:42 crc kubenswrapper[4811]: I0128 17:16:42.064028 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-57f9b55cfb-vwvv2" Jan 28 17:16:42 crc kubenswrapper[4811]: I0128 17:16:42.286928 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6fb878c849-8qtsg"] Jan 28 17:16:42 crc kubenswrapper[4811]: I0128 17:16:42.403938 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6fb878c849-8qtsg" event={"ID":"061ac65f-6ccc-4efc-bc6c-75cf3b355e8d","Type":"ContainerStarted","Data":"d53abb79c7891d2001bf27b5ddbc53865c24e594e905e36f28f6fb7e530d82c4"} Jan 28 17:16:42 crc kubenswrapper[4811]: I0128 17:16:42.544739 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-854cc966bb-g5fl8"] Jan 28 17:16:42 crc kubenswrapper[4811]: I0128 17:16:42.715215 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-57f9b55cfb-vwvv2"] Jan 28 17:16:42 crc kubenswrapper[4811]: W0128 17:16:42.722989 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb0d2592a_6e6a_4415_a1ef_5b026f8306ef.slice/crio-be5fe2633699db0dd1f018d54f04da3877dc5b755ce52516604b7e037241fa9d WatchSource:0}: Error finding container be5fe2633699db0dd1f018d54f04da3877dc5b755ce52516604b7e037241fa9d: Status 404 returned error can't find the container with id be5fe2633699db0dd1f018d54f04da3877dc5b755ce52516604b7e037241fa9d Jan 28 17:16:42 crc kubenswrapper[4811]: I0128 17:16:42.746724 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8f8875695-cf5c9"] Jan 28 17:16:43 crc kubenswrapper[4811]: I0128 17:16:43.414057 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-854cc966bb-g5fl8" event={"ID":"8ed15b45-fa16-49de-b10c-3a363f820d2e","Type":"ContainerStarted","Data":"bec917918fa887211cdc53079af3cad44aa54a0c80736791dd3b734d3d8bb881"} Jan 28 17:16:43 crc kubenswrapper[4811]: I0128 17:16:43.414543 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-854cc966bb-g5fl8" event={"ID":"8ed15b45-fa16-49de-b10c-3a363f820d2e","Type":"ContainerStarted","Data":"d9654528dd2dc798d9aa6c4f16925c512815eb51fb6e1452f832e00434877cb9"} Jan 28 17:16:43 crc kubenswrapper[4811]: I0128 17:16:43.414569 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-854cc966bb-g5fl8" event={"ID":"8ed15b45-fa16-49de-b10c-3a363f820d2e","Type":"ContainerStarted","Data":"b827f6989160bc462020e18b51026112a0fe8145f8f37a8d1efed6631b213d79"} Jan 28 17:16:43 crc kubenswrapper[4811]: I0128 17:16:43.416063 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6fb878c849-8qtsg" event={"ID":"061ac65f-6ccc-4efc-bc6c-75cf3b355e8d","Type":"ContainerStarted","Data":"87ab9daa48bf32479907a68b12dfd909a1ddad9076c0336faaeb2b9107b19c0f"} Jan 28 17:16:43 crc kubenswrapper[4811]: I0128 17:16:43.416103 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/barbican-worker-6fb878c849-8qtsg" event={"ID":"061ac65f-6ccc-4efc-bc6c-75cf3b355e8d","Type":"ContainerStarted","Data":"12a61e4120a1f694e5a1187de30cdfa10a9ddf1e0d2a2868cf8eab537e3d871d"} Jan 28 17:16:43 crc kubenswrapper[4811]: I0128 17:16:43.418131 4811 generic.go:334] "Generic (PLEG): container finished" podID="1a3c3123-f412-424c-a561-b50d836f5f54" containerID="8f55702c494dd197f2608cc87c20b050ff4854178804e493a8c498efb3c19e44" exitCode=0 Jan 28 17:16:43 crc kubenswrapper[4811]: I0128 17:16:43.418489 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8f8875695-cf5c9" event={"ID":"1a3c3123-f412-424c-a561-b50d836f5f54","Type":"ContainerDied","Data":"8f55702c494dd197f2608cc87c20b050ff4854178804e493a8c498efb3c19e44"} Jan 28 17:16:43 crc kubenswrapper[4811]: I0128 17:16:43.418529 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8f8875695-cf5c9" event={"ID":"1a3c3123-f412-424c-a561-b50d836f5f54","Type":"ContainerStarted","Data":"6a3ea27714e0917b0b3bee3ad62f1ca1d81615da6959aa5ce9ee75bae9d3f16e"} Jan 28 17:16:43 crc kubenswrapper[4811]: I0128 17:16:43.421526 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-57f9b55cfb-vwvv2" event={"ID":"b0d2592a-6e6a-4415-a1ef-5b026f8306ef","Type":"ContainerStarted","Data":"a04db934941f3c1367e981260d31b378c79e422fc18882bd4907aed8fdca6ea3"} Jan 28 17:16:43 crc kubenswrapper[4811]: I0128 17:16:43.421561 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-57f9b55cfb-vwvv2" event={"ID":"b0d2592a-6e6a-4415-a1ef-5b026f8306ef","Type":"ContainerStarted","Data":"a40793e5d18e686da8a75d1eae6be48730bc6102f1b0b382291338415be333ad"} Jan 28 17:16:43 crc kubenswrapper[4811]: I0128 17:16:43.421572 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-57f9b55cfb-vwvv2" event={"ID":"b0d2592a-6e6a-4415-a1ef-5b026f8306ef","Type":"ContainerStarted","Data":"be5fe2633699db0dd1f018d54f04da3877dc5b755ce52516604b7e037241fa9d"} Jan 28 17:16:43 crc kubenswrapper[4811]: I0128 17:16:43.421793 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-57f9b55cfb-vwvv2" Jan 28 17:16:43 crc kubenswrapper[4811]: I0128 17:16:43.421842 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-57f9b55cfb-vwvv2" Jan 28 17:16:43 crc kubenswrapper[4811]: I0128 17:16:43.442410 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-854cc966bb-g5fl8" podStartSLOduration=2.442384325 podStartE2EDuration="2.442384325s" podCreationTimestamp="2026-01-28 17:16:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:16:43.43595807 +0000 UTC m=+5496.190321673" watchObservedRunningTime="2026-01-28 17:16:43.442384325 +0000 UTC m=+5496.196747908" Jan 28 17:16:43 crc kubenswrapper[4811]: I0128 17:16:43.487515 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-57f9b55cfb-vwvv2" podStartSLOduration=2.487491544 podStartE2EDuration="2.487491544s" podCreationTimestamp="2026-01-28 17:16:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:16:43.480335079 +0000 UTC m=+5496.234698672" watchObservedRunningTime="2026-01-28 17:16:43.487491544 +0000 UTC m=+5496.241855127" Jan 28 
Jan 28 17:16:44 crc kubenswrapper[4811]: I0128 17:16:44.430830 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8f8875695-cf5c9" event={"ID":"1a3c3123-f412-424c-a561-b50d836f5f54","Type":"ContainerStarted","Data":"e73ffc237d8e1392056c3e79574d66e62cd7397d6831639a05ac08a8e301462f"}
Jan 28 17:16:44 crc kubenswrapper[4811]: I0128 17:16:44.431471 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8f8875695-cf5c9"
Jan 28 17:16:51 crc kubenswrapper[4811]: I0128 17:16:51.951780 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8f8875695-cf5c9"
Jan 28 17:16:51 crc kubenswrapper[4811]: I0128 17:16:51.972303 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8f8875695-cf5c9" podStartSLOduration=10.972283397 podStartE2EDuration="10.972283397s" podCreationTimestamp="2026-01-28 17:16:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:16:44.457263926 +0000 UTC m=+5497.211627509" watchObservedRunningTime="2026-01-28 17:16:51.972283397 +0000 UTC m=+5504.726646970"
Jan 28 17:16:52 crc kubenswrapper[4811]: I0128 17:16:52.009177 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-79ddd5b787-8bg8x"]
Jan 28 17:16:52 crc kubenswrapper[4811]: I0128 17:16:52.009462 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-79ddd5b787-8bg8x" podUID="cbc96676-d50e-495e-9109-d4db0c7f6af9" containerName="dnsmasq-dns" containerID="cri-o://87e6965928cc872303d4d01c5837025a5bb31e2d0f9d3789d3680acafc5a787b" gracePeriod=10
Jan 28 17:16:52 crc kubenswrapper[4811]: I0128 17:16:52.508629 4811 generic.go:334] "Generic (PLEG): container finished" podID="cbc96676-d50e-495e-9109-d4db0c7f6af9" containerID="87e6965928cc872303d4d01c5837025a5bb31e2d0f9d3789d3680acafc5a787b" exitCode=0
Jan 28 17:16:52 crc kubenswrapper[4811]: I0128 17:16:52.508716 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79ddd5b787-8bg8x" event={"ID":"cbc96676-d50e-495e-9109-d4db0c7f6af9","Type":"ContainerDied","Data":"87e6965928cc872303d4d01c5837025a5bb31e2d0f9d3789d3680acafc5a787b"}
Jan 28 17:16:52 crc kubenswrapper[4811]: I0128 17:16:52.508984 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79ddd5b787-8bg8x" event={"ID":"cbc96676-d50e-495e-9109-d4db0c7f6af9","Type":"ContainerDied","Data":"09c10a4e0369007fa254713383581cbcf3d75dd203fc684c63978615379324e0"}
Jan 28 17:16:52 crc kubenswrapper[4811]: I0128 17:16:52.509008 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="09c10a4e0369007fa254713383581cbcf3d75dd203fc684c63978615379324e0"
Jan 28 17:16:52 crc kubenswrapper[4811]: I0128 17:16:52.526310 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79ddd5b787-8bg8x"
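The DELETE above arrives from the API, after which the kubelet kills the old dnsmasq-dns container with the pod's 10-second grace period. For reference, a client-go sketch of an equivalent graceful delete (the kubeconfig path is hypothetical; only the namespace, pod name, and grace period come from the log):

package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Hypothetical kubeconfig path.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig")
	if err != nil {
		panic(err)
	}
	client, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	grace := int64(10) // matches gracePeriod=10 in the kill entry above
	err = client.CoreV1().Pods("openstack").Delete(context.TODO(),
		"dnsmasq-dns-79ddd5b787-8bg8x",
		metav1.DeleteOptions{GracePeriodSeconds: &grace})
	fmt.Println("delete err:", err)
}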
Need to start a new one" pod="openstack/dnsmasq-dns-79ddd5b787-8bg8x" Jan 28 17:16:52 crc kubenswrapper[4811]: I0128 17:16:52.647891 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cbc96676-d50e-495e-9109-d4db0c7f6af9-dns-svc\") pod \"cbc96676-d50e-495e-9109-d4db0c7f6af9\" (UID: \"cbc96676-d50e-495e-9109-d4db0c7f6af9\") " Jan 28 17:16:52 crc kubenswrapper[4811]: I0128 17:16:52.648190 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cbc96676-d50e-495e-9109-d4db0c7f6af9-config\") pod \"cbc96676-d50e-495e-9109-d4db0c7f6af9\" (UID: \"cbc96676-d50e-495e-9109-d4db0c7f6af9\") " Jan 28 17:16:52 crc kubenswrapper[4811]: I0128 17:16:52.648278 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fdjf7\" (UniqueName: \"kubernetes.io/projected/cbc96676-d50e-495e-9109-d4db0c7f6af9-kube-api-access-fdjf7\") pod \"cbc96676-d50e-495e-9109-d4db0c7f6af9\" (UID: \"cbc96676-d50e-495e-9109-d4db0c7f6af9\") " Jan 28 17:16:52 crc kubenswrapper[4811]: I0128 17:16:52.648391 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cbc96676-d50e-495e-9109-d4db0c7f6af9-ovsdbserver-nb\") pod \"cbc96676-d50e-495e-9109-d4db0c7f6af9\" (UID: \"cbc96676-d50e-495e-9109-d4db0c7f6af9\") " Jan 28 17:16:52 crc kubenswrapper[4811]: I0128 17:16:52.648425 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cbc96676-d50e-495e-9109-d4db0c7f6af9-ovsdbserver-sb\") pod \"cbc96676-d50e-495e-9109-d4db0c7f6af9\" (UID: \"cbc96676-d50e-495e-9109-d4db0c7f6af9\") " Jan 28 17:16:52 crc kubenswrapper[4811]: I0128 17:16:52.656993 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cbc96676-d50e-495e-9109-d4db0c7f6af9-kube-api-access-fdjf7" (OuterVolumeSpecName: "kube-api-access-fdjf7") pod "cbc96676-d50e-495e-9109-d4db0c7f6af9" (UID: "cbc96676-d50e-495e-9109-d4db0c7f6af9"). InnerVolumeSpecName "kube-api-access-fdjf7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:16:52 crc kubenswrapper[4811]: I0128 17:16:52.695476 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cbc96676-d50e-495e-9109-d4db0c7f6af9-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "cbc96676-d50e-495e-9109-d4db0c7f6af9" (UID: "cbc96676-d50e-495e-9109-d4db0c7f6af9"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:16:52 crc kubenswrapper[4811]: I0128 17:16:52.698491 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cbc96676-d50e-495e-9109-d4db0c7f6af9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "cbc96676-d50e-495e-9109-d4db0c7f6af9" (UID: "cbc96676-d50e-495e-9109-d4db0c7f6af9"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:16:52 crc kubenswrapper[4811]: I0128 17:16:52.711464 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cbc96676-d50e-495e-9109-d4db0c7f6af9-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "cbc96676-d50e-495e-9109-d4db0c7f6af9" (UID: "cbc96676-d50e-495e-9109-d4db0c7f6af9"). InnerVolumeSpecName "ovsdbserver-nb". 
Jan 28 17:16:52 crc kubenswrapper[4811]: I0128 17:16:52.712348 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cbc96676-d50e-495e-9109-d4db0c7f6af9-config" (OuterVolumeSpecName: "config") pod "cbc96676-d50e-495e-9109-d4db0c7f6af9" (UID: "cbc96676-d50e-495e-9109-d4db0c7f6af9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 17:16:52 crc kubenswrapper[4811]: I0128 17:16:52.750192 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cbc96676-d50e-495e-9109-d4db0c7f6af9-config\") on node \"crc\" DevicePath \"\""
Jan 28 17:16:52 crc kubenswrapper[4811]: I0128 17:16:52.750228 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fdjf7\" (UniqueName: \"kubernetes.io/projected/cbc96676-d50e-495e-9109-d4db0c7f6af9-kube-api-access-fdjf7\") on node \"crc\" DevicePath \"\""
Jan 28 17:16:52 crc kubenswrapper[4811]: I0128 17:16:52.750241 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cbc96676-d50e-495e-9109-d4db0c7f6af9-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Jan 28 17:16:52 crc kubenswrapper[4811]: I0128 17:16:52.750251 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cbc96676-d50e-495e-9109-d4db0c7f6af9-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Jan 28 17:16:52 crc kubenswrapper[4811]: I0128 17:16:52.750262 4811 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cbc96676-d50e-495e-9109-d4db0c7f6af9-dns-svc\") on node \"crc\" DevicePath \"\""
Jan 28 17:16:53 crc kubenswrapper[4811]: I0128 17:16:53.516365 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79ddd5b787-8bg8x"
Jan 28 17:16:53 crc kubenswrapper[4811]: I0128 17:16:53.551274 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-79ddd5b787-8bg8x"]
Jan 28 17:16:53 crc kubenswrapper[4811]: I0128 17:16:53.558867 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-79ddd5b787-8bg8x"]
Jan 28 17:16:53 crc kubenswrapper[4811]: I0128 17:16:53.586520 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-57f9b55cfb-vwvv2"
Jan 28 17:16:53 crc kubenswrapper[4811]: I0128 17:16:53.687731 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-57f9b55cfb-vwvv2"
Jan 28 17:16:54 crc kubenswrapper[4811]: I0128 17:16:54.356737 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cbc96676-d50e-495e-9109-d4db0c7f6af9" path="/var/lib/kubelet/pods/cbc96676-d50e-495e-9109-d4db0c7f6af9/volumes"
Jan 28 17:17:05 crc kubenswrapper[4811]: I0128 17:17:05.012868 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-z7rnj"]
Jan 28 17:17:05 crc kubenswrapper[4811]: E0128 17:17:05.014917 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbc96676-d50e-495e-9109-d4db0c7f6af9" containerName="init"
Jan 28 17:17:05 crc kubenswrapper[4811]: I0128 17:17:05.015020 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbc96676-d50e-495e-9109-d4db0c7f6af9" containerName="init"
Jan 28 17:17:05 crc kubenswrapper[4811]: E0128 17:17:05.015104 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbc96676-d50e-495e-9109-d4db0c7f6af9" containerName="dnsmasq-dns"
Jan 28 17:17:05 crc kubenswrapper[4811]: I0128 17:17:05.015176 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbc96676-d50e-495e-9109-d4db0c7f6af9" containerName="dnsmasq-dns"
Jan 28 17:17:05 crc kubenswrapper[4811]: I0128 17:17:05.015518 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbc96676-d50e-495e-9109-d4db0c7f6af9" containerName="dnsmasq-dns"
Jan 28 17:17:05 crc kubenswrapper[4811]: I0128 17:17:05.016311 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-z7rnj"
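
The paired cpu_manager/memory_manager entries above record stale-state cleanup: resource assignments are still checkpointed for containers of a pod that no longer exists, so they are dropped when the next pod is admitted. An illustrative sketch of that bookkeeping pattern (the type, field, and method names here are assumptions for the sketch, not the kubelet's real internals):

    package main

    import "fmt"

    type key struct{ podUID, containerName string }

    // stateMem stands in for the checkpointed per-container CPU assignments.
    type stateMem struct{ assignments map[key]string }

    // RemoveStaleState drops assignments whose pod is no longer active, which
    // is what the paired "RemoveStaleState: removing container" /
    // "Deleted CPUSet assignment" lines record.
    func (s *stateMem) RemoveStaleState(activePods map[string]bool) {
    	for k := range s.assignments {
    		if !activePods[k.podUID] {
    			fmt.Printf("removing podUID=%q containerName=%q\n", k.podUID, k.containerName)
    			delete(s.assignments, k)
    		}
    	}
    }

    func main() {
    	s := &stateMem{assignments: map[key]string{
    		{podUID: "cbc96676-d50e-495e-9109-d4db0c7f6af9", containerName: "init"}:        "0-3",
    		{podUID: "cbc96676-d50e-495e-9109-d4db0c7f6af9", containerName: "dnsmasq-dns"}: "0-3",
    	}}
    	s.RemoveStaleState(map[string]bool{}) // the dnsmasq pod is gone, so both entries drop
    }
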
Jan 28 17:17:05 crc kubenswrapper[4811]: I0128 17:17:05.021468 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-z7rnj"]
Jan 28 17:17:05 crc kubenswrapper[4811]: I0128 17:17:05.076044 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89-operator-scripts\") pod \"neutron-db-create-z7rnj\" (UID: \"3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89\") " pod="openstack/neutron-db-create-z7rnj"
Jan 28 17:17:05 crc kubenswrapper[4811]: I0128 17:17:05.080902 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-knvkl\" (UniqueName: \"kubernetes.io/projected/3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89-kube-api-access-knvkl\") pod \"neutron-db-create-z7rnj\" (UID: \"3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89\") " pod="openstack/neutron-db-create-z7rnj"
Jan 28 17:17:05 crc kubenswrapper[4811]: I0128 17:17:05.112320 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-f1b2-account-create-update-td8jl"]
Jan 28 17:17:05 crc kubenswrapper[4811]: I0128 17:17:05.115755 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-f1b2-account-create-update-td8jl"
Jan 28 17:17:05 crc kubenswrapper[4811]: I0128 17:17:05.117675 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret"
Jan 28 17:17:05 crc kubenswrapper[4811]: I0128 17:17:05.122865 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-f1b2-account-create-update-td8jl"]
Jan 28 17:17:05 crc kubenswrapper[4811]: I0128 17:17:05.183137 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sh9x4\" (UniqueName: \"kubernetes.io/projected/13ae10f7-7978-4310-94ff-784e15f4b9ee-kube-api-access-sh9x4\") pod \"neutron-f1b2-account-create-update-td8jl\" (UID: \"13ae10f7-7978-4310-94ff-784e15f4b9ee\") " pod="openstack/neutron-f1b2-account-create-update-td8jl"
Jan 28 17:17:05 crc kubenswrapper[4811]: I0128 17:17:05.183215 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89-operator-scripts\") pod \"neutron-db-create-z7rnj\" (UID: \"3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89\") " pod="openstack/neutron-db-create-z7rnj"
Jan 28 17:17:05 crc kubenswrapper[4811]: I0128 17:17:05.183522 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13ae10f7-7978-4310-94ff-784e15f4b9ee-operator-scripts\") pod \"neutron-f1b2-account-create-update-td8jl\" (UID: \"13ae10f7-7978-4310-94ff-784e15f4b9ee\") " pod="openstack/neutron-f1b2-account-create-update-td8jl"
Jan 28 17:17:05 crc kubenswrapper[4811]: I0128 17:17:05.183702 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-knvkl\" (UniqueName: \"kubernetes.io/projected/3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89-kube-api-access-knvkl\") pod \"neutron-db-create-z7rnj\" (UID: \"3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89\") " pod="openstack/neutron-db-create-z7rnj"
Jan 28 17:17:05 crc kubenswrapper[4811]: I0128 17:17:05.184072 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89-operator-scripts\") pod \"neutron-db-create-z7rnj\" (UID: \"3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89\") " pod="openstack/neutron-db-create-z7rnj"
Jan 28 17:17:05 crc kubenswrapper[4811]: I0128 17:17:05.206398 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-knvkl\" (UniqueName: \"kubernetes.io/projected/3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89-kube-api-access-knvkl\") pod \"neutron-db-create-z7rnj\" (UID: \"3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89\") " pod="openstack/neutron-db-create-z7rnj"
Jan 28 17:17:05 crc kubenswrapper[4811]: I0128 17:17:05.285416 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13ae10f7-7978-4310-94ff-784e15f4b9ee-operator-scripts\") pod \"neutron-f1b2-account-create-update-td8jl\" (UID: \"13ae10f7-7978-4310-94ff-784e15f4b9ee\") " pod="openstack/neutron-f1b2-account-create-update-td8jl"
Jan 28 17:17:05 crc kubenswrapper[4811]: I0128 17:17:05.285870 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sh9x4\" (UniqueName: \"kubernetes.io/projected/13ae10f7-7978-4310-94ff-784e15f4b9ee-kube-api-access-sh9x4\") pod \"neutron-f1b2-account-create-update-td8jl\" (UID: \"13ae10f7-7978-4310-94ff-784e15f4b9ee\") " pod="openstack/neutron-f1b2-account-create-update-td8jl"
Jan 28 17:17:05 crc kubenswrapper[4811]: I0128 17:17:05.286710 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13ae10f7-7978-4310-94ff-784e15f4b9ee-operator-scripts\") pod \"neutron-f1b2-account-create-update-td8jl\" (UID: \"13ae10f7-7978-4310-94ff-784e15f4b9ee\") " pod="openstack/neutron-f1b2-account-create-update-td8jl"
Jan 28 17:17:05 crc kubenswrapper[4811]: I0128 17:17:05.304279 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sh9x4\" (UniqueName: \"kubernetes.io/projected/13ae10f7-7978-4310-94ff-784e15f4b9ee-kube-api-access-sh9x4\") pod \"neutron-f1b2-account-create-update-td8jl\" (UID: \"13ae10f7-7978-4310-94ff-784e15f4b9ee\") " pod="openstack/neutron-f1b2-account-create-update-td8jl"
Jan 28 17:17:05 crc kubenswrapper[4811]: I0128 17:17:05.334297 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-z7rnj"
Jan 28 17:17:05 crc kubenswrapper[4811]: I0128 17:17:05.430597 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-f1b2-account-create-update-td8jl"
Jan 28 17:17:05 crc kubenswrapper[4811]: I0128 17:17:05.900806 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-f1b2-account-create-update-td8jl"]
Jan 28 17:17:05 crc kubenswrapper[4811]: W0128 17:17:05.904356 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3b1ed9d8_7d6d_4411_9ff4_1723f1eaaf89.slice/crio-3f24e7c3be216862d41550547ce43e1de1f264de63ffbc216871e11d9686b77e WatchSource:0}: Error finding container 3f24e7c3be216862d41550547ce43e1de1f264de63ffbc216871e11d9686b77e: Status 404 returned error can't find the container with id 3f24e7c3be216862d41550547ce43e1de1f264de63ffbc216871e11d9686b77e
Jan 28 17:17:05 crc kubenswrapper[4811]: I0128 17:17:05.910557 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-z7rnj"]
Jan 28 17:17:06 crc kubenswrapper[4811]: I0128 17:17:06.629369 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-z7rnj" event={"ID":"3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89","Type":"ContainerStarted","Data":"369aa68a0a0a8a7a80bbef37cb9b71506e789c078066bbd3ddcc848f59a96f17"}
Jan 28 17:17:06 crc kubenswrapper[4811]: I0128 17:17:06.629791 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-z7rnj" event={"ID":"3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89","Type":"ContainerStarted","Data":"3f24e7c3be216862d41550547ce43e1de1f264de63ffbc216871e11d9686b77e"}
Jan 28 17:17:06 crc kubenswrapper[4811]: I0128 17:17:06.631833 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f1b2-account-create-update-td8jl" event={"ID":"13ae10f7-7978-4310-94ff-784e15f4b9ee","Type":"ContainerStarted","Data":"c3e624b34fc9af6cb8442bd1d866e3967690ec6283ecb05443400dc263e60b8f"}
Jan 28 17:17:06 crc kubenswrapper[4811]: I0128 17:17:06.631864 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f1b2-account-create-update-td8jl" event={"ID":"13ae10f7-7978-4310-94ff-784e15f4b9ee","Type":"ContainerStarted","Data":"482d920343a9378e2a2ccdb664f1a4d38d2c8c3ad74612b0442c67d20f03cb51"}
Jan 28 17:17:06 crc kubenswrapper[4811]: I0128 17:17:06.650374 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-create-z7rnj" podStartSLOduration=2.650352065 podStartE2EDuration="2.650352065s" podCreationTimestamp="2026-01-28 17:17:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:17:06.643292893 +0000 UTC m=+5519.397656476" watchObservedRunningTime="2026-01-28 17:17:06.650352065 +0000 UTC m=+5519.404715648"
Jan 28 17:17:06 crc kubenswrapper[4811]: I0128 17:17:06.667013 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-f1b2-account-create-update-td8jl" podStartSLOduration=1.666990448 podStartE2EDuration="1.666990448s" podCreationTimestamp="2026-01-28 17:17:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:17:06.660381639 +0000 UTC m=+5519.414745222" watchObservedRunningTime="2026-01-28 17:17:06.666990448 +0000 UTC m=+5519.421354031"
Jan 28 17:17:07 crc kubenswrapper[4811]: I0128 17:17:07.641944 4811 generic.go:334] "Generic (PLEG): container finished" podID="13ae10f7-7978-4310-94ff-784e15f4b9ee" containerID="c3e624b34fc9af6cb8442bd1d866e3967690ec6283ecb05443400dc263e60b8f" exitCode=0
Jan 28 17:17:07 crc kubenswrapper[4811]: I0128 17:17:07.642052 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f1b2-account-create-update-td8jl" event={"ID":"13ae10f7-7978-4310-94ff-784e15f4b9ee","Type":"ContainerDied","Data":"c3e624b34fc9af6cb8442bd1d866e3967690ec6283ecb05443400dc263e60b8f"}
Jan 28 17:17:07 crc kubenswrapper[4811]: I0128 17:17:07.643920 4811 generic.go:334] "Generic (PLEG): container finished" podID="3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89" containerID="369aa68a0a0a8a7a80bbef37cb9b71506e789c078066bbd3ddcc848f59a96f17" exitCode=0
Jan 28 17:17:07 crc kubenswrapper[4811]: I0128 17:17:07.643961 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-z7rnj" event={"ID":"3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89","Type":"ContainerDied","Data":"369aa68a0a0a8a7a80bbef37cb9b71506e789c078066bbd3ddcc848f59a96f17"}
Jan 28 17:17:09 crc kubenswrapper[4811]: I0128 17:17:09.098939 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-f1b2-account-create-update-td8jl"
Jan 28 17:17:09 crc kubenswrapper[4811]: I0128 17:17:09.104896 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-z7rnj"
Jan 28 17:17:09 crc kubenswrapper[4811]: I0128 17:17:09.146927 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-knvkl\" (UniqueName: \"kubernetes.io/projected/3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89-kube-api-access-knvkl\") pod \"3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89\" (UID: \"3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89\") "
Jan 28 17:17:09 crc kubenswrapper[4811]: I0128 17:17:09.146986 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13ae10f7-7978-4310-94ff-784e15f4b9ee-operator-scripts\") pod \"13ae10f7-7978-4310-94ff-784e15f4b9ee\" (UID: \"13ae10f7-7978-4310-94ff-784e15f4b9ee\") "
Jan 28 17:17:09 crc kubenswrapper[4811]: I0128 17:17:09.147047 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sh9x4\" (UniqueName: \"kubernetes.io/projected/13ae10f7-7978-4310-94ff-784e15f4b9ee-kube-api-access-sh9x4\") pod \"13ae10f7-7978-4310-94ff-784e15f4b9ee\" (UID: \"13ae10f7-7978-4310-94ff-784e15f4b9ee\") "
Jan 28 17:17:09 crc kubenswrapper[4811]: I0128 17:17:09.147085 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89-operator-scripts\") pod \"3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89\" (UID: \"3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89\") "
Jan 28 17:17:09 crc kubenswrapper[4811]: I0128 17:17:09.147513 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13ae10f7-7978-4310-94ff-784e15f4b9ee-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "13ae10f7-7978-4310-94ff-784e15f4b9ee" (UID: "13ae10f7-7978-4310-94ff-784e15f4b9ee"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
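
A note on the pod_startup_latency_tracker entries above: when firstStartedPulling and lastFinishedPulling are the zero time (no image pull was needed), podStartSLOduration is just watchObservedRunningTime minus podCreationTimestamp; for neutron-db-create-z7rnj, 17:17:06.650352065 - 17:17:04 = 2.650352065s, exactly the logged value. A quick check of that relationship, with the timestamps copied from the entry above (the arithmetic is the point; the relationship itself is an observation from these entries, not a documented invariant):

    package main

    import (
    	"fmt"
    	"time"
    )

    func main() {
    	// Layout matches the "2026-01-28 17:17:04 +0000 UTC" format in the log;
    	// the optional fractional seconds cover both timestamps.
    	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
    	created, err := time.Parse(layout, "2026-01-28 17:17:04 +0000 UTC")
    	if err != nil {
    		panic(err)
    	}
    	observed, err := time.Parse(layout, "2026-01-28 17:17:06.650352065 +0000 UTC")
    	if err != nil {
    		panic(err)
    	}
    	fmt.Println(observed.Sub(created)) // 2.650352065s == podStartSLOduration
    }
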
Jan 28 17:17:09 crc kubenswrapper[4811]: I0128 17:17:09.147691 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89" (UID: "3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 17:17:09 crc kubenswrapper[4811]: I0128 17:17:09.152516 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13ae10f7-7978-4310-94ff-784e15f4b9ee-kube-api-access-sh9x4" (OuterVolumeSpecName: "kube-api-access-sh9x4") pod "13ae10f7-7978-4310-94ff-784e15f4b9ee" (UID: "13ae10f7-7978-4310-94ff-784e15f4b9ee"). InnerVolumeSpecName "kube-api-access-sh9x4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 17:17:09 crc kubenswrapper[4811]: I0128 17:17:09.152561 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89-kube-api-access-knvkl" (OuterVolumeSpecName: "kube-api-access-knvkl") pod "3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89" (UID: "3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89"). InnerVolumeSpecName "kube-api-access-knvkl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 17:17:09 crc kubenswrapper[4811]: I0128 17:17:09.249138 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sh9x4\" (UniqueName: \"kubernetes.io/projected/13ae10f7-7978-4310-94ff-784e15f4b9ee-kube-api-access-sh9x4\") on node \"crc\" DevicePath \"\""
Jan 28 17:17:09 crc kubenswrapper[4811]: I0128 17:17:09.249214 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 28 17:17:09 crc kubenswrapper[4811]: I0128 17:17:09.249238 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-knvkl\" (UniqueName: \"kubernetes.io/projected/3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89-kube-api-access-knvkl\") on node \"crc\" DevicePath \"\""
Jan 28 17:17:09 crc kubenswrapper[4811]: I0128 17:17:09.249259 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13ae10f7-7978-4310-94ff-784e15f4b9ee-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 28 17:17:09 crc kubenswrapper[4811]: I0128 17:17:09.661011 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-z7rnj"
Jan 28 17:17:09 crc kubenswrapper[4811]: I0128 17:17:09.661041 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-z7rnj" event={"ID":"3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89","Type":"ContainerDied","Data":"3f24e7c3be216862d41550547ce43e1de1f264de63ffbc216871e11d9686b77e"}
Jan 28 17:17:09 crc kubenswrapper[4811]: I0128 17:17:09.661480 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3f24e7c3be216862d41550547ce43e1de1f264de63ffbc216871e11d9686b77e"
Jan 28 17:17:09 crc kubenswrapper[4811]: I0128 17:17:09.663131 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f1b2-account-create-update-td8jl" event={"ID":"13ae10f7-7978-4310-94ff-784e15f4b9ee","Type":"ContainerDied","Data":"482d920343a9378e2a2ccdb664f1a4d38d2c8c3ad74612b0442c67d20f03cb51"}
Jan 28 17:17:09 crc kubenswrapper[4811]: I0128 17:17:09.663165 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-f1b2-account-create-update-td8jl"
Jan 28 17:17:09 crc kubenswrapper[4811]: I0128 17:17:09.663181 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="482d920343a9378e2a2ccdb664f1a4d38d2c8c3ad74612b0442c67d20f03cb51"
Jan 28 17:17:15 crc kubenswrapper[4811]: I0128 17:17:15.253379 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-5fgn8"]
Jan 28 17:17:15 crc kubenswrapper[4811]: E0128 17:17:15.254407 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13ae10f7-7978-4310-94ff-784e15f4b9ee" containerName="mariadb-account-create-update"
Jan 28 17:17:15 crc kubenswrapper[4811]: I0128 17:17:15.254443 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="13ae10f7-7978-4310-94ff-784e15f4b9ee" containerName="mariadb-account-create-update"
Jan 28 17:17:15 crc kubenswrapper[4811]: E0128 17:17:15.254457 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89" containerName="mariadb-database-create"
Jan 28 17:17:15 crc kubenswrapper[4811]: I0128 17:17:15.254465 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89" containerName="mariadb-database-create"
Jan 28 17:17:15 crc kubenswrapper[4811]: I0128 17:17:15.254649 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89" containerName="mariadb-database-create"
Jan 28 17:17:15 crc kubenswrapper[4811]: I0128 17:17:15.254677 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="13ae10f7-7978-4310-94ff-784e15f4b9ee" containerName="mariadb-account-create-update"
Jan 28 17:17:15 crc kubenswrapper[4811]: I0128 17:17:15.255351 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-5fgn8"
Jan 28 17:17:15 crc kubenswrapper[4811]: I0128 17:17:15.258377 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config"
Jan 28 17:17:15 crc kubenswrapper[4811]: I0128 17:17:15.258997 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config"
Jan 28 17:17:15 crc kubenswrapper[4811]: I0128 17:17:15.259272 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-pp7r8"
Jan 28 17:17:15 crc kubenswrapper[4811]: I0128 17:17:15.261338 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-5fgn8"]
Jan 28 17:17:15 crc kubenswrapper[4811]: I0128 17:17:15.347489 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/76e23342-294c-42ca-afa8-ea18f4db3eca-config\") pod \"neutron-db-sync-5fgn8\" (UID: \"76e23342-294c-42ca-afa8-ea18f4db3eca\") " pod="openstack/neutron-db-sync-5fgn8"
Jan 28 17:17:15 crc kubenswrapper[4811]: I0128 17:17:15.347641 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76e23342-294c-42ca-afa8-ea18f4db3eca-combined-ca-bundle\") pod \"neutron-db-sync-5fgn8\" (UID: \"76e23342-294c-42ca-afa8-ea18f4db3eca\") " pod="openstack/neutron-db-sync-5fgn8"
Jan 28 17:17:15 crc kubenswrapper[4811]: I0128 17:17:15.347733 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lspgt\" (UniqueName: \"kubernetes.io/projected/76e23342-294c-42ca-afa8-ea18f4db3eca-kube-api-access-lspgt\") pod \"neutron-db-sync-5fgn8\" (UID: \"76e23342-294c-42ca-afa8-ea18f4db3eca\") " pod="openstack/neutron-db-sync-5fgn8"
Jan 28 17:17:15 crc kubenswrapper[4811]: I0128 17:17:15.450031 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/76e23342-294c-42ca-afa8-ea18f4db3eca-config\") pod \"neutron-db-sync-5fgn8\" (UID: \"76e23342-294c-42ca-afa8-ea18f4db3eca\") " pod="openstack/neutron-db-sync-5fgn8"
Jan 28 17:17:15 crc kubenswrapper[4811]: I0128 17:17:15.450119 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76e23342-294c-42ca-afa8-ea18f4db3eca-combined-ca-bundle\") pod \"neutron-db-sync-5fgn8\" (UID: \"76e23342-294c-42ca-afa8-ea18f4db3eca\") " pod="openstack/neutron-db-sync-5fgn8"
Jan 28 17:17:15 crc kubenswrapper[4811]: I0128 17:17:15.451050 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lspgt\" (UniqueName: \"kubernetes.io/projected/76e23342-294c-42ca-afa8-ea18f4db3eca-kube-api-access-lspgt\") pod \"neutron-db-sync-5fgn8\" (UID: \"76e23342-294c-42ca-afa8-ea18f4db3eca\") " pod="openstack/neutron-db-sync-5fgn8"
Jan 28 17:17:15 crc kubenswrapper[4811]: I0128 17:17:15.456352 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/76e23342-294c-42ca-afa8-ea18f4db3eca-config\") pod \"neutron-db-sync-5fgn8\" (UID: \"76e23342-294c-42ca-afa8-ea18f4db3eca\") " pod="openstack/neutron-db-sync-5fgn8"
Jan 28 17:17:15 crc kubenswrapper[4811]: I0128 17:17:15.467613 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76e23342-294c-42ca-afa8-ea18f4db3eca-combined-ca-bundle\") pod \"neutron-db-sync-5fgn8\" (UID: \"76e23342-294c-42ca-afa8-ea18f4db3eca\") " pod="openstack/neutron-db-sync-5fgn8"
Jan 28 17:17:15 crc kubenswrapper[4811]: I0128 17:17:15.471222 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lspgt\" (UniqueName: \"kubernetes.io/projected/76e23342-294c-42ca-afa8-ea18f4db3eca-kube-api-access-lspgt\") pod \"neutron-db-sync-5fgn8\" (UID: \"76e23342-294c-42ca-afa8-ea18f4db3eca\") " pod="openstack/neutron-db-sync-5fgn8"
Jan 28 17:17:15 crc kubenswrapper[4811]: I0128 17:17:15.579024 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-5fgn8"
Jan 28 17:17:16 crc kubenswrapper[4811]: I0128 17:17:16.024198 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-5fgn8"]
Jan 28 17:17:16 crc kubenswrapper[4811]: I0128 17:17:16.720377 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-5fgn8" event={"ID":"76e23342-294c-42ca-afa8-ea18f4db3eca","Type":"ContainerStarted","Data":"a5cc4119707b0e0d37d6e992aa17c7f46c1a75ce4b82fbe38756c83ebb8a2970"}
Jan 28 17:17:16 crc kubenswrapper[4811]: I0128 17:17:16.720450 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-5fgn8" event={"ID":"76e23342-294c-42ca-afa8-ea18f4db3eca","Type":"ContainerStarted","Data":"fd4a85116dab93f2aa60272424bd16d956334ee450c7645027be9b64a1db0703"}
Jan 28 17:17:16 crc kubenswrapper[4811]: I0128 17:17:16.753983 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-5fgn8" podStartSLOduration=1.753956848 podStartE2EDuration="1.753956848s" podCreationTimestamp="2026-01-28 17:17:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:17:16.736080471 +0000 UTC m=+5529.490444094" watchObservedRunningTime="2026-01-28 17:17:16.753956848 +0000 UTC m=+5529.508320441"
Jan 28 17:17:20 crc kubenswrapper[4811]: I0128 17:17:20.752038 4811 generic.go:334] "Generic (PLEG): container finished" podID="76e23342-294c-42ca-afa8-ea18f4db3eca" containerID="a5cc4119707b0e0d37d6e992aa17c7f46c1a75ce4b82fbe38756c83ebb8a2970" exitCode=0
Jan 28 17:17:20 crc kubenswrapper[4811]: I0128 17:17:20.752304 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-5fgn8" event={"ID":"76e23342-294c-42ca-afa8-ea18f4db3eca","Type":"ContainerDied","Data":"a5cc4119707b0e0d37d6e992aa17c7f46c1a75ce4b82fbe38756c83ebb8a2970"}
Jan 28 17:17:22 crc kubenswrapper[4811]: I0128 17:17:22.033789 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-5fgn8"
Jan 28 17:17:22 crc kubenswrapper[4811]: I0128 17:17:22.175804 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/76e23342-294c-42ca-afa8-ea18f4db3eca-config\") pod \"76e23342-294c-42ca-afa8-ea18f4db3eca\" (UID: \"76e23342-294c-42ca-afa8-ea18f4db3eca\") "
Jan 28 17:17:22 crc kubenswrapper[4811]: I0128 17:17:22.175880 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76e23342-294c-42ca-afa8-ea18f4db3eca-combined-ca-bundle\") pod \"76e23342-294c-42ca-afa8-ea18f4db3eca\" (UID: \"76e23342-294c-42ca-afa8-ea18f4db3eca\") "
Jan 28 17:17:22 crc kubenswrapper[4811]: I0128 17:17:22.175947 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lspgt\" (UniqueName: \"kubernetes.io/projected/76e23342-294c-42ca-afa8-ea18f4db3eca-kube-api-access-lspgt\") pod \"76e23342-294c-42ca-afa8-ea18f4db3eca\" (UID: \"76e23342-294c-42ca-afa8-ea18f4db3eca\") "
Jan 28 17:17:22 crc kubenswrapper[4811]: I0128 17:17:22.182620 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76e23342-294c-42ca-afa8-ea18f4db3eca-kube-api-access-lspgt" (OuterVolumeSpecName: "kube-api-access-lspgt") pod "76e23342-294c-42ca-afa8-ea18f4db3eca" (UID: "76e23342-294c-42ca-afa8-ea18f4db3eca"). InnerVolumeSpecName "kube-api-access-lspgt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 17:17:22 crc kubenswrapper[4811]: I0128 17:17:22.199614 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76e23342-294c-42ca-afa8-ea18f4db3eca-config" (OuterVolumeSpecName: "config") pod "76e23342-294c-42ca-afa8-ea18f4db3eca" (UID: "76e23342-294c-42ca-afa8-ea18f4db3eca"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:17:22 crc kubenswrapper[4811]: I0128 17:17:22.208763 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76e23342-294c-42ca-afa8-ea18f4db3eca-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "76e23342-294c-42ca-afa8-ea18f4db3eca" (UID: "76e23342-294c-42ca-afa8-ea18f4db3eca"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:17:22 crc kubenswrapper[4811]: I0128 17:17:22.277682 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/76e23342-294c-42ca-afa8-ea18f4db3eca-config\") on node \"crc\" DevicePath \"\""
Jan 28 17:17:22 crc kubenswrapper[4811]: I0128 17:17:22.277728 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76e23342-294c-42ca-afa8-ea18f4db3eca-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 17:17:22 crc kubenswrapper[4811]: I0128 17:17:22.277744 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lspgt\" (UniqueName: \"kubernetes.io/projected/76e23342-294c-42ca-afa8-ea18f4db3eca-kube-api-access-lspgt\") on node \"crc\" DevicePath \"\""
Jan 28 17:17:22 crc kubenswrapper[4811]: I0128 17:17:22.774669 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-5fgn8" event={"ID":"76e23342-294c-42ca-afa8-ea18f4db3eca","Type":"ContainerDied","Data":"fd4a85116dab93f2aa60272424bd16d956334ee450c7645027be9b64a1db0703"}
Jan 28 17:17:22 crc kubenswrapper[4811]: I0128 17:17:22.775013 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fd4a85116dab93f2aa60272424bd16d956334ee450c7645027be9b64a1db0703"
Jan 28 17:17:22 crc kubenswrapper[4811]: I0128 17:17:22.774751 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-5fgn8"
Jan 28 17:17:22 crc kubenswrapper[4811]: I0128 17:17:22.994876 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-bbc7f9489-jbcqx"]
Jan 28 17:17:22 crc kubenswrapper[4811]: E0128 17:17:22.995235 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76e23342-294c-42ca-afa8-ea18f4db3eca" containerName="neutron-db-sync"
Jan 28 17:17:22 crc kubenswrapper[4811]: I0128 17:17:22.995251 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="76e23342-294c-42ca-afa8-ea18f4db3eca" containerName="neutron-db-sync"
Jan 28 17:17:22 crc kubenswrapper[4811]: I0128 17:17:22.995423 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="76e23342-294c-42ca-afa8-ea18f4db3eca" containerName="neutron-db-sync"
Jan 28 17:17:22 crc kubenswrapper[4811]: I0128 17:17:22.996337 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bbc7f9489-jbcqx"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.010981 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bbc7f9489-jbcqx"]
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.091212 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7f987646bf-7rgxf"]
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.093348 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7f987646bf-7rgxf"
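
The ADD, UPDATE, probe, DELETE, REMOVE lines threaded through the entries above are the kubelet sync loop walking each pod through its lifecycle; REMOVE is the final verb once a deleted pod's resources are gone. A small illustrative reducer, assuming only the quoted "SyncLoop <VERB>" message shape seen in this log (not a kubelet API), that reports the last lifecycle verb seen per pod:

    package main

    import (
    	"bufio"
    	"fmt"
    	"os"
    	"regexp"
    )

    func main() {
    	// Captures the verb and pod name from the SyncLoop source="api" lines.
    	re := regexp.MustCompile(`"SyncLoop (ADD|UPDATE|DELETE|REMOVE)" source="api" pods=\["([^"]+)"\]`)
    	last := map[string]string{}
    	sc := bufio.NewScanner(os.Stdin)
    	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024)
    	for sc.Scan() {
    		if m := re.FindStringSubmatch(sc.Text()); m != nil {
    			last[m[2]] = m[1]
    		}
    	}
    	for pod, verb := range last {
    		fmt.Printf("%s: last verb %s\n", pod, verb) // REMOVE => fully deleted
    	}
    }
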
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.093705 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fde783c7-bbcb-4005-9507-0610eb14ba6a-dns-svc\") pod \"dnsmasq-dns-bbc7f9489-jbcqx\" (UID: \"fde783c7-bbcb-4005-9507-0610eb14ba6a\") " pod="openstack/dnsmasq-dns-bbc7f9489-jbcqx"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.093806 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rsdk\" (UniqueName: \"kubernetes.io/projected/fde783c7-bbcb-4005-9507-0610eb14ba6a-kube-api-access-6rsdk\") pod \"dnsmasq-dns-bbc7f9489-jbcqx\" (UID: \"fde783c7-bbcb-4005-9507-0610eb14ba6a\") " pod="openstack/dnsmasq-dns-bbc7f9489-jbcqx"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.093863 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fde783c7-bbcb-4005-9507-0610eb14ba6a-ovsdbserver-sb\") pod \"dnsmasq-dns-bbc7f9489-jbcqx\" (UID: \"fde783c7-bbcb-4005-9507-0610eb14ba6a\") " pod="openstack/dnsmasq-dns-bbc7f9489-jbcqx"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.093898 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fde783c7-bbcb-4005-9507-0610eb14ba6a-ovsdbserver-nb\") pod \"dnsmasq-dns-bbc7f9489-jbcqx\" (UID: \"fde783c7-bbcb-4005-9507-0610eb14ba6a\") " pod="openstack/dnsmasq-dns-bbc7f9489-jbcqx"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.093920 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fde783c7-bbcb-4005-9507-0610eb14ba6a-config\") pod \"dnsmasq-dns-bbc7f9489-jbcqx\" (UID: \"fde783c7-bbcb-4005-9507-0610eb14ba6a\") " pod="openstack/dnsmasq-dns-bbc7f9489-jbcqx"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.096676 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-pp7r8"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.097092 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.097659 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.111493 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7f987646bf-7rgxf"]
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.195839 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rsdk\" (UniqueName: \"kubernetes.io/projected/fde783c7-bbcb-4005-9507-0610eb14ba6a-kube-api-access-6rsdk\") pod \"dnsmasq-dns-bbc7f9489-jbcqx\" (UID: \"fde783c7-bbcb-4005-9507-0610eb14ba6a\") " pod="openstack/dnsmasq-dns-bbc7f9489-jbcqx"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.195895 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fde783c7-bbcb-4005-9507-0610eb14ba6a-ovsdbserver-sb\") pod \"dnsmasq-dns-bbc7f9489-jbcqx\" (UID: \"fde783c7-bbcb-4005-9507-0610eb14ba6a\") " pod="openstack/dnsmasq-dns-bbc7f9489-jbcqx"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.195926 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ql9b\" (UniqueName: \"kubernetes.io/projected/9a726314-a1bb-4a46-8cee-03d7cbe8e9fb-kube-api-access-9ql9b\") pod \"neutron-7f987646bf-7rgxf\" (UID: \"9a726314-a1bb-4a46-8cee-03d7cbe8e9fb\") " pod="openstack/neutron-7f987646bf-7rgxf"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.195965 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fde783c7-bbcb-4005-9507-0610eb14ba6a-ovsdbserver-nb\") pod \"dnsmasq-dns-bbc7f9489-jbcqx\" (UID: \"fde783c7-bbcb-4005-9507-0610eb14ba6a\") " pod="openstack/dnsmasq-dns-bbc7f9489-jbcqx"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.195993 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fde783c7-bbcb-4005-9507-0610eb14ba6a-config\") pod \"dnsmasq-dns-bbc7f9489-jbcqx\" (UID: \"fde783c7-bbcb-4005-9507-0610eb14ba6a\") " pod="openstack/dnsmasq-dns-bbc7f9489-jbcqx"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.196914 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fde783c7-bbcb-4005-9507-0610eb14ba6a-ovsdbserver-sb\") pod \"dnsmasq-dns-bbc7f9489-jbcqx\" (UID: \"fde783c7-bbcb-4005-9507-0610eb14ba6a\") " pod="openstack/dnsmasq-dns-bbc7f9489-jbcqx"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.196930 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fde783c7-bbcb-4005-9507-0610eb14ba6a-config\") pod \"dnsmasq-dns-bbc7f9489-jbcqx\" (UID: \"fde783c7-bbcb-4005-9507-0610eb14ba6a\") " pod="openstack/dnsmasq-dns-bbc7f9489-jbcqx"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.196971 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fde783c7-bbcb-4005-9507-0610eb14ba6a-dns-svc\") pod \"dnsmasq-dns-bbc7f9489-jbcqx\" (UID: \"fde783c7-bbcb-4005-9507-0610eb14ba6a\") " pod="openstack/dnsmasq-dns-bbc7f9489-jbcqx"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.197025 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a726314-a1bb-4a46-8cee-03d7cbe8e9fb-combined-ca-bundle\") pod \"neutron-7f987646bf-7rgxf\" (UID: \"9a726314-a1bb-4a46-8cee-03d7cbe8e9fb\") " pod="openstack/neutron-7f987646bf-7rgxf"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.197059 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/9a726314-a1bb-4a46-8cee-03d7cbe8e9fb-httpd-config\") pod \"neutron-7f987646bf-7rgxf\" (UID: \"9a726314-a1bb-4a46-8cee-03d7cbe8e9fb\") " pod="openstack/neutron-7f987646bf-7rgxf"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.197062 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fde783c7-bbcb-4005-9507-0610eb14ba6a-ovsdbserver-nb\") pod \"dnsmasq-dns-bbc7f9489-jbcqx\" (UID: \"fde783c7-bbcb-4005-9507-0610eb14ba6a\") " pod="openstack/dnsmasq-dns-bbc7f9489-jbcqx"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.197121 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/9a726314-a1bb-4a46-8cee-03d7cbe8e9fb-config\") pod \"neutron-7f987646bf-7rgxf\" (UID: \"9a726314-a1bb-4a46-8cee-03d7cbe8e9fb\") " pod="openstack/neutron-7f987646bf-7rgxf"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.197586 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fde783c7-bbcb-4005-9507-0610eb14ba6a-dns-svc\") pod \"dnsmasq-dns-bbc7f9489-jbcqx\" (UID: \"fde783c7-bbcb-4005-9507-0610eb14ba6a\") " pod="openstack/dnsmasq-dns-bbc7f9489-jbcqx"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.215513 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rsdk\" (UniqueName: \"kubernetes.io/projected/fde783c7-bbcb-4005-9507-0610eb14ba6a-kube-api-access-6rsdk\") pod \"dnsmasq-dns-bbc7f9489-jbcqx\" (UID: \"fde783c7-bbcb-4005-9507-0610eb14ba6a\") " pod="openstack/dnsmasq-dns-bbc7f9489-jbcqx"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.299879 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ql9b\" (UniqueName: \"kubernetes.io/projected/9a726314-a1bb-4a46-8cee-03d7cbe8e9fb-kube-api-access-9ql9b\") pod \"neutron-7f987646bf-7rgxf\" (UID: \"9a726314-a1bb-4a46-8cee-03d7cbe8e9fb\") " pod="openstack/neutron-7f987646bf-7rgxf"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.300047 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a726314-a1bb-4a46-8cee-03d7cbe8e9fb-combined-ca-bundle\") pod \"neutron-7f987646bf-7rgxf\" (UID: \"9a726314-a1bb-4a46-8cee-03d7cbe8e9fb\") " pod="openstack/neutron-7f987646bf-7rgxf"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.300092 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/9a726314-a1bb-4a46-8cee-03d7cbe8e9fb-httpd-config\") pod \"neutron-7f987646bf-7rgxf\" (UID: \"9a726314-a1bb-4a46-8cee-03d7cbe8e9fb\") " pod="openstack/neutron-7f987646bf-7rgxf"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.300128 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/9a726314-a1bb-4a46-8cee-03d7cbe8e9fb-config\") pod \"neutron-7f987646bf-7rgxf\" (UID: \"9a726314-a1bb-4a46-8cee-03d7cbe8e9fb\") " pod="openstack/neutron-7f987646bf-7rgxf"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.310502 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a726314-a1bb-4a46-8cee-03d7cbe8e9fb-combined-ca-bundle\") pod \"neutron-7f987646bf-7rgxf\" (UID: \"9a726314-a1bb-4a46-8cee-03d7cbe8e9fb\") " pod="openstack/neutron-7f987646bf-7rgxf"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.317214 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bbc7f9489-jbcqx"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.318809 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/9a726314-a1bb-4a46-8cee-03d7cbe8e9fb-httpd-config\") pod \"neutron-7f987646bf-7rgxf\" (UID: \"9a726314-a1bb-4a46-8cee-03d7cbe8e9fb\") " pod="openstack/neutron-7f987646bf-7rgxf"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.327884 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/9a726314-a1bb-4a46-8cee-03d7cbe8e9fb-config\") pod \"neutron-7f987646bf-7rgxf\" (UID: \"9a726314-a1bb-4a46-8cee-03d7cbe8e9fb\") " pod="openstack/neutron-7f987646bf-7rgxf"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.339310 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ql9b\" (UniqueName: \"kubernetes.io/projected/9a726314-a1bb-4a46-8cee-03d7cbe8e9fb-kube-api-access-9ql9b\") pod \"neutron-7f987646bf-7rgxf\" (UID: \"9a726314-a1bb-4a46-8cee-03d7cbe8e9fb\") " pod="openstack/neutron-7f987646bf-7rgxf"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.409045 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7f987646bf-7rgxf"
Jan 28 17:17:23 crc kubenswrapper[4811]: I0128 17:17:23.838929 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bbc7f9489-jbcqx"]
Jan 28 17:17:24 crc kubenswrapper[4811]: I0128 17:17:24.047227 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7f987646bf-7rgxf"]
Jan 28 17:17:24 crc kubenswrapper[4811]: W0128 17:17:24.112090 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9a726314_a1bb_4a46_8cee_03d7cbe8e9fb.slice/crio-3c2668ac1bc76e983fef59ee3242e1587c66eac2dbbc90579c18509e66ff962a WatchSource:0}: Error finding container 3c2668ac1bc76e983fef59ee3242e1587c66eac2dbbc90579c18509e66ff962a: Status 404 returned error can't find the container with id 3c2668ac1bc76e983fef59ee3242e1587c66eac2dbbc90579c18509e66ff962a
Jan 28 17:17:24 crc kubenswrapper[4811]: I0128 17:17:24.792172 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7f987646bf-7rgxf" event={"ID":"9a726314-a1bb-4a46-8cee-03d7cbe8e9fb","Type":"ContainerStarted","Data":"241e611e1cf4e93bddcad2034162dbea824582cc22a46266f5e1ca7a7e3ff24c"}
Jan 28 17:17:24 crc kubenswrapper[4811]: I0128 17:17:24.792515 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7f987646bf-7rgxf" event={"ID":"9a726314-a1bb-4a46-8cee-03d7cbe8e9fb","Type":"ContainerStarted","Data":"f76286598aa1c739f176c1075b07fba2056fb35dc4b270b80280b4e0c030f4ab"}
Jan 28 17:17:24 crc kubenswrapper[4811]: I0128 17:17:24.792529 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7f987646bf-7rgxf" event={"ID":"9a726314-a1bb-4a46-8cee-03d7cbe8e9fb","Type":"ContainerStarted","Data":"3c2668ac1bc76e983fef59ee3242e1587c66eac2dbbc90579c18509e66ff962a"}
Jan 28 17:17:24 crc kubenswrapper[4811]: I0128 17:17:24.794875 4811 generic.go:334] "Generic (PLEG): container finished" podID="fde783c7-bbcb-4005-9507-0610eb14ba6a" containerID="201d1e26dc391317a26f2a3e2673730e2449ec83e3aa362906b130802156941b" exitCode=0
Jan 28 17:17:24 crc kubenswrapper[4811]: I0128 17:17:24.795693 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-7f987646bf-7rgxf"
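
The manager.go:1169 warnings above (here and earlier at 17:17:05 for crio-3f24e7c3...) appear to be a transient race: the cgroup watcher sees a freshly created crio-<id> cgroup before the runtime can answer for that container, so the lookup returns 404, and the pod then starts normally in the very next entries. A small sketch, assuming only the message shape in this log, that pulls the affected container IDs out of a log stream:

    package main

    import (
    	"bufio"
    	"fmt"
    	"os"
    	"regexp"
    )

    func main() {
    	// The warnings embed the 64-hex container ID after "Error finding container".
    	re := regexp.MustCompile(`Error finding container ([0-9a-f]{64})`)
    	sc := bufio.NewScanner(os.Stdin)
    	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024)
    	for sc.Scan() {
    		if m := re.FindStringSubmatch(sc.Text()); m != nil {
    			fmt.Println(m[1])
    		}
    	}
    }

Cross-checking the printed IDs against later "ContainerStarted" events (as happens for both IDs in this section) is one way to confirm the warning was harmless.
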
Jan 28 17:17:24 crc kubenswrapper[4811]: I0128 17:17:24.795766 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bbc7f9489-jbcqx" event={"ID":"fde783c7-bbcb-4005-9507-0610eb14ba6a","Type":"ContainerDied","Data":"201d1e26dc391317a26f2a3e2673730e2449ec83e3aa362906b130802156941b"}
Jan 28 17:17:24 crc kubenswrapper[4811]: I0128 17:17:24.795793 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bbc7f9489-jbcqx" event={"ID":"fde783c7-bbcb-4005-9507-0610eb14ba6a","Type":"ContainerStarted","Data":"78a047e90cdd76085180fda35e0377841abfc696e11e908744150f7609b4fb55"}
Jan 28 17:17:24 crc kubenswrapper[4811]: I0128 17:17:24.817911 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-7f987646bf-7rgxf" podStartSLOduration=1.8178866390000001 podStartE2EDuration="1.817886639s" podCreationTimestamp="2026-01-28 17:17:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:17:24.812384779 +0000 UTC m=+5537.566748382" watchObservedRunningTime="2026-01-28 17:17:24.817886639 +0000 UTC m=+5537.572250242"
Jan 28 17:17:25 crc kubenswrapper[4811]: I0128 17:17:25.807878 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bbc7f9489-jbcqx" event={"ID":"fde783c7-bbcb-4005-9507-0610eb14ba6a","Type":"ContainerStarted","Data":"78f89a47d530238bc1e232d5499b43537ded37543ea9a3f7f0fad2b5e1e27318"}
Jan 28 17:17:25 crc kubenswrapper[4811]: I0128 17:17:25.808380 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-bbc7f9489-jbcqx"
Jan 28 17:17:25 crc kubenswrapper[4811]: I0128 17:17:25.848257 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-bbc7f9489-jbcqx" podStartSLOduration=3.84822759 podStartE2EDuration="3.84822759s" podCreationTimestamp="2026-01-28 17:17:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:17:25.838655139 +0000 UTC m=+5538.593018732" watchObservedRunningTime="2026-01-28 17:17:25.84822759 +0000 UTC m=+5538.602591213"
Jan 28 17:17:33 crc kubenswrapper[4811]: I0128 17:17:33.319577 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-bbc7f9489-jbcqx"
Jan 28 17:17:33 crc kubenswrapper[4811]: I0128 17:17:33.371759 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8f8875695-cf5c9"]
Jan 28 17:17:33 crc kubenswrapper[4811]: I0128 17:17:33.372005 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8f8875695-cf5c9" podUID="1a3c3123-f412-424c-a561-b50d836f5f54" containerName="dnsmasq-dns" containerID="cri-o://e73ffc237d8e1392056c3e79574d66e62cd7397d6831639a05ac08a8e301462f" gracePeriod=10
Jan 28 17:17:33 crc kubenswrapper[4811]: I0128 17:17:33.869962 4811 generic.go:334] "Generic (PLEG): container finished" podID="1a3c3123-f412-424c-a561-b50d836f5f54" containerID="e73ffc237d8e1392056c3e79574d66e62cd7397d6831639a05ac08a8e301462f" exitCode=0
Jan 28 17:17:33 crc kubenswrapper[4811]: I0128 17:17:33.870031 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8f8875695-cf5c9" event={"ID":"1a3c3123-f412-424c-a561-b50d836f5f54","Type":"ContainerDied","Data":"e73ffc237d8e1392056c3e79574d66e62cd7397d6831639a05ac08a8e301462f"}
Jan 28 17:17:33 crc kubenswrapper[4811]: I0128 17:17:33.870474 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8f8875695-cf5c9" event={"ID":"1a3c3123-f412-424c-a561-b50d836f5f54","Type":"ContainerDied","Data":"6a3ea27714e0917b0b3bee3ad62f1ca1d81615da6959aa5ce9ee75bae9d3f16e"}
Jan 28 17:17:33 crc kubenswrapper[4811]: I0128 17:17:33.870487 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6a3ea27714e0917b0b3bee3ad62f1ca1d81615da6959aa5ce9ee75bae9d3f16e"
Jan 28 17:17:33 crc kubenswrapper[4811]: I0128 17:17:33.912439 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8f8875695-cf5c9"
Jan 28 17:17:33 crc kubenswrapper[4811]: I0128 17:17:33.996076 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a3c3123-f412-424c-a561-b50d836f5f54-dns-svc\") pod \"1a3c3123-f412-424c-a561-b50d836f5f54\" (UID: \"1a3c3123-f412-424c-a561-b50d836f5f54\") "
Jan 28 17:17:33 crc kubenswrapper[4811]: I0128 17:17:33.996128 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nrvph\" (UniqueName: \"kubernetes.io/projected/1a3c3123-f412-424c-a561-b50d836f5f54-kube-api-access-nrvph\") pod \"1a3c3123-f412-424c-a561-b50d836f5f54\" (UID: \"1a3c3123-f412-424c-a561-b50d836f5f54\") "
Jan 28 17:17:33 crc kubenswrapper[4811]: I0128 17:17:33.997038 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a3c3123-f412-424c-a561-b50d836f5f54-config\") pod \"1a3c3123-f412-424c-a561-b50d836f5f54\" (UID: \"1a3c3123-f412-424c-a561-b50d836f5f54\") "
Jan 28 17:17:33 crc kubenswrapper[4811]: I0128 17:17:33.997073 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a3c3123-f412-424c-a561-b50d836f5f54-ovsdbserver-sb\") pod \"1a3c3123-f412-424c-a561-b50d836f5f54\" (UID: \"1a3c3123-f412-424c-a561-b50d836f5f54\") "
Jan 28 17:17:33 crc kubenswrapper[4811]: I0128 17:17:33.997126 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a3c3123-f412-424c-a561-b50d836f5f54-ovsdbserver-nb\") pod \"1a3c3123-f412-424c-a561-b50d836f5f54\" (UID: \"1a3c3123-f412-424c-a561-b50d836f5f54\") "
Jan 28 17:17:34 crc kubenswrapper[4811]: I0128 17:17:34.001693 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a3c3123-f412-424c-a561-b50d836f5f54-kube-api-access-nrvph" (OuterVolumeSpecName: "kube-api-access-nrvph") pod "1a3c3123-f412-424c-a561-b50d836f5f54" (UID: "1a3c3123-f412-424c-a561-b50d836f5f54"). InnerVolumeSpecName "kube-api-access-nrvph". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 17:17:34 crc kubenswrapper[4811]: I0128 17:17:34.033237 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a3c3123-f412-424c-a561-b50d836f5f54-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1a3c3123-f412-424c-a561-b50d836f5f54" (UID: "1a3c3123-f412-424c-a561-b50d836f5f54"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 17:17:34 crc kubenswrapper[4811]: I0128 17:17:34.037387 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a3c3123-f412-424c-a561-b50d836f5f54-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1a3c3123-f412-424c-a561-b50d836f5f54" (UID: "1a3c3123-f412-424c-a561-b50d836f5f54"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 17:17:34 crc kubenswrapper[4811]: I0128 17:17:34.049615 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a3c3123-f412-424c-a561-b50d836f5f54-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1a3c3123-f412-424c-a561-b50d836f5f54" (UID: "1a3c3123-f412-424c-a561-b50d836f5f54"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 17:17:34 crc kubenswrapper[4811]: I0128 17:17:34.054246 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a3c3123-f412-424c-a561-b50d836f5f54-config" (OuterVolumeSpecName: "config") pod "1a3c3123-f412-424c-a561-b50d836f5f54" (UID: "1a3c3123-f412-424c-a561-b50d836f5f54"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 17:17:34 crc kubenswrapper[4811]: I0128 17:17:34.099252 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a3c3123-f412-424c-a561-b50d836f5f54-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Jan 28 17:17:34 crc kubenswrapper[4811]: I0128 17:17:34.099284 4811 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a3c3123-f412-424c-a561-b50d836f5f54-dns-svc\") on node \"crc\" DevicePath \"\""
Jan 28 17:17:34 crc kubenswrapper[4811]: I0128 17:17:34.099298 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nrvph\" (UniqueName: \"kubernetes.io/projected/1a3c3123-f412-424c-a561-b50d836f5f54-kube-api-access-nrvph\") on node \"crc\" DevicePath \"\""
Jan 28 17:17:34 crc kubenswrapper[4811]: I0128 17:17:34.099310 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a3c3123-f412-424c-a561-b50d836f5f54-config\") on node \"crc\" DevicePath \"\""
Jan 28 17:17:34 crc kubenswrapper[4811]: I0128 17:17:34.099318 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a3c3123-f412-424c-a561-b50d836f5f54-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Jan 28 17:17:34 crc kubenswrapper[4811]: I0128 17:17:34.878016 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8f8875695-cf5c9"
Jan 28 17:17:34 crc kubenswrapper[4811]: I0128 17:17:34.904709 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8f8875695-cf5c9"]
Jan 28 17:17:34 crc kubenswrapper[4811]: I0128 17:17:34.914034 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8f8875695-cf5c9"]
Jan 28 17:17:36 crc kubenswrapper[4811]: I0128 17:17:36.353956 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a3c3123-f412-424c-a561-b50d836f5f54" path="/var/lib/kubelet/pods/1a3c3123-f412-424c-a561-b50d836f5f54/volumes"
Jan 28 17:17:53 crc kubenswrapper[4811]: I0128 17:17:53.427888 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-7f987646bf-7rgxf"
Jan 28 17:17:59 crc kubenswrapper[4811]: I0128 17:17:59.907227 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-t4fqr"]
Jan 28 17:17:59 crc kubenswrapper[4811]: E0128 17:17:59.908071 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a3c3123-f412-424c-a561-b50d836f5f54" containerName="init"
Jan 28 17:17:59 crc kubenswrapper[4811]: I0128 17:17:59.908085 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a3c3123-f412-424c-a561-b50d836f5f54" containerName="init"
Jan 28 17:17:59 crc kubenswrapper[4811]: E0128 17:17:59.908119 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a3c3123-f412-424c-a561-b50d836f5f54" containerName="dnsmasq-dns"
Jan 28 17:17:59 crc kubenswrapper[4811]: I0128 17:17:59.908127 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a3c3123-f412-424c-a561-b50d836f5f54" containerName="dnsmasq-dns"
Jan 28 17:17:59 crc kubenswrapper[4811]: I0128 17:17:59.908288 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a3c3123-f412-424c-a561-b50d836f5f54" containerName="dnsmasq-dns"
Jan 28 17:17:59 crc kubenswrapper[4811]: I0128 17:17:59.909018 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-t4fqr"
Jan 28 17:17:59 crc kubenswrapper[4811]: I0128 17:17:59.916831 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-t4fqr"]
Jan 28 17:18:00 crc kubenswrapper[4811]: I0128 17:18:00.010378 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-b227-account-create-update-74djd"]
Jan 28 17:18:00 crc kubenswrapper[4811]: I0128 17:18:00.011775 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-b227-account-create-update-74djd"
Need to start a new one" pod="openstack/glance-b227-account-create-update-74djd" Jan 28 17:18:00 crc kubenswrapper[4811]: I0128 17:18:00.017202 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Jan 28 17:18:00 crc kubenswrapper[4811]: I0128 17:18:00.020911 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-b227-account-create-update-74djd"] Jan 28 17:18:00 crc kubenswrapper[4811]: I0128 17:18:00.026035 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxkmp\" (UniqueName: \"kubernetes.io/projected/79b975a8-25eb-4580-b8d0-786b2efe59b0-kube-api-access-dxkmp\") pod \"glance-db-create-t4fqr\" (UID: \"79b975a8-25eb-4580-b8d0-786b2efe59b0\") " pod="openstack/glance-db-create-t4fqr" Jan 28 17:18:00 crc kubenswrapper[4811]: I0128 17:18:00.026160 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79b975a8-25eb-4580-b8d0-786b2efe59b0-operator-scripts\") pod \"glance-db-create-t4fqr\" (UID: \"79b975a8-25eb-4580-b8d0-786b2efe59b0\") " pod="openstack/glance-db-create-t4fqr" Jan 28 17:18:00 crc kubenswrapper[4811]: I0128 17:18:00.127313 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79b975a8-25eb-4580-b8d0-786b2efe59b0-operator-scripts\") pod \"glance-db-create-t4fqr\" (UID: \"79b975a8-25eb-4580-b8d0-786b2efe59b0\") " pod="openstack/glance-db-create-t4fqr" Jan 28 17:18:00 crc kubenswrapper[4811]: I0128 17:18:00.127481 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxkmp\" (UniqueName: \"kubernetes.io/projected/79b975a8-25eb-4580-b8d0-786b2efe59b0-kube-api-access-dxkmp\") pod \"glance-db-create-t4fqr\" (UID: \"79b975a8-25eb-4580-b8d0-786b2efe59b0\") " pod="openstack/glance-db-create-t4fqr" Jan 28 17:18:00 crc kubenswrapper[4811]: I0128 17:18:00.127575 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b27h9\" (UniqueName: \"kubernetes.io/projected/afaa6d92-7733-4a6e-92d5-860e63786a16-kube-api-access-b27h9\") pod \"glance-b227-account-create-update-74djd\" (UID: \"afaa6d92-7733-4a6e-92d5-860e63786a16\") " pod="openstack/glance-b227-account-create-update-74djd" Jan 28 17:18:00 crc kubenswrapper[4811]: I0128 17:18:00.127625 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/afaa6d92-7733-4a6e-92d5-860e63786a16-operator-scripts\") pod \"glance-b227-account-create-update-74djd\" (UID: \"afaa6d92-7733-4a6e-92d5-860e63786a16\") " pod="openstack/glance-b227-account-create-update-74djd" Jan 28 17:18:00 crc kubenswrapper[4811]: I0128 17:18:00.128078 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79b975a8-25eb-4580-b8d0-786b2efe59b0-operator-scripts\") pod \"glance-db-create-t4fqr\" (UID: \"79b975a8-25eb-4580-b8d0-786b2efe59b0\") " pod="openstack/glance-db-create-t4fqr" Jan 28 17:18:00 crc kubenswrapper[4811]: I0128 17:18:00.155265 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxkmp\" (UniqueName: \"kubernetes.io/projected/79b975a8-25eb-4580-b8d0-786b2efe59b0-kube-api-access-dxkmp\") pod \"glance-db-create-t4fqr\" (UID: 
\"79b975a8-25eb-4580-b8d0-786b2efe59b0\") " pod="openstack/glance-db-create-t4fqr" Jan 28 17:18:00 crc kubenswrapper[4811]: I0128 17:18:00.229418 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b27h9\" (UniqueName: \"kubernetes.io/projected/afaa6d92-7733-4a6e-92d5-860e63786a16-kube-api-access-b27h9\") pod \"glance-b227-account-create-update-74djd\" (UID: \"afaa6d92-7733-4a6e-92d5-860e63786a16\") " pod="openstack/glance-b227-account-create-update-74djd" Jan 28 17:18:00 crc kubenswrapper[4811]: I0128 17:18:00.229578 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/afaa6d92-7733-4a6e-92d5-860e63786a16-operator-scripts\") pod \"glance-b227-account-create-update-74djd\" (UID: \"afaa6d92-7733-4a6e-92d5-860e63786a16\") " pod="openstack/glance-b227-account-create-update-74djd" Jan 28 17:18:00 crc kubenswrapper[4811]: I0128 17:18:00.230374 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/afaa6d92-7733-4a6e-92d5-860e63786a16-operator-scripts\") pod \"glance-b227-account-create-update-74djd\" (UID: \"afaa6d92-7733-4a6e-92d5-860e63786a16\") " pod="openstack/glance-b227-account-create-update-74djd" Jan 28 17:18:00 crc kubenswrapper[4811]: I0128 17:18:00.246798 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b27h9\" (UniqueName: \"kubernetes.io/projected/afaa6d92-7733-4a6e-92d5-860e63786a16-kube-api-access-b27h9\") pod \"glance-b227-account-create-update-74djd\" (UID: \"afaa6d92-7733-4a6e-92d5-860e63786a16\") " pod="openstack/glance-b227-account-create-update-74djd" Jan 28 17:18:00 crc kubenswrapper[4811]: I0128 17:18:00.292187 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-t4fqr" Jan 28 17:18:00 crc kubenswrapper[4811]: I0128 17:18:00.327006 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-b227-account-create-update-74djd" Jan 28 17:18:00 crc kubenswrapper[4811]: I0128 17:18:00.796175 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-t4fqr"] Jan 28 17:18:00 crc kubenswrapper[4811]: I0128 17:18:00.848398 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-b227-account-create-update-74djd"] Jan 28 17:18:01 crc kubenswrapper[4811]: I0128 17:18:01.102211 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-b227-account-create-update-74djd" event={"ID":"afaa6d92-7733-4a6e-92d5-860e63786a16","Type":"ContainerStarted","Data":"36a7ab5f4dfcb0bd6c4af9a30d7c3d2d0a826de22e2a78c1b89b06f38ce51223"} Jan 28 17:18:01 crc kubenswrapper[4811]: I0128 17:18:01.102638 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-b227-account-create-update-74djd" event={"ID":"afaa6d92-7733-4a6e-92d5-860e63786a16","Type":"ContainerStarted","Data":"6844f10ced87822462592a1c22f799b592cf054afe975255734a4047ea276f41"} Jan 28 17:18:01 crc kubenswrapper[4811]: I0128 17:18:01.103513 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-t4fqr" event={"ID":"79b975a8-25eb-4580-b8d0-786b2efe59b0","Type":"ContainerStarted","Data":"1e1733d378109a00a97c562acd9b732f488471f80b7f3d012d93184f9c7b6e2a"} Jan 28 17:18:01 crc kubenswrapper[4811]: I0128 17:18:01.103584 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-t4fqr" event={"ID":"79b975a8-25eb-4580-b8d0-786b2efe59b0","Type":"ContainerStarted","Data":"50a9038913bc5247f8005ac48d0e80db33cce9bf4add7eff128d2e33bac062c0"} Jan 28 17:18:01 crc kubenswrapper[4811]: I0128 17:18:01.147654 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-b227-account-create-update-74djd" podStartSLOduration=2.147628991 podStartE2EDuration="2.147628991s" podCreationTimestamp="2026-01-28 17:17:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:18:01.139908192 +0000 UTC m=+5573.894271775" watchObservedRunningTime="2026-01-28 17:18:01.147628991 +0000 UTC m=+5573.901992584" Jan 28 17:18:01 crc kubenswrapper[4811]: I0128 17:18:01.161789 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-create-t4fqr" podStartSLOduration=2.161762176 podStartE2EDuration="2.161762176s" podCreationTimestamp="2026-01-28 17:17:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:18:01.154888349 +0000 UTC m=+5573.909251942" watchObservedRunningTime="2026-01-28 17:18:01.161762176 +0000 UTC m=+5573.916125759" Jan 28 17:18:02 crc kubenswrapper[4811]: I0128 17:18:02.114784 4811 generic.go:334] "Generic (PLEG): container finished" podID="afaa6d92-7733-4a6e-92d5-860e63786a16" containerID="36a7ab5f4dfcb0bd6c4af9a30d7c3d2d0a826de22e2a78c1b89b06f38ce51223" exitCode=0 Jan 28 17:18:02 crc kubenswrapper[4811]: I0128 17:18:02.114889 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-b227-account-create-update-74djd" event={"ID":"afaa6d92-7733-4a6e-92d5-860e63786a16","Type":"ContainerDied","Data":"36a7ab5f4dfcb0bd6c4af9a30d7c3d2d0a826de22e2a78c1b89b06f38ce51223"} Jan 28 17:18:02 crc kubenswrapper[4811]: I0128 17:18:02.116769 4811 generic.go:334] "Generic (PLEG): container finished" 
podID="79b975a8-25eb-4580-b8d0-786b2efe59b0" containerID="1e1733d378109a00a97c562acd9b732f488471f80b7f3d012d93184f9c7b6e2a" exitCode=0 Jan 28 17:18:02 crc kubenswrapper[4811]: I0128 17:18:02.116808 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-t4fqr" event={"ID":"79b975a8-25eb-4580-b8d0-786b2efe59b0","Type":"ContainerDied","Data":"1e1733d378109a00a97c562acd9b732f488471f80b7f3d012d93184f9c7b6e2a"} Jan 28 17:18:03 crc kubenswrapper[4811]: I0128 17:18:03.086959 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 17:18:03 crc kubenswrapper[4811]: I0128 17:18:03.088310 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 17:18:03 crc kubenswrapper[4811]: I0128 17:18:03.569237 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-t4fqr" Jan 28 17:18:03 crc kubenswrapper[4811]: I0128 17:18:03.574299 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-b227-account-create-update-74djd" Jan 28 17:18:03 crc kubenswrapper[4811]: I0128 17:18:03.698422 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/afaa6d92-7733-4a6e-92d5-860e63786a16-operator-scripts\") pod \"afaa6d92-7733-4a6e-92d5-860e63786a16\" (UID: \"afaa6d92-7733-4a6e-92d5-860e63786a16\") " Jan 28 17:18:03 crc kubenswrapper[4811]: I0128 17:18:03.698494 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b27h9\" (UniqueName: \"kubernetes.io/projected/afaa6d92-7733-4a6e-92d5-860e63786a16-kube-api-access-b27h9\") pod \"afaa6d92-7733-4a6e-92d5-860e63786a16\" (UID: \"afaa6d92-7733-4a6e-92d5-860e63786a16\") " Jan 28 17:18:03 crc kubenswrapper[4811]: I0128 17:18:03.698677 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dxkmp\" (UniqueName: \"kubernetes.io/projected/79b975a8-25eb-4580-b8d0-786b2efe59b0-kube-api-access-dxkmp\") pod \"79b975a8-25eb-4580-b8d0-786b2efe59b0\" (UID: \"79b975a8-25eb-4580-b8d0-786b2efe59b0\") " Jan 28 17:18:03 crc kubenswrapper[4811]: I0128 17:18:03.698706 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79b975a8-25eb-4580-b8d0-786b2efe59b0-operator-scripts\") pod \"79b975a8-25eb-4580-b8d0-786b2efe59b0\" (UID: \"79b975a8-25eb-4580-b8d0-786b2efe59b0\") " Jan 28 17:18:03 crc kubenswrapper[4811]: I0128 17:18:03.699379 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79b975a8-25eb-4580-b8d0-786b2efe59b0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "79b975a8-25eb-4580-b8d0-786b2efe59b0" (UID: "79b975a8-25eb-4580-b8d0-786b2efe59b0"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:18:03 crc kubenswrapper[4811]: I0128 17:18:03.699397 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/afaa6d92-7733-4a6e-92d5-860e63786a16-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "afaa6d92-7733-4a6e-92d5-860e63786a16" (UID: "afaa6d92-7733-4a6e-92d5-860e63786a16"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:18:03 crc kubenswrapper[4811]: I0128 17:18:03.704087 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afaa6d92-7733-4a6e-92d5-860e63786a16-kube-api-access-b27h9" (OuterVolumeSpecName: "kube-api-access-b27h9") pod "afaa6d92-7733-4a6e-92d5-860e63786a16" (UID: "afaa6d92-7733-4a6e-92d5-860e63786a16"). InnerVolumeSpecName "kube-api-access-b27h9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:18:03 crc kubenswrapper[4811]: I0128 17:18:03.704147 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79b975a8-25eb-4580-b8d0-786b2efe59b0-kube-api-access-dxkmp" (OuterVolumeSpecName: "kube-api-access-dxkmp") pod "79b975a8-25eb-4580-b8d0-786b2efe59b0" (UID: "79b975a8-25eb-4580-b8d0-786b2efe59b0"). InnerVolumeSpecName "kube-api-access-dxkmp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:18:03 crc kubenswrapper[4811]: I0128 17:18:03.800472 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/afaa6d92-7733-4a6e-92d5-860e63786a16-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:03 crc kubenswrapper[4811]: I0128 17:18:03.800746 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b27h9\" (UniqueName: \"kubernetes.io/projected/afaa6d92-7733-4a6e-92d5-860e63786a16-kube-api-access-b27h9\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:03 crc kubenswrapper[4811]: I0128 17:18:03.800806 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dxkmp\" (UniqueName: \"kubernetes.io/projected/79b975a8-25eb-4580-b8d0-786b2efe59b0-kube-api-access-dxkmp\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:03 crc kubenswrapper[4811]: I0128 17:18:03.800901 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79b975a8-25eb-4580-b8d0-786b2efe59b0-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:04 crc kubenswrapper[4811]: I0128 17:18:04.143196 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-b227-account-create-update-74djd" event={"ID":"afaa6d92-7733-4a6e-92d5-860e63786a16","Type":"ContainerDied","Data":"6844f10ced87822462592a1c22f799b592cf054afe975255734a4047ea276f41"} Jan 28 17:18:04 crc kubenswrapper[4811]: I0128 17:18:04.143236 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-b227-account-create-update-74djd" Jan 28 17:18:04 crc kubenswrapper[4811]: I0128 17:18:04.143242 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6844f10ced87822462592a1c22f799b592cf054afe975255734a4047ea276f41" Jan 28 17:18:04 crc kubenswrapper[4811]: I0128 17:18:04.144649 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-t4fqr" event={"ID":"79b975a8-25eb-4580-b8d0-786b2efe59b0","Type":"ContainerDied","Data":"50a9038913bc5247f8005ac48d0e80db33cce9bf4add7eff128d2e33bac062c0"} Jan 28 17:18:04 crc kubenswrapper[4811]: I0128 17:18:04.144776 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="50a9038913bc5247f8005ac48d0e80db33cce9bf4add7eff128d2e33bac062c0" Jan 28 17:18:04 crc kubenswrapper[4811]: I0128 17:18:04.144703 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-t4fqr" Jan 28 17:18:05 crc kubenswrapper[4811]: I0128 17:18:05.148422 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-wjtzr"] Jan 28 17:18:05 crc kubenswrapper[4811]: E0128 17:18:05.149052 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79b975a8-25eb-4580-b8d0-786b2efe59b0" containerName="mariadb-database-create" Jan 28 17:18:05 crc kubenswrapper[4811]: I0128 17:18:05.149069 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="79b975a8-25eb-4580-b8d0-786b2efe59b0" containerName="mariadb-database-create" Jan 28 17:18:05 crc kubenswrapper[4811]: E0128 17:18:05.149093 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afaa6d92-7733-4a6e-92d5-860e63786a16" containerName="mariadb-account-create-update" Jan 28 17:18:05 crc kubenswrapper[4811]: I0128 17:18:05.149101 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="afaa6d92-7733-4a6e-92d5-860e63786a16" containerName="mariadb-account-create-update" Jan 28 17:18:05 crc kubenswrapper[4811]: I0128 17:18:05.149321 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="afaa6d92-7733-4a6e-92d5-860e63786a16" containerName="mariadb-account-create-update" Jan 28 17:18:05 crc kubenswrapper[4811]: I0128 17:18:05.149345 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="79b975a8-25eb-4580-b8d0-786b2efe59b0" containerName="mariadb-database-create" Jan 28 17:18:05 crc kubenswrapper[4811]: I0128 17:18:05.150069 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-wjtzr" Jan 28 17:18:05 crc kubenswrapper[4811]: I0128 17:18:05.154209 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Jan 28 17:18:05 crc kubenswrapper[4811]: I0128 17:18:05.154550 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-r28fl" Jan 28 17:18:05 crc kubenswrapper[4811]: I0128 17:18:05.162281 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-wjtzr"] Jan 28 17:18:05 crc kubenswrapper[4811]: I0128 17:18:05.224304 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f00cc50-d228-4d46-9778-d0c6db4a23fa-combined-ca-bundle\") pod \"glance-db-sync-wjtzr\" (UID: \"6f00cc50-d228-4d46-9778-d0c6db4a23fa\") " pod="openstack/glance-db-sync-wjtzr" Jan 28 17:18:05 crc kubenswrapper[4811]: I0128 17:18:05.225116 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f00cc50-d228-4d46-9778-d0c6db4a23fa-config-data\") pod \"glance-db-sync-wjtzr\" (UID: \"6f00cc50-d228-4d46-9778-d0c6db4a23fa\") " pod="openstack/glance-db-sync-wjtzr" Jan 28 17:18:05 crc kubenswrapper[4811]: I0128 17:18:05.225142 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6f00cc50-d228-4d46-9778-d0c6db4a23fa-db-sync-config-data\") pod \"glance-db-sync-wjtzr\" (UID: \"6f00cc50-d228-4d46-9778-d0c6db4a23fa\") " pod="openstack/glance-db-sync-wjtzr" Jan 28 17:18:05 crc kubenswrapper[4811]: I0128 17:18:05.225183 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2nfgh\" (UniqueName: \"kubernetes.io/projected/6f00cc50-d228-4d46-9778-d0c6db4a23fa-kube-api-access-2nfgh\") pod \"glance-db-sync-wjtzr\" (UID: \"6f00cc50-d228-4d46-9778-d0c6db4a23fa\") " pod="openstack/glance-db-sync-wjtzr" Jan 28 17:18:05 crc kubenswrapper[4811]: I0128 17:18:05.326855 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2nfgh\" (UniqueName: \"kubernetes.io/projected/6f00cc50-d228-4d46-9778-d0c6db4a23fa-kube-api-access-2nfgh\") pod \"glance-db-sync-wjtzr\" (UID: \"6f00cc50-d228-4d46-9778-d0c6db4a23fa\") " pod="openstack/glance-db-sync-wjtzr" Jan 28 17:18:05 crc kubenswrapper[4811]: I0128 17:18:05.327255 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f00cc50-d228-4d46-9778-d0c6db4a23fa-combined-ca-bundle\") pod \"glance-db-sync-wjtzr\" (UID: \"6f00cc50-d228-4d46-9778-d0c6db4a23fa\") " pod="openstack/glance-db-sync-wjtzr" Jan 28 17:18:05 crc kubenswrapper[4811]: I0128 17:18:05.327290 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f00cc50-d228-4d46-9778-d0c6db4a23fa-config-data\") pod \"glance-db-sync-wjtzr\" (UID: \"6f00cc50-d228-4d46-9778-d0c6db4a23fa\") " pod="openstack/glance-db-sync-wjtzr" Jan 28 17:18:05 crc kubenswrapper[4811]: I0128 17:18:05.327313 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6f00cc50-d228-4d46-9778-d0c6db4a23fa-db-sync-config-data\") pod 
\"glance-db-sync-wjtzr\" (UID: \"6f00cc50-d228-4d46-9778-d0c6db4a23fa\") " pod="openstack/glance-db-sync-wjtzr" Jan 28 17:18:05 crc kubenswrapper[4811]: I0128 17:18:05.332257 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f00cc50-d228-4d46-9778-d0c6db4a23fa-config-data\") pod \"glance-db-sync-wjtzr\" (UID: \"6f00cc50-d228-4d46-9778-d0c6db4a23fa\") " pod="openstack/glance-db-sync-wjtzr" Jan 28 17:18:05 crc kubenswrapper[4811]: I0128 17:18:05.332720 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6f00cc50-d228-4d46-9778-d0c6db4a23fa-db-sync-config-data\") pod \"glance-db-sync-wjtzr\" (UID: \"6f00cc50-d228-4d46-9778-d0c6db4a23fa\") " pod="openstack/glance-db-sync-wjtzr" Jan 28 17:18:05 crc kubenswrapper[4811]: I0128 17:18:05.343809 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f00cc50-d228-4d46-9778-d0c6db4a23fa-combined-ca-bundle\") pod \"glance-db-sync-wjtzr\" (UID: \"6f00cc50-d228-4d46-9778-d0c6db4a23fa\") " pod="openstack/glance-db-sync-wjtzr" Jan 28 17:18:05 crc kubenswrapper[4811]: I0128 17:18:05.344068 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2nfgh\" (UniqueName: \"kubernetes.io/projected/6f00cc50-d228-4d46-9778-d0c6db4a23fa-kube-api-access-2nfgh\") pod \"glance-db-sync-wjtzr\" (UID: \"6f00cc50-d228-4d46-9778-d0c6db4a23fa\") " pod="openstack/glance-db-sync-wjtzr" Jan 28 17:18:05 crc kubenswrapper[4811]: I0128 17:18:05.474742 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-wjtzr" Jan 28 17:18:06 crc kubenswrapper[4811]: W0128 17:18:06.082507 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6f00cc50_d228_4d46_9778_d0c6db4a23fa.slice/crio-cb19dc5551e31a85de39c7b5edbd1ed6b11c9132572af4cd580bdcbd4739d449 WatchSource:0}: Error finding container cb19dc5551e31a85de39c7b5edbd1ed6b11c9132572af4cd580bdcbd4739d449: Status 404 returned error can't find the container with id cb19dc5551e31a85de39c7b5edbd1ed6b11c9132572af4cd580bdcbd4739d449 Jan 28 17:18:06 crc kubenswrapper[4811]: I0128 17:18:06.088140 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-wjtzr"] Jan 28 17:18:06 crc kubenswrapper[4811]: I0128 17:18:06.177947 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-wjtzr" event={"ID":"6f00cc50-d228-4d46-9778-d0c6db4a23fa","Type":"ContainerStarted","Data":"cb19dc5551e31a85de39c7b5edbd1ed6b11c9132572af4cd580bdcbd4739d449"} Jan 28 17:18:07 crc kubenswrapper[4811]: I0128 17:18:07.189883 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-wjtzr" event={"ID":"6f00cc50-d228-4d46-9778-d0c6db4a23fa","Type":"ContainerStarted","Data":"85a5f554cfb121a592972aa192922edb9cdbdc5399a5c7364bfafe964e55b4ab"} Jan 28 17:18:07 crc kubenswrapper[4811]: I0128 17:18:07.210835 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-wjtzr" podStartSLOduration=2.210812041 podStartE2EDuration="2.210812041s" podCreationTimestamp="2026-01-28 17:18:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:18:07.203270186 +0000 UTC m=+5579.957633769" 
watchObservedRunningTime="2026-01-28 17:18:07.210812041 +0000 UTC m=+5579.965175624" Jan 28 17:18:10 crc kubenswrapper[4811]: I0128 17:18:10.224731 4811 generic.go:334] "Generic (PLEG): container finished" podID="6f00cc50-d228-4d46-9778-d0c6db4a23fa" containerID="85a5f554cfb121a592972aa192922edb9cdbdc5399a5c7364bfafe964e55b4ab" exitCode=0 Jan 28 17:18:10 crc kubenswrapper[4811]: I0128 17:18:10.224864 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-wjtzr" event={"ID":"6f00cc50-d228-4d46-9778-d0c6db4a23fa","Type":"ContainerDied","Data":"85a5f554cfb121a592972aa192922edb9cdbdc5399a5c7364bfafe964e55b4ab"} Jan 28 17:18:11 crc kubenswrapper[4811]: I0128 17:18:11.596642 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-wjtzr" Jan 28 17:18:11 crc kubenswrapper[4811]: I0128 17:18:11.638072 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f00cc50-d228-4d46-9778-d0c6db4a23fa-config-data\") pod \"6f00cc50-d228-4d46-9778-d0c6db4a23fa\" (UID: \"6f00cc50-d228-4d46-9778-d0c6db4a23fa\") " Jan 28 17:18:11 crc kubenswrapper[4811]: I0128 17:18:11.638224 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2nfgh\" (UniqueName: \"kubernetes.io/projected/6f00cc50-d228-4d46-9778-d0c6db4a23fa-kube-api-access-2nfgh\") pod \"6f00cc50-d228-4d46-9778-d0c6db4a23fa\" (UID: \"6f00cc50-d228-4d46-9778-d0c6db4a23fa\") " Jan 28 17:18:11 crc kubenswrapper[4811]: I0128 17:18:11.638324 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6f00cc50-d228-4d46-9778-d0c6db4a23fa-db-sync-config-data\") pod \"6f00cc50-d228-4d46-9778-d0c6db4a23fa\" (UID: \"6f00cc50-d228-4d46-9778-d0c6db4a23fa\") " Jan 28 17:18:11 crc kubenswrapper[4811]: I0128 17:18:11.638373 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f00cc50-d228-4d46-9778-d0c6db4a23fa-combined-ca-bundle\") pod \"6f00cc50-d228-4d46-9778-d0c6db4a23fa\" (UID: \"6f00cc50-d228-4d46-9778-d0c6db4a23fa\") " Jan 28 17:18:11 crc kubenswrapper[4811]: I0128 17:18:11.642842 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f00cc50-d228-4d46-9778-d0c6db4a23fa-kube-api-access-2nfgh" (OuterVolumeSpecName: "kube-api-access-2nfgh") pod "6f00cc50-d228-4d46-9778-d0c6db4a23fa" (UID: "6f00cc50-d228-4d46-9778-d0c6db4a23fa"). InnerVolumeSpecName "kube-api-access-2nfgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:18:11 crc kubenswrapper[4811]: I0128 17:18:11.645952 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f00cc50-d228-4d46-9778-d0c6db4a23fa-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "6f00cc50-d228-4d46-9778-d0c6db4a23fa" (UID: "6f00cc50-d228-4d46-9778-d0c6db4a23fa"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:18:11 crc kubenswrapper[4811]: I0128 17:18:11.676499 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f00cc50-d228-4d46-9778-d0c6db4a23fa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6f00cc50-d228-4d46-9778-d0c6db4a23fa" (UID: "6f00cc50-d228-4d46-9778-d0c6db4a23fa"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:18:11 crc kubenswrapper[4811]: I0128 17:18:11.686853 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f00cc50-d228-4d46-9778-d0c6db4a23fa-config-data" (OuterVolumeSpecName: "config-data") pod "6f00cc50-d228-4d46-9778-d0c6db4a23fa" (UID: "6f00cc50-d228-4d46-9778-d0c6db4a23fa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:18:11 crc kubenswrapper[4811]: I0128 17:18:11.740658 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f00cc50-d228-4d46-9778-d0c6db4a23fa-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:11 crc kubenswrapper[4811]: I0128 17:18:11.740984 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2nfgh\" (UniqueName: \"kubernetes.io/projected/6f00cc50-d228-4d46-9778-d0c6db4a23fa-kube-api-access-2nfgh\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:11 crc kubenswrapper[4811]: I0128 17:18:11.740999 4811 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6f00cc50-d228-4d46-9778-d0c6db4a23fa-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:11 crc kubenswrapper[4811]: I0128 17:18:11.741007 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f00cc50-d228-4d46-9778-d0c6db4a23fa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.242138 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-wjtzr" event={"ID":"6f00cc50-d228-4d46-9778-d0c6db4a23fa","Type":"ContainerDied","Data":"cb19dc5551e31a85de39c7b5edbd1ed6b11c9132572af4cd580bdcbd4739d449"} Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.242191 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cb19dc5551e31a85de39c7b5edbd1ed6b11c9132572af4cd580bdcbd4739d449" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.242224 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-wjtzr" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.654799 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-857b556495-wb2l9"] Jan 28 17:18:12 crc kubenswrapper[4811]: E0128 17:18:12.655227 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f00cc50-d228-4d46-9778-d0c6db4a23fa" containerName="glance-db-sync" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.655244 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f00cc50-d228-4d46-9778-d0c6db4a23fa" containerName="glance-db-sync" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.655522 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f00cc50-d228-4d46-9778-d0c6db4a23fa" containerName="glance-db-sync" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.656594 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-857b556495-wb2l9" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.704111 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-857b556495-wb2l9"] Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.758515 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pc7hs\" (UniqueName: \"kubernetes.io/projected/2cd0bfee-b71c-4898-bc28-8ec123f17177-kube-api-access-pc7hs\") pod \"dnsmasq-dns-857b556495-wb2l9\" (UID: \"2cd0bfee-b71c-4898-bc28-8ec123f17177\") " pod="openstack/dnsmasq-dns-857b556495-wb2l9" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.758573 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2cd0bfee-b71c-4898-bc28-8ec123f17177-config\") pod \"dnsmasq-dns-857b556495-wb2l9\" (UID: \"2cd0bfee-b71c-4898-bc28-8ec123f17177\") " pod="openstack/dnsmasq-dns-857b556495-wb2l9" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.758625 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2cd0bfee-b71c-4898-bc28-8ec123f17177-ovsdbserver-sb\") pod \"dnsmasq-dns-857b556495-wb2l9\" (UID: \"2cd0bfee-b71c-4898-bc28-8ec123f17177\") " pod="openstack/dnsmasq-dns-857b556495-wb2l9" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.758679 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2cd0bfee-b71c-4898-bc28-8ec123f17177-dns-svc\") pod \"dnsmasq-dns-857b556495-wb2l9\" (UID: \"2cd0bfee-b71c-4898-bc28-8ec123f17177\") " pod="openstack/dnsmasq-dns-857b556495-wb2l9" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.758803 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2cd0bfee-b71c-4898-bc28-8ec123f17177-ovsdbserver-nb\") pod \"dnsmasq-dns-857b556495-wb2l9\" (UID: \"2cd0bfee-b71c-4898-bc28-8ec123f17177\") " pod="openstack/dnsmasq-dns-857b556495-wb2l9" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.778452 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.780295 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.787008 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-r28fl" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.792606 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.792660 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.792905 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.825202 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.861583 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a9b56e4-9483-4e78-9e34-ca75c82f846f-config-data\") pod \"glance-default-external-api-0\" (UID: \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\") " pod="openstack/glance-default-external-api-0" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.861658 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pc7hs\" (UniqueName: \"kubernetes.io/projected/2cd0bfee-b71c-4898-bc28-8ec123f17177-kube-api-access-pc7hs\") pod \"dnsmasq-dns-857b556495-wb2l9\" (UID: \"2cd0bfee-b71c-4898-bc28-8ec123f17177\") " pod="openstack/dnsmasq-dns-857b556495-wb2l9" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.861685 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2cd0bfee-b71c-4898-bc28-8ec123f17177-config\") pod \"dnsmasq-dns-857b556495-wb2l9\" (UID: \"2cd0bfee-b71c-4898-bc28-8ec123f17177\") " pod="openstack/dnsmasq-dns-857b556495-wb2l9" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.861712 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6a9b56e4-9483-4e78-9e34-ca75c82f846f-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\") " pod="openstack/glance-default-external-api-0" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.861730 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a9b56e4-9483-4e78-9e34-ca75c82f846f-logs\") pod \"glance-default-external-api-0\" (UID: \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\") " pod="openstack/glance-default-external-api-0" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.861763 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2cd0bfee-b71c-4898-bc28-8ec123f17177-ovsdbserver-sb\") pod \"dnsmasq-dns-857b556495-wb2l9\" (UID: \"2cd0bfee-b71c-4898-bc28-8ec123f17177\") " pod="openstack/dnsmasq-dns-857b556495-wb2l9" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.861806 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6a9b56e4-9483-4e78-9e34-ca75c82f846f-scripts\") pod 
\"glance-default-external-api-0\" (UID: \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\") " pod="openstack/glance-default-external-api-0" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.861836 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wsps\" (UniqueName: \"kubernetes.io/projected/6a9b56e4-9483-4e78-9e34-ca75c82f846f-kube-api-access-8wsps\") pod \"glance-default-external-api-0\" (UID: \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\") " pod="openstack/glance-default-external-api-0" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.861863 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a9b56e4-9483-4e78-9e34-ca75c82f846f-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\") " pod="openstack/glance-default-external-api-0" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.861880 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2cd0bfee-b71c-4898-bc28-8ec123f17177-dns-svc\") pod \"dnsmasq-dns-857b556495-wb2l9\" (UID: \"2cd0bfee-b71c-4898-bc28-8ec123f17177\") " pod="openstack/dnsmasq-dns-857b556495-wb2l9" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.861917 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/6a9b56e4-9483-4e78-9e34-ca75c82f846f-ceph\") pod \"glance-default-external-api-0\" (UID: \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\") " pod="openstack/glance-default-external-api-0" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.862001 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2cd0bfee-b71c-4898-bc28-8ec123f17177-ovsdbserver-nb\") pod \"dnsmasq-dns-857b556495-wb2l9\" (UID: \"2cd0bfee-b71c-4898-bc28-8ec123f17177\") " pod="openstack/dnsmasq-dns-857b556495-wb2l9" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.862847 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2cd0bfee-b71c-4898-bc28-8ec123f17177-ovsdbserver-nb\") pod \"dnsmasq-dns-857b556495-wb2l9\" (UID: \"2cd0bfee-b71c-4898-bc28-8ec123f17177\") " pod="openstack/dnsmasq-dns-857b556495-wb2l9" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.865037 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2cd0bfee-b71c-4898-bc28-8ec123f17177-config\") pod \"dnsmasq-dns-857b556495-wb2l9\" (UID: \"2cd0bfee-b71c-4898-bc28-8ec123f17177\") " pod="openstack/dnsmasq-dns-857b556495-wb2l9" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.865263 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2cd0bfee-b71c-4898-bc28-8ec123f17177-ovsdbserver-sb\") pod \"dnsmasq-dns-857b556495-wb2l9\" (UID: \"2cd0bfee-b71c-4898-bc28-8ec123f17177\") " pod="openstack/dnsmasq-dns-857b556495-wb2l9" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.867997 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2cd0bfee-b71c-4898-bc28-8ec123f17177-dns-svc\") pod \"dnsmasq-dns-857b556495-wb2l9\" (UID: 
\"2cd0bfee-b71c-4898-bc28-8ec123f17177\") " pod="openstack/dnsmasq-dns-857b556495-wb2l9" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.920902 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pc7hs\" (UniqueName: \"kubernetes.io/projected/2cd0bfee-b71c-4898-bc28-8ec123f17177-kube-api-access-pc7hs\") pod \"dnsmasq-dns-857b556495-wb2l9\" (UID: \"2cd0bfee-b71c-4898-bc28-8ec123f17177\") " pod="openstack/dnsmasq-dns-857b556495-wb2l9" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.963808 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6a9b56e4-9483-4e78-9e34-ca75c82f846f-scripts\") pod \"glance-default-external-api-0\" (UID: \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\") " pod="openstack/glance-default-external-api-0" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.964163 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wsps\" (UniqueName: \"kubernetes.io/projected/6a9b56e4-9483-4e78-9e34-ca75c82f846f-kube-api-access-8wsps\") pod \"glance-default-external-api-0\" (UID: \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\") " pod="openstack/glance-default-external-api-0" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.964203 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a9b56e4-9483-4e78-9e34-ca75c82f846f-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\") " pod="openstack/glance-default-external-api-0" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.964248 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/6a9b56e4-9483-4e78-9e34-ca75c82f846f-ceph\") pod \"glance-default-external-api-0\" (UID: \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\") " pod="openstack/glance-default-external-api-0" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.964346 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a9b56e4-9483-4e78-9e34-ca75c82f846f-config-data\") pod \"glance-default-external-api-0\" (UID: \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\") " pod="openstack/glance-default-external-api-0" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.964388 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a9b56e4-9483-4e78-9e34-ca75c82f846f-logs\") pod \"glance-default-external-api-0\" (UID: \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\") " pod="openstack/glance-default-external-api-0" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.964411 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6a9b56e4-9483-4e78-9e34-ca75c82f846f-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\") " pod="openstack/glance-default-external-api-0" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.964973 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a9b56e4-9483-4e78-9e34-ca75c82f846f-logs\") pod \"glance-default-external-api-0\" (UID: \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\") " pod="openstack/glance-default-external-api-0" Jan 28 17:18:12 crc 
kubenswrapper[4811]: I0128 17:18:12.965398 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6a9b56e4-9483-4e78-9e34-ca75c82f846f-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\") " pod="openstack/glance-default-external-api-0" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.968712 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6a9b56e4-9483-4e78-9e34-ca75c82f846f-scripts\") pod \"glance-default-external-api-0\" (UID: \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\") " pod="openstack/glance-default-external-api-0" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.968854 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a9b56e4-9483-4e78-9e34-ca75c82f846f-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\") " pod="openstack/glance-default-external-api-0" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.969351 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/6a9b56e4-9483-4e78-9e34-ca75c82f846f-ceph\") pod \"glance-default-external-api-0\" (UID: \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\") " pod="openstack/glance-default-external-api-0" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.983889 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-857b556495-wb2l9" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.984632 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wsps\" (UniqueName: \"kubernetes.io/projected/6a9b56e4-9483-4e78-9e34-ca75c82f846f-kube-api-access-8wsps\") pod \"glance-default-external-api-0\" (UID: \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\") " pod="openstack/glance-default-external-api-0" Jan 28 17:18:12 crc kubenswrapper[4811]: I0128 17:18:12.985183 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a9b56e4-9483-4e78-9e34-ca75c82f846f-config-data\") pod \"glance-default-external-api-0\" (UID: \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\") " pod="openstack/glance-default-external-api-0" Jan 28 17:18:13 crc kubenswrapper[4811]: I0128 17:18:13.016650 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 28 17:18:13 crc kubenswrapper[4811]: I0128 17:18:13.017986 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 28 17:18:13 crc kubenswrapper[4811]: I0128 17:18:13.027166 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Jan 28 17:18:13 crc kubenswrapper[4811]: I0128 17:18:13.045919 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 28 17:18:13 crc kubenswrapper[4811]: I0128 17:18:13.068451 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/10d0c1e3-184c-44d6-8a56-26cc234d77cc-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:18:13 crc kubenswrapper[4811]: I0128 17:18:13.068514 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10d0c1e3-184c-44d6-8a56-26cc234d77cc-config-data\") pod \"glance-default-internal-api-0\" (UID: \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:18:13 crc kubenswrapper[4811]: I0128 17:18:13.068551 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/10d0c1e3-184c-44d6-8a56-26cc234d77cc-logs\") pod \"glance-default-internal-api-0\" (UID: \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:18:13 crc kubenswrapper[4811]: I0128 17:18:13.068593 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kslqd\" (UniqueName: \"kubernetes.io/projected/10d0c1e3-184c-44d6-8a56-26cc234d77cc-kube-api-access-kslqd\") pod \"glance-default-internal-api-0\" (UID: \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:18:13 crc kubenswrapper[4811]: I0128 17:18:13.068746 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/10d0c1e3-184c-44d6-8a56-26cc234d77cc-scripts\") pod \"glance-default-internal-api-0\" (UID: \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:18:13 crc kubenswrapper[4811]: I0128 17:18:13.068857 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10d0c1e3-184c-44d6-8a56-26cc234d77cc-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:18:13 crc kubenswrapper[4811]: I0128 17:18:13.068895 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/10d0c1e3-184c-44d6-8a56-26cc234d77cc-ceph\") pod \"glance-default-internal-api-0\" (UID: \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:18:13 crc kubenswrapper[4811]: I0128 17:18:13.100737 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 28 17:18:13 crc kubenswrapper[4811]: I0128 17:18:13.170382 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kslqd\" (UniqueName: \"kubernetes.io/projected/10d0c1e3-184c-44d6-8a56-26cc234d77cc-kube-api-access-kslqd\") pod \"glance-default-internal-api-0\" (UID: \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:18:13 crc kubenswrapper[4811]: I0128 17:18:13.170798 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/10d0c1e3-184c-44d6-8a56-26cc234d77cc-scripts\") pod \"glance-default-internal-api-0\" (UID: \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:18:13 crc kubenswrapper[4811]: I0128 17:18:13.170845 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10d0c1e3-184c-44d6-8a56-26cc234d77cc-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:18:13 crc kubenswrapper[4811]: I0128 17:18:13.170869 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/10d0c1e3-184c-44d6-8a56-26cc234d77cc-ceph\") pod \"glance-default-internal-api-0\" (UID: \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:18:13 crc kubenswrapper[4811]: I0128 17:18:13.170990 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/10d0c1e3-184c-44d6-8a56-26cc234d77cc-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:18:13 crc kubenswrapper[4811]: I0128 17:18:13.171018 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10d0c1e3-184c-44d6-8a56-26cc234d77cc-config-data\") pod \"glance-default-internal-api-0\" (UID: \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:18:13 crc kubenswrapper[4811]: I0128 17:18:13.171062 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/10d0c1e3-184c-44d6-8a56-26cc234d77cc-logs\") pod \"glance-default-internal-api-0\" (UID: \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:18:13 crc kubenswrapper[4811]: I0128 17:18:13.171727 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/10d0c1e3-184c-44d6-8a56-26cc234d77cc-logs\") pod \"glance-default-internal-api-0\" (UID: \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:18:13 crc kubenswrapper[4811]: I0128 17:18:13.171993 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/10d0c1e3-184c-44d6-8a56-26cc234d77cc-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:18:13 crc kubenswrapper[4811]: I0128 17:18:13.180083 4811 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/10d0c1e3-184c-44d6-8a56-26cc234d77cc-ceph\") pod \"glance-default-internal-api-0\" (UID: \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:18:13 crc kubenswrapper[4811]: I0128 17:18:13.180726 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10d0c1e3-184c-44d6-8a56-26cc234d77cc-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:18:13 crc kubenswrapper[4811]: I0128 17:18:13.180948 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/10d0c1e3-184c-44d6-8a56-26cc234d77cc-scripts\") pod \"glance-default-internal-api-0\" (UID: \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:18:13 crc kubenswrapper[4811]: I0128 17:18:13.181507 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10d0c1e3-184c-44d6-8a56-26cc234d77cc-config-data\") pod \"glance-default-internal-api-0\" (UID: \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:18:13 crc kubenswrapper[4811]: I0128 17:18:13.189695 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kslqd\" (UniqueName: \"kubernetes.io/projected/10d0c1e3-184c-44d6-8a56-26cc234d77cc-kube-api-access-kslqd\") pod \"glance-default-internal-api-0\" (UID: \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:18:13 crc kubenswrapper[4811]: I0128 17:18:13.390864 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 28 17:18:13 crc kubenswrapper[4811]: I0128 17:18:13.561635 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 28 17:18:13 crc kubenswrapper[4811]: I0128 17:18:13.572361 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-857b556495-wb2l9"] Jan 28 17:18:13 crc kubenswrapper[4811]: I0128 17:18:13.948820 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 28 17:18:13 crc kubenswrapper[4811]: I0128 17:18:13.996311 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 28 17:18:14 crc kubenswrapper[4811]: W0128 17:18:14.004616 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod10d0c1e3_184c_44d6_8a56_26cc234d77cc.slice/crio-7edb09007a732c505884e66c8dff2e11950063252b0daf60efaf6c80ba980c63 WatchSource:0}: Error finding container 7edb09007a732c505884e66c8dff2e11950063252b0daf60efaf6c80ba980c63: Status 404 returned error can't find the container with id 7edb09007a732c505884e66c8dff2e11950063252b0daf60efaf6c80ba980c63 Jan 28 17:18:14 crc kubenswrapper[4811]: I0128 17:18:14.272554 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6a9b56e4-9483-4e78-9e34-ca75c82f846f","Type":"ContainerStarted","Data":"9177847ab1b8f31ba53e989fadc36be890991ac80af9a3f1e67ac98f33fab49d"} Jan 28 17:18:14 crc kubenswrapper[4811]: I0128 17:18:14.272871 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6a9b56e4-9483-4e78-9e34-ca75c82f846f","Type":"ContainerStarted","Data":"7331bc39a63bd703d2346a261cce017e0bbe57f8f60e4e4ddccc8a828e0d1287"} Jan 28 17:18:14 crc kubenswrapper[4811]: I0128 17:18:14.278545 4811 generic.go:334] "Generic (PLEG): container finished" podID="2cd0bfee-b71c-4898-bc28-8ec123f17177" containerID="f3845bba5fe644274db6165bfc0fc888af923a5d6f4b01677f5543aa37e2c3df" exitCode=0 Jan 28 17:18:14 crc kubenswrapper[4811]: I0128 17:18:14.278608 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-857b556495-wb2l9" event={"ID":"2cd0bfee-b71c-4898-bc28-8ec123f17177","Type":"ContainerDied","Data":"f3845bba5fe644274db6165bfc0fc888af923a5d6f4b01677f5543aa37e2c3df"} Jan 28 17:18:14 crc kubenswrapper[4811]: I0128 17:18:14.278631 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-857b556495-wb2l9" event={"ID":"2cd0bfee-b71c-4898-bc28-8ec123f17177","Type":"ContainerStarted","Data":"bd4173b431976562d2121291119c48d6872d1ef0a171b1327ca8df43d66afbf9"} Jan 28 17:18:14 crc kubenswrapper[4811]: I0128 17:18:14.283092 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"10d0c1e3-184c-44d6-8a56-26cc234d77cc","Type":"ContainerStarted","Data":"7edb09007a732c505884e66c8dff2e11950063252b0daf60efaf6c80ba980c63"} Jan 28 17:18:15 crc kubenswrapper[4811]: I0128 17:18:15.293317 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-857b556495-wb2l9" event={"ID":"2cd0bfee-b71c-4898-bc28-8ec123f17177","Type":"ContainerStarted","Data":"941e02617f279548eec7d269deda6b51a6325179296377fe72f3286b85dc5c98"} Jan 28 17:18:15 crc kubenswrapper[4811]: I0128 17:18:15.295075 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/dnsmasq-dns-857b556495-wb2l9" Jan 28 17:18:15 crc kubenswrapper[4811]: I0128 17:18:15.297726 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"10d0c1e3-184c-44d6-8a56-26cc234d77cc","Type":"ContainerStarted","Data":"e0e3ba9df702887cc3add5e6e2c251a44a4da5397139f084078c1f3e743daab9"} Jan 28 17:18:15 crc kubenswrapper[4811]: I0128 17:18:15.298836 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"10d0c1e3-184c-44d6-8a56-26cc234d77cc","Type":"ContainerStarted","Data":"26d236e9758a2a120ac13da585af2d114f3d809cedcd8fbcb50a7a0348a23f57"} Jan 28 17:18:15 crc kubenswrapper[4811]: I0128 17:18:15.299852 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6a9b56e4-9483-4e78-9e34-ca75c82f846f","Type":"ContainerStarted","Data":"49bb10102392a3d7e09da55b7606297158cdb7ed5254991c1e81503f0564d115"} Jan 28 17:18:15 crc kubenswrapper[4811]: I0128 17:18:15.299951 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="6a9b56e4-9483-4e78-9e34-ca75c82f846f" containerName="glance-log" containerID="cri-o://9177847ab1b8f31ba53e989fadc36be890991ac80af9a3f1e67ac98f33fab49d" gracePeriod=30 Jan 28 17:18:15 crc kubenswrapper[4811]: I0128 17:18:15.300089 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="6a9b56e4-9483-4e78-9e34-ca75c82f846f" containerName="glance-httpd" containerID="cri-o://49bb10102392a3d7e09da55b7606297158cdb7ed5254991c1e81503f0564d115" gracePeriod=30 Jan 28 17:18:15 crc kubenswrapper[4811]: I0128 17:18:15.322681 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-857b556495-wb2l9" podStartSLOduration=3.322664198 podStartE2EDuration="3.322664198s" podCreationTimestamp="2026-01-28 17:18:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:18:15.31686044 +0000 UTC m=+5588.071224023" watchObservedRunningTime="2026-01-28 17:18:15.322664198 +0000 UTC m=+5588.077027781" Jan 28 17:18:15 crc kubenswrapper[4811]: I0128 17:18:15.356197 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.356173971 podStartE2EDuration="3.356173971s" podCreationTimestamp="2026-01-28 17:18:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:18:15.332341292 +0000 UTC m=+5588.086704875" watchObservedRunningTime="2026-01-28 17:18:15.356173971 +0000 UTC m=+5588.110537554" Jan 28 17:18:15 crc kubenswrapper[4811]: I0128 17:18:15.361505 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.361487696 podStartE2EDuration="3.361487696s" podCreationTimestamp="2026-01-28 17:18:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:18:15.34990998 +0000 UTC m=+5588.104273563" watchObservedRunningTime="2026-01-28 17:18:15.361487696 +0000 UTC m=+5588.115851279" Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.019217 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.042869 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a9b56e4-9483-4e78-9e34-ca75c82f846f-config-data\") pod \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\" (UID: \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\") " Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.042933 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a9b56e4-9483-4e78-9e34-ca75c82f846f-logs\") pod \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\" (UID: \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\") " Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.042970 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6a9b56e4-9483-4e78-9e34-ca75c82f846f-httpd-run\") pod \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\" (UID: \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\") " Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.043041 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6a9b56e4-9483-4e78-9e34-ca75c82f846f-scripts\") pod \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\" (UID: \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\") " Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.043080 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/6a9b56e4-9483-4e78-9e34-ca75c82f846f-ceph\") pod \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\" (UID: \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\") " Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.043178 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a9b56e4-9483-4e78-9e34-ca75c82f846f-combined-ca-bundle\") pod \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\" (UID: \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\") " Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.043223 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8wsps\" (UniqueName: \"kubernetes.io/projected/6a9b56e4-9483-4e78-9e34-ca75c82f846f-kube-api-access-8wsps\") pod \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\" (UID: \"6a9b56e4-9483-4e78-9e34-ca75c82f846f\") " Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.043574 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6a9b56e4-9483-4e78-9e34-ca75c82f846f-logs" (OuterVolumeSpecName: "logs") pod "6a9b56e4-9483-4e78-9e34-ca75c82f846f" (UID: "6a9b56e4-9483-4e78-9e34-ca75c82f846f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.043603 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6a9b56e4-9483-4e78-9e34-ca75c82f846f-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "6a9b56e4-9483-4e78-9e34-ca75c82f846f" (UID: "6a9b56e4-9483-4e78-9e34-ca75c82f846f"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.054257 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a9b56e4-9483-4e78-9e34-ca75c82f846f-scripts" (OuterVolumeSpecName: "scripts") pod "6a9b56e4-9483-4e78-9e34-ca75c82f846f" (UID: "6a9b56e4-9483-4e78-9e34-ca75c82f846f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.054319 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a9b56e4-9483-4e78-9e34-ca75c82f846f-kube-api-access-8wsps" (OuterVolumeSpecName: "kube-api-access-8wsps") pod "6a9b56e4-9483-4e78-9e34-ca75c82f846f" (UID: "6a9b56e4-9483-4e78-9e34-ca75c82f846f"). InnerVolumeSpecName "kube-api-access-8wsps". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.069347 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a9b56e4-9483-4e78-9e34-ca75c82f846f-ceph" (OuterVolumeSpecName: "ceph") pod "6a9b56e4-9483-4e78-9e34-ca75c82f846f" (UID: "6a9b56e4-9483-4e78-9e34-ca75c82f846f"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.092528 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a9b56e4-9483-4e78-9e34-ca75c82f846f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6a9b56e4-9483-4e78-9e34-ca75c82f846f" (UID: "6a9b56e4-9483-4e78-9e34-ca75c82f846f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.105720 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a9b56e4-9483-4e78-9e34-ca75c82f846f-config-data" (OuterVolumeSpecName: "config-data") pod "6a9b56e4-9483-4e78-9e34-ca75c82f846f" (UID: "6a9b56e4-9483-4e78-9e34-ca75c82f846f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.146217 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6a9b56e4-9483-4e78-9e34-ca75c82f846f-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.146275 4811 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/6a9b56e4-9483-4e78-9e34-ca75c82f846f-ceph\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.146290 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a9b56e4-9483-4e78-9e34-ca75c82f846f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.146303 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8wsps\" (UniqueName: \"kubernetes.io/projected/6a9b56e4-9483-4e78-9e34-ca75c82f846f-kube-api-access-8wsps\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.146334 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a9b56e4-9483-4e78-9e34-ca75c82f846f-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.146349 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a9b56e4-9483-4e78-9e34-ca75c82f846f-logs\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.146360 4811 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6a9b56e4-9483-4e78-9e34-ca75c82f846f-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.182803 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.307972 4811 generic.go:334] "Generic (PLEG): container finished" podID="6a9b56e4-9483-4e78-9e34-ca75c82f846f" containerID="49bb10102392a3d7e09da55b7606297158cdb7ed5254991c1e81503f0564d115" exitCode=0 Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.308004 4811 generic.go:334] "Generic (PLEG): container finished" podID="6a9b56e4-9483-4e78-9e34-ca75c82f846f" containerID="9177847ab1b8f31ba53e989fadc36be890991ac80af9a3f1e67ac98f33fab49d" exitCode=143 Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.308869 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.311629 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6a9b56e4-9483-4e78-9e34-ca75c82f846f","Type":"ContainerDied","Data":"49bb10102392a3d7e09da55b7606297158cdb7ed5254991c1e81503f0564d115"} Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.311670 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6a9b56e4-9483-4e78-9e34-ca75c82f846f","Type":"ContainerDied","Data":"9177847ab1b8f31ba53e989fadc36be890991ac80af9a3f1e67ac98f33fab49d"} Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.311681 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6a9b56e4-9483-4e78-9e34-ca75c82f846f","Type":"ContainerDied","Data":"7331bc39a63bd703d2346a261cce017e0bbe57f8f60e4e4ddccc8a828e0d1287"} Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.311696 4811 scope.go:117] "RemoveContainer" containerID="49bb10102392a3d7e09da55b7606297158cdb7ed5254991c1e81503f0564d115" Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.342595 4811 scope.go:117] "RemoveContainer" containerID="9177847ab1b8f31ba53e989fadc36be890991ac80af9a3f1e67ac98f33fab49d" Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.356657 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.367309 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.368237 4811 scope.go:117] "RemoveContainer" containerID="49bb10102392a3d7e09da55b7606297158cdb7ed5254991c1e81503f0564d115" Jan 28 17:18:16 crc kubenswrapper[4811]: E0128 17:18:16.370714 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49bb10102392a3d7e09da55b7606297158cdb7ed5254991c1e81503f0564d115\": container with ID starting with 49bb10102392a3d7e09da55b7606297158cdb7ed5254991c1e81503f0564d115 not found: ID does not exist" containerID="49bb10102392a3d7e09da55b7606297158cdb7ed5254991c1e81503f0564d115" Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.370745 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49bb10102392a3d7e09da55b7606297158cdb7ed5254991c1e81503f0564d115"} err="failed to get container status \"49bb10102392a3d7e09da55b7606297158cdb7ed5254991c1e81503f0564d115\": rpc error: code = NotFound desc = could not find container \"49bb10102392a3d7e09da55b7606297158cdb7ed5254991c1e81503f0564d115\": container with ID starting with 49bb10102392a3d7e09da55b7606297158cdb7ed5254991c1e81503f0564d115 not found: ID does not exist" Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.370767 4811 scope.go:117] "RemoveContainer" containerID="9177847ab1b8f31ba53e989fadc36be890991ac80af9a3f1e67ac98f33fab49d" Jan 28 17:18:16 crc kubenswrapper[4811]: E0128 17:18:16.371308 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9177847ab1b8f31ba53e989fadc36be890991ac80af9a3f1e67ac98f33fab49d\": container with ID starting with 9177847ab1b8f31ba53e989fadc36be890991ac80af9a3f1e67ac98f33fab49d not found: ID does not exist" 
containerID="9177847ab1b8f31ba53e989fadc36be890991ac80af9a3f1e67ac98f33fab49d" Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.371334 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9177847ab1b8f31ba53e989fadc36be890991ac80af9a3f1e67ac98f33fab49d"} err="failed to get container status \"9177847ab1b8f31ba53e989fadc36be890991ac80af9a3f1e67ac98f33fab49d\": rpc error: code = NotFound desc = could not find container \"9177847ab1b8f31ba53e989fadc36be890991ac80af9a3f1e67ac98f33fab49d\": container with ID starting with 9177847ab1b8f31ba53e989fadc36be890991ac80af9a3f1e67ac98f33fab49d not found: ID does not exist" Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.371348 4811 scope.go:117] "RemoveContainer" containerID="49bb10102392a3d7e09da55b7606297158cdb7ed5254991c1e81503f0564d115" Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.371698 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49bb10102392a3d7e09da55b7606297158cdb7ed5254991c1e81503f0564d115"} err="failed to get container status \"49bb10102392a3d7e09da55b7606297158cdb7ed5254991c1e81503f0564d115\": rpc error: code = NotFound desc = could not find container \"49bb10102392a3d7e09da55b7606297158cdb7ed5254991c1e81503f0564d115\": container with ID starting with 49bb10102392a3d7e09da55b7606297158cdb7ed5254991c1e81503f0564d115 not found: ID does not exist" Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.371721 4811 scope.go:117] "RemoveContainer" containerID="9177847ab1b8f31ba53e989fadc36be890991ac80af9a3f1e67ac98f33fab49d" Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.371983 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9177847ab1b8f31ba53e989fadc36be890991ac80af9a3f1e67ac98f33fab49d"} err="failed to get container status \"9177847ab1b8f31ba53e989fadc36be890991ac80af9a3f1e67ac98f33fab49d\": rpc error: code = NotFound desc = could not find container \"9177847ab1b8f31ba53e989fadc36be890991ac80af9a3f1e67ac98f33fab49d\": container with ID starting with 9177847ab1b8f31ba53e989fadc36be890991ac80af9a3f1e67ac98f33fab49d not found: ID does not exist" Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.379758 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Jan 28 17:18:16 crc kubenswrapper[4811]: E0128 17:18:16.380139 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a9b56e4-9483-4e78-9e34-ca75c82f846f" containerName="glance-log" Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.380156 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a9b56e4-9483-4e78-9e34-ca75c82f846f" containerName="glance-log" Jan 28 17:18:16 crc kubenswrapper[4811]: E0128 17:18:16.380185 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a9b56e4-9483-4e78-9e34-ca75c82f846f" containerName="glance-httpd" Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.380191 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a9b56e4-9483-4e78-9e34-ca75c82f846f" containerName="glance-httpd" Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.380347 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a9b56e4-9483-4e78-9e34-ca75c82f846f" containerName="glance-httpd" Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.380365 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a9b56e4-9483-4e78-9e34-ca75c82f846f" containerName="glance-log" 
Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.381256 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.386557 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.390352 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.452275 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-logs\") pod \"glance-default-external-api-0\" (UID: \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\") " pod="openstack/glance-default-external-api-0"
Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.452579 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-scripts\") pod \"glance-default-external-api-0\" (UID: \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\") " pod="openstack/glance-default-external-api-0"
Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.452695 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\") " pod="openstack/glance-default-external-api-0"
Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.452881 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-config-data\") pod \"glance-default-external-api-0\" (UID: \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\") " pod="openstack/glance-default-external-api-0"
Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.452997 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\") " pod="openstack/glance-default-external-api-0"
Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.453213 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-ceph\") pod \"glance-default-external-api-0\" (UID: \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\") " pod="openstack/glance-default-external-api-0"
Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.453356 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tslgz\" (UniqueName: \"kubernetes.io/projected/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-kube-api-access-tslgz\") pod \"glance-default-external-api-0\" (UID: \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\") " pod="openstack/glance-default-external-api-0"
Jan 28 17:18:16 crc kubenswrapper[4811]: E0128 17:18:16.498946 4811 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6a9b56e4_9483_4e78_9e34_ca75c82f846f.slice/crio-7331bc39a63bd703d2346a261cce017e0bbe57f8f60e4e4ddccc8a828e0d1287\": RecentStats: unable to find data in memory cache]"
Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.554960 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-logs\") pod \"glance-default-external-api-0\" (UID: \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\") " pod="openstack/glance-default-external-api-0"
Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.555080 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-scripts\") pod \"glance-default-external-api-0\" (UID: \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\") " pod="openstack/glance-default-external-api-0"
Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.555108 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\") " pod="openstack/glance-default-external-api-0"
Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.555150 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-config-data\") pod \"glance-default-external-api-0\" (UID: \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\") " pod="openstack/glance-default-external-api-0"
Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.555185 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\") " pod="openstack/glance-default-external-api-0"
Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.555269 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-ceph\") pod \"glance-default-external-api-0\" (UID: \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\") " pod="openstack/glance-default-external-api-0"
Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.555294 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tslgz\" (UniqueName: \"kubernetes.io/projected/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-kube-api-access-tslgz\") pod \"glance-default-external-api-0\" (UID: \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\") " pod="openstack/glance-default-external-api-0"
Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.556060 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-logs\") pod \"glance-default-external-api-0\" (UID: \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\") " pod="openstack/glance-default-external-api-0"
Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.557004 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\") " pod="openstack/glance-default-external-api-0"
Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.561734 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-config-data\") pod \"glance-default-external-api-0\" (UID: \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\") " pod="openstack/glance-default-external-api-0"
Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.562034 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-ceph\") pod \"glance-default-external-api-0\" (UID: \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\") " pod="openstack/glance-default-external-api-0"
Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.562502 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-scripts\") pod \"glance-default-external-api-0\" (UID: \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\") " pod="openstack/glance-default-external-api-0"
Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.563812 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\") " pod="openstack/glance-default-external-api-0"
Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.582347 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tslgz\" (UniqueName: \"kubernetes.io/projected/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-kube-api-access-tslgz\") pod \"glance-default-external-api-0\" (UID: \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\") " pod="openstack/glance-default-external-api-0"
Jan 28 17:18:16 crc kubenswrapper[4811]: I0128 17:18:16.702249 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Jan 28 17:18:17 crc kubenswrapper[4811]: I0128 17:18:17.283116 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 28 17:18:17 crc kubenswrapper[4811]: I0128 17:18:17.323913 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"177ba4f4-b77d-49b6-b9af-37a5a5a872d7","Type":"ContainerStarted","Data":"da52dda329d9ba877cab1e14273d7e9a307e19fea79fa38b74ded23f4c59c18c"}
Jan 28 17:18:17 crc kubenswrapper[4811]: I0128 17:18:17.325599 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="10d0c1e3-184c-44d6-8a56-26cc234d77cc" containerName="glance-log" containerID="cri-o://26d236e9758a2a120ac13da585af2d114f3d809cedcd8fbcb50a7a0348a23f57" gracePeriod=30
Jan 28 17:18:17 crc kubenswrapper[4811]: I0128 17:18:17.325736 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="10d0c1e3-184c-44d6-8a56-26cc234d77cc" containerName="glance-httpd" containerID="cri-o://e0e3ba9df702887cc3add5e6e2c251a44a4da5397139f084078c1f3e743daab9" gracePeriod=30
Jan 28 17:18:17 crc kubenswrapper[4811]: I0128 17:18:17.890756 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 28 17:18:17 crc kubenswrapper[4811]: I0128 17:18:17.996992 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/10d0c1e3-184c-44d6-8a56-26cc234d77cc-scripts\") pod \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\" (UID: \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\") " Jan 28 17:18:17 crc kubenswrapper[4811]: I0128 17:18:17.997035 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10d0c1e3-184c-44d6-8a56-26cc234d77cc-combined-ca-bundle\") pod \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\" (UID: \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\") " Jan 28 17:18:17 crc kubenswrapper[4811]: I0128 17:18:17.997078 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kslqd\" (UniqueName: \"kubernetes.io/projected/10d0c1e3-184c-44d6-8a56-26cc234d77cc-kube-api-access-kslqd\") pod \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\" (UID: \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\") " Jan 28 17:18:17 crc kubenswrapper[4811]: I0128 17:18:17.997175 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/10d0c1e3-184c-44d6-8a56-26cc234d77cc-logs\") pod \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\" (UID: \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\") " Jan 28 17:18:17 crc kubenswrapper[4811]: I0128 17:18:17.997195 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10d0c1e3-184c-44d6-8a56-26cc234d77cc-config-data\") pod \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\" (UID: \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\") " Jan 28 17:18:17 crc kubenswrapper[4811]: I0128 17:18:17.997267 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/10d0c1e3-184c-44d6-8a56-26cc234d77cc-ceph\") pod \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\" (UID: \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\") " Jan 28 17:18:17 crc kubenswrapper[4811]: I0128 17:18:17.997291 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/10d0c1e3-184c-44d6-8a56-26cc234d77cc-httpd-run\") pod \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\" (UID: \"10d0c1e3-184c-44d6-8a56-26cc234d77cc\") " Jan 28 17:18:17 crc kubenswrapper[4811]: I0128 17:18:17.998118 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/10d0c1e3-184c-44d6-8a56-26cc234d77cc-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "10d0c1e3-184c-44d6-8a56-26cc234d77cc" (UID: "10d0c1e3-184c-44d6-8a56-26cc234d77cc"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:18:17 crc kubenswrapper[4811]: I0128 17:18:17.998260 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/10d0c1e3-184c-44d6-8a56-26cc234d77cc-logs" (OuterVolumeSpecName: "logs") pod "10d0c1e3-184c-44d6-8a56-26cc234d77cc" (UID: "10d0c1e3-184c-44d6-8a56-26cc234d77cc"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.004724 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10d0c1e3-184c-44d6-8a56-26cc234d77cc-scripts" (OuterVolumeSpecName: "scripts") pod "10d0c1e3-184c-44d6-8a56-26cc234d77cc" (UID: "10d0c1e3-184c-44d6-8a56-26cc234d77cc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.004786 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10d0c1e3-184c-44d6-8a56-26cc234d77cc-ceph" (OuterVolumeSpecName: "ceph") pod "10d0c1e3-184c-44d6-8a56-26cc234d77cc" (UID: "10d0c1e3-184c-44d6-8a56-26cc234d77cc"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.005092 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10d0c1e3-184c-44d6-8a56-26cc234d77cc-kube-api-access-kslqd" (OuterVolumeSpecName: "kube-api-access-kslqd") pod "10d0c1e3-184c-44d6-8a56-26cc234d77cc" (UID: "10d0c1e3-184c-44d6-8a56-26cc234d77cc"). InnerVolumeSpecName "kube-api-access-kslqd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.042567 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10d0c1e3-184c-44d6-8a56-26cc234d77cc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "10d0c1e3-184c-44d6-8a56-26cc234d77cc" (UID: "10d0c1e3-184c-44d6-8a56-26cc234d77cc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.053839 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10d0c1e3-184c-44d6-8a56-26cc234d77cc-config-data" (OuterVolumeSpecName: "config-data") pod "10d0c1e3-184c-44d6-8a56-26cc234d77cc" (UID: "10d0c1e3-184c-44d6-8a56-26cc234d77cc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.099705 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/10d0c1e3-184c-44d6-8a56-26cc234d77cc-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.099751 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10d0c1e3-184c-44d6-8a56-26cc234d77cc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.099766 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kslqd\" (UniqueName: \"kubernetes.io/projected/10d0c1e3-184c-44d6-8a56-26cc234d77cc-kube-api-access-kslqd\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.099779 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/10d0c1e3-184c-44d6-8a56-26cc234d77cc-logs\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.099791 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10d0c1e3-184c-44d6-8a56-26cc234d77cc-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.099803 4811 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/10d0c1e3-184c-44d6-8a56-26cc234d77cc-ceph\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.099813 4811 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/10d0c1e3-184c-44d6-8a56-26cc234d77cc-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.337272 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"177ba4f4-b77d-49b6-b9af-37a5a5a872d7","Type":"ContainerStarted","Data":"d1876907051ab6bbe91a76928452e9ab313dcf35ea99e23a38a39782b1e8ccdd"} Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.340266 4811 generic.go:334] "Generic (PLEG): container finished" podID="10d0c1e3-184c-44d6-8a56-26cc234d77cc" containerID="e0e3ba9df702887cc3add5e6e2c251a44a4da5397139f084078c1f3e743daab9" exitCode=0 Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.340303 4811 generic.go:334] "Generic (PLEG): container finished" podID="10d0c1e3-184c-44d6-8a56-26cc234d77cc" containerID="26d236e9758a2a120ac13da585af2d114f3d809cedcd8fbcb50a7a0348a23f57" exitCode=143 Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.347067 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.350562 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a9b56e4-9483-4e78-9e34-ca75c82f846f" path="/var/lib/kubelet/pods/6a9b56e4-9483-4e78-9e34-ca75c82f846f/volumes" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.351467 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"10d0c1e3-184c-44d6-8a56-26cc234d77cc","Type":"ContainerDied","Data":"e0e3ba9df702887cc3add5e6e2c251a44a4da5397139f084078c1f3e743daab9"} Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.351507 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"10d0c1e3-184c-44d6-8a56-26cc234d77cc","Type":"ContainerDied","Data":"26d236e9758a2a120ac13da585af2d114f3d809cedcd8fbcb50a7a0348a23f57"} Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.351517 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"10d0c1e3-184c-44d6-8a56-26cc234d77cc","Type":"ContainerDied","Data":"7edb09007a732c505884e66c8dff2e11950063252b0daf60efaf6c80ba980c63"} Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.351535 4811 scope.go:117] "RemoveContainer" containerID="e0e3ba9df702887cc3add5e6e2c251a44a4da5397139f084078c1f3e743daab9" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.387701 4811 scope.go:117] "RemoveContainer" containerID="26d236e9758a2a120ac13da585af2d114f3d809cedcd8fbcb50a7a0348a23f57" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.395818 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.404947 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.408221 4811 scope.go:117] "RemoveContainer" containerID="e0e3ba9df702887cc3add5e6e2c251a44a4da5397139f084078c1f3e743daab9" Jan 28 17:18:18 crc kubenswrapper[4811]: E0128 17:18:18.411546 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0e3ba9df702887cc3add5e6e2c251a44a4da5397139f084078c1f3e743daab9\": container with ID starting with e0e3ba9df702887cc3add5e6e2c251a44a4da5397139f084078c1f3e743daab9 not found: ID does not exist" containerID="e0e3ba9df702887cc3add5e6e2c251a44a4da5397139f084078c1f3e743daab9" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.411588 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0e3ba9df702887cc3add5e6e2c251a44a4da5397139f084078c1f3e743daab9"} err="failed to get container status \"e0e3ba9df702887cc3add5e6e2c251a44a4da5397139f084078c1f3e743daab9\": rpc error: code = NotFound desc = could not find container \"e0e3ba9df702887cc3add5e6e2c251a44a4da5397139f084078c1f3e743daab9\": container with ID starting with e0e3ba9df702887cc3add5e6e2c251a44a4da5397139f084078c1f3e743daab9 not found: ID does not exist" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.411613 4811 scope.go:117] "RemoveContainer" containerID="26d236e9758a2a120ac13da585af2d114f3d809cedcd8fbcb50a7a0348a23f57" Jan 28 17:18:18 crc kubenswrapper[4811]: E0128 17:18:18.412346 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"26d236e9758a2a120ac13da585af2d114f3d809cedcd8fbcb50a7a0348a23f57\": container with ID starting with 26d236e9758a2a120ac13da585af2d114f3d809cedcd8fbcb50a7a0348a23f57 not found: ID does not exist" containerID="26d236e9758a2a120ac13da585af2d114f3d809cedcd8fbcb50a7a0348a23f57" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.412367 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26d236e9758a2a120ac13da585af2d114f3d809cedcd8fbcb50a7a0348a23f57"} err="failed to get container status \"26d236e9758a2a120ac13da585af2d114f3d809cedcd8fbcb50a7a0348a23f57\": rpc error: code = NotFound desc = could not find container \"26d236e9758a2a120ac13da585af2d114f3d809cedcd8fbcb50a7a0348a23f57\": container with ID starting with 26d236e9758a2a120ac13da585af2d114f3d809cedcd8fbcb50a7a0348a23f57 not found: ID does not exist" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.412382 4811 scope.go:117] "RemoveContainer" containerID="e0e3ba9df702887cc3add5e6e2c251a44a4da5397139f084078c1f3e743daab9" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.412804 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0e3ba9df702887cc3add5e6e2c251a44a4da5397139f084078c1f3e743daab9"} err="failed to get container status \"e0e3ba9df702887cc3add5e6e2c251a44a4da5397139f084078c1f3e743daab9\": rpc error: code = NotFound desc = could not find container \"e0e3ba9df702887cc3add5e6e2c251a44a4da5397139f084078c1f3e743daab9\": container with ID starting with e0e3ba9df702887cc3add5e6e2c251a44a4da5397139f084078c1f3e743daab9 not found: ID does not exist" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.412826 4811 scope.go:117] "RemoveContainer" containerID="26d236e9758a2a120ac13da585af2d114f3d809cedcd8fbcb50a7a0348a23f57" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.413116 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26d236e9758a2a120ac13da585af2d114f3d809cedcd8fbcb50a7a0348a23f57"} err="failed to get container status \"26d236e9758a2a120ac13da585af2d114f3d809cedcd8fbcb50a7a0348a23f57\": rpc error: code = NotFound desc = could not find container \"26d236e9758a2a120ac13da585af2d114f3d809cedcd8fbcb50a7a0348a23f57\": container with ID starting with 26d236e9758a2a120ac13da585af2d114f3d809cedcd8fbcb50a7a0348a23f57 not found: ID does not exist" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.420965 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 28 17:18:18 crc kubenswrapper[4811]: E0128 17:18:18.421381 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10d0c1e3-184c-44d6-8a56-26cc234d77cc" containerName="glance-log" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.421397 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="10d0c1e3-184c-44d6-8a56-26cc234d77cc" containerName="glance-log" Jan 28 17:18:18 crc kubenswrapper[4811]: E0128 17:18:18.421422 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10d0c1e3-184c-44d6-8a56-26cc234d77cc" containerName="glance-httpd" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.421480 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="10d0c1e3-184c-44d6-8a56-26cc234d77cc" containerName="glance-httpd" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.421695 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="10d0c1e3-184c-44d6-8a56-26cc234d77cc" containerName="glance-httpd" 
Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.421720 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="10d0c1e3-184c-44d6-8a56-26cc234d77cc" containerName="glance-log" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.422839 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.425035 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.439658 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.509841 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1447823-7ddf-4d19-965e-d975ce17c540-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"a1447823-7ddf-4d19-965e-d975ce17c540\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.509911 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/a1447823-7ddf-4d19-965e-d975ce17c540-ceph\") pod \"glance-default-internal-api-0\" (UID: \"a1447823-7ddf-4d19-965e-d975ce17c540\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.510009 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1447823-7ddf-4d19-965e-d975ce17c540-logs\") pod \"glance-default-internal-api-0\" (UID: \"a1447823-7ddf-4d19-965e-d975ce17c540\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.510043 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a1447823-7ddf-4d19-965e-d975ce17c540-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"a1447823-7ddf-4d19-965e-d975ce17c540\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.510081 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6rpg\" (UniqueName: \"kubernetes.io/projected/a1447823-7ddf-4d19-965e-d975ce17c540-kube-api-access-k6rpg\") pod \"glance-default-internal-api-0\" (UID: \"a1447823-7ddf-4d19-965e-d975ce17c540\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.510113 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1447823-7ddf-4d19-965e-d975ce17c540-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a1447823-7ddf-4d19-965e-d975ce17c540\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.510170 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1447823-7ddf-4d19-965e-d975ce17c540-scripts\") pod \"glance-default-internal-api-0\" (UID: \"a1447823-7ddf-4d19-965e-d975ce17c540\") " pod="openstack/glance-default-internal-api-0" Jan 28 
Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.611929 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1447823-7ddf-4d19-965e-d975ce17c540-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"a1447823-7ddf-4d19-965e-d975ce17c540\") " pod="openstack/glance-default-internal-api-0"
Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.612318 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/a1447823-7ddf-4d19-965e-d975ce17c540-ceph\") pod \"glance-default-internal-api-0\" (UID: \"a1447823-7ddf-4d19-965e-d975ce17c540\") " pod="openstack/glance-default-internal-api-0"
Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.612410 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1447823-7ddf-4d19-965e-d975ce17c540-logs\") pod \"glance-default-internal-api-0\" (UID: \"a1447823-7ddf-4d19-965e-d975ce17c540\") " pod="openstack/glance-default-internal-api-0"
Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.612462 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a1447823-7ddf-4d19-965e-d975ce17c540-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"a1447823-7ddf-4d19-965e-d975ce17c540\") " pod="openstack/glance-default-internal-api-0"
Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.612505 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6rpg\" (UniqueName: \"kubernetes.io/projected/a1447823-7ddf-4d19-965e-d975ce17c540-kube-api-access-k6rpg\") pod \"glance-default-internal-api-0\" (UID: \"a1447823-7ddf-4d19-965e-d975ce17c540\") " pod="openstack/glance-default-internal-api-0"
Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.612544 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1447823-7ddf-4d19-965e-d975ce17c540-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a1447823-7ddf-4d19-965e-d975ce17c540\") " pod="openstack/glance-default-internal-api-0"
Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.612608 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1447823-7ddf-4d19-965e-d975ce17c540-scripts\") pod \"glance-default-internal-api-0\" (UID: \"a1447823-7ddf-4d19-965e-d975ce17c540\") " pod="openstack/glance-default-internal-api-0"
Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.612967 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1447823-7ddf-4d19-965e-d975ce17c540-logs\") pod \"glance-default-internal-api-0\" (UID: \"a1447823-7ddf-4d19-965e-d975ce17c540\") " pod="openstack/glance-default-internal-api-0"
Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.612989 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a1447823-7ddf-4d19-965e-d975ce17c540-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"a1447823-7ddf-4d19-965e-d975ce17c540\") " pod="openstack/glance-default-internal-api-0"
Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.621677 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1447823-7ddf-4d19-965e-d975ce17c540-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a1447823-7ddf-4d19-965e-d975ce17c540\") " pod="openstack/glance-default-internal-api-0"
Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.622173 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/a1447823-7ddf-4d19-965e-d975ce17c540-ceph\") pod \"glance-default-internal-api-0\" (UID: \"a1447823-7ddf-4d19-965e-d975ce17c540\") " pod="openstack/glance-default-internal-api-0"
Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.623492 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1447823-7ddf-4d19-965e-d975ce17c540-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"a1447823-7ddf-4d19-965e-d975ce17c540\") " pod="openstack/glance-default-internal-api-0"
Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.633126 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1447823-7ddf-4d19-965e-d975ce17c540-scripts\") pod \"glance-default-internal-api-0\" (UID: \"a1447823-7ddf-4d19-965e-d975ce17c540\") " pod="openstack/glance-default-internal-api-0"
Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.648234 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6rpg\" (UniqueName: \"kubernetes.io/projected/a1447823-7ddf-4d19-965e-d975ce17c540-kube-api-access-k6rpg\") pod \"glance-default-internal-api-0\" (UID: \"a1447823-7ddf-4d19-965e-d975ce17c540\") " pod="openstack/glance-default-internal-api-0"
Jan 28 17:18:18 crc kubenswrapper[4811]: I0128 17:18:18.740266 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Jan 28 17:18:19 crc kubenswrapper[4811]: I0128 17:18:19.271642 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 28 17:18:19 crc kubenswrapper[4811]: W0128 17:18:19.279208 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda1447823_7ddf_4d19_965e_d975ce17c540.slice/crio-b8b4d87081eecaa4b641e2cfb0c48bf528871add99225e6beff5fb6d91130e29 WatchSource:0}: Error finding container b8b4d87081eecaa4b641e2cfb0c48bf528871add99225e6beff5fb6d91130e29: Status 404 returned error can't find the container with id b8b4d87081eecaa4b641e2cfb0c48bf528871add99225e6beff5fb6d91130e29
Jan 28 17:18:19 crc kubenswrapper[4811]: I0128 17:18:19.350415 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"177ba4f4-b77d-49b6-b9af-37a5a5a872d7","Type":"ContainerStarted","Data":"0607aa5f56411773fa3b491b98ade32a2acf2e18273bcdc4177c03e51f447445"}
Jan 28 17:18:19 crc kubenswrapper[4811]: I0128 17:18:19.355283 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a1447823-7ddf-4d19-965e-d975ce17c540","Type":"ContainerStarted","Data":"b8b4d87081eecaa4b641e2cfb0c48bf528871add99225e6beff5fb6d91130e29"}
Jan 28 17:18:19 crc kubenswrapper[4811]: I0128 17:18:19.383139 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.383115265 podStartE2EDuration="3.383115265s" podCreationTimestamp="2026-01-28 17:18:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:18:19.375469407 +0000 UTC m=+5592.129832990" watchObservedRunningTime="2026-01-28 17:18:19.383115265 +0000 UTC m=+5592.137478848"
Jan 28 17:18:20 crc kubenswrapper[4811]: I0128 17:18:20.352412 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10d0c1e3-184c-44d6-8a56-26cc234d77cc" path="/var/lib/kubelet/pods/10d0c1e3-184c-44d6-8a56-26cc234d77cc/volumes"
Jan 28 17:18:20 crc kubenswrapper[4811]: I0128 17:18:20.367065 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a1447823-7ddf-4d19-965e-d975ce17c540","Type":"ContainerStarted","Data":"5b051ae206aa0aa155efec89b12be9b53147d8c5310b0ee0757ddca087b8b7d2"}
Jan 28 17:18:21 crc kubenswrapper[4811]: I0128 17:18:21.379587 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a1447823-7ddf-4d19-965e-d975ce17c540","Type":"ContainerStarted","Data":"c8a168ad02dbb98b713cad79c536ca2d9670d0416b666f1dd5932b9145aa7cd5"}
Jan 28 17:18:22 crc kubenswrapper[4811]: I0128 17:18:22.986640 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-857b556495-wb2l9"
Jan 28 17:18:23 crc kubenswrapper[4811]: I0128 17:18:23.005002 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.004977196 podStartE2EDuration="5.004977196s" podCreationTimestamp="2026-01-28 17:18:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:18:21.40606987 +0000 UTC m=+5594.160433453"
watchObservedRunningTime="2026-01-28 17:18:23.004977196 +0000 UTC m=+5595.759340779" Jan 28 17:18:23 crc kubenswrapper[4811]: I0128 17:18:23.041773 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bbc7f9489-jbcqx"] Jan 28 17:18:23 crc kubenswrapper[4811]: I0128 17:18:23.042115 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-bbc7f9489-jbcqx" podUID="fde783c7-bbcb-4005-9507-0610eb14ba6a" containerName="dnsmasq-dns" containerID="cri-o://78f89a47d530238bc1e232d5499b43537ded37543ea9a3f7f0fad2b5e1e27318" gracePeriod=10 Jan 28 17:18:23 crc kubenswrapper[4811]: I0128 17:18:23.403716 4811 generic.go:334] "Generic (PLEG): container finished" podID="fde783c7-bbcb-4005-9507-0610eb14ba6a" containerID="78f89a47d530238bc1e232d5499b43537ded37543ea9a3f7f0fad2b5e1e27318" exitCode=0 Jan 28 17:18:23 crc kubenswrapper[4811]: I0128 17:18:23.403921 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bbc7f9489-jbcqx" event={"ID":"fde783c7-bbcb-4005-9507-0610eb14ba6a","Type":"ContainerDied","Data":"78f89a47d530238bc1e232d5499b43537ded37543ea9a3f7f0fad2b5e1e27318"} Jan 28 17:18:23 crc kubenswrapper[4811]: I0128 17:18:23.523161 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bbc7f9489-jbcqx" Jan 28 17:18:23 crc kubenswrapper[4811]: I0128 17:18:23.602334 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6rsdk\" (UniqueName: \"kubernetes.io/projected/fde783c7-bbcb-4005-9507-0610eb14ba6a-kube-api-access-6rsdk\") pod \"fde783c7-bbcb-4005-9507-0610eb14ba6a\" (UID: \"fde783c7-bbcb-4005-9507-0610eb14ba6a\") " Jan 28 17:18:23 crc kubenswrapper[4811]: I0128 17:18:23.602584 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fde783c7-bbcb-4005-9507-0610eb14ba6a-dns-svc\") pod \"fde783c7-bbcb-4005-9507-0610eb14ba6a\" (UID: \"fde783c7-bbcb-4005-9507-0610eb14ba6a\") " Jan 28 17:18:23 crc kubenswrapper[4811]: I0128 17:18:23.602664 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fde783c7-bbcb-4005-9507-0610eb14ba6a-ovsdbserver-nb\") pod \"fde783c7-bbcb-4005-9507-0610eb14ba6a\" (UID: \"fde783c7-bbcb-4005-9507-0610eb14ba6a\") " Jan 28 17:18:23 crc kubenswrapper[4811]: I0128 17:18:23.602752 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fde783c7-bbcb-4005-9507-0610eb14ba6a-config\") pod \"fde783c7-bbcb-4005-9507-0610eb14ba6a\" (UID: \"fde783c7-bbcb-4005-9507-0610eb14ba6a\") " Jan 28 17:18:23 crc kubenswrapper[4811]: I0128 17:18:23.602818 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fde783c7-bbcb-4005-9507-0610eb14ba6a-ovsdbserver-sb\") pod \"fde783c7-bbcb-4005-9507-0610eb14ba6a\" (UID: \"fde783c7-bbcb-4005-9507-0610eb14ba6a\") " Jan 28 17:18:23 crc kubenswrapper[4811]: I0128 17:18:23.608741 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fde783c7-bbcb-4005-9507-0610eb14ba6a-kube-api-access-6rsdk" (OuterVolumeSpecName: "kube-api-access-6rsdk") pod "fde783c7-bbcb-4005-9507-0610eb14ba6a" (UID: "fde783c7-bbcb-4005-9507-0610eb14ba6a"). InnerVolumeSpecName "kube-api-access-6rsdk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:18:23 crc kubenswrapper[4811]: I0128 17:18:23.653096 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fde783c7-bbcb-4005-9507-0610eb14ba6a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "fde783c7-bbcb-4005-9507-0610eb14ba6a" (UID: "fde783c7-bbcb-4005-9507-0610eb14ba6a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:18:23 crc kubenswrapper[4811]: I0128 17:18:23.656769 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fde783c7-bbcb-4005-9507-0610eb14ba6a-config" (OuterVolumeSpecName: "config") pod "fde783c7-bbcb-4005-9507-0610eb14ba6a" (UID: "fde783c7-bbcb-4005-9507-0610eb14ba6a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:18:23 crc kubenswrapper[4811]: I0128 17:18:23.668083 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fde783c7-bbcb-4005-9507-0610eb14ba6a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "fde783c7-bbcb-4005-9507-0610eb14ba6a" (UID: "fde783c7-bbcb-4005-9507-0610eb14ba6a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:18:23 crc kubenswrapper[4811]: I0128 17:18:23.677512 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fde783c7-bbcb-4005-9507-0610eb14ba6a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "fde783c7-bbcb-4005-9507-0610eb14ba6a" (UID: "fde783c7-bbcb-4005-9507-0610eb14ba6a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:18:23 crc kubenswrapper[4811]: I0128 17:18:23.705214 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fde783c7-bbcb-4005-9507-0610eb14ba6a-config\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:23 crc kubenswrapper[4811]: I0128 17:18:23.705252 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fde783c7-bbcb-4005-9507-0610eb14ba6a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:23 crc kubenswrapper[4811]: I0128 17:18:23.705264 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6rsdk\" (UniqueName: \"kubernetes.io/projected/fde783c7-bbcb-4005-9507-0610eb14ba6a-kube-api-access-6rsdk\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:23 crc kubenswrapper[4811]: I0128 17:18:23.705273 4811 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fde783c7-bbcb-4005-9507-0610eb14ba6a-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:23 crc kubenswrapper[4811]: I0128 17:18:23.705283 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fde783c7-bbcb-4005-9507-0610eb14ba6a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:24 crc kubenswrapper[4811]: I0128 17:18:24.417547 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bbc7f9489-jbcqx" event={"ID":"fde783c7-bbcb-4005-9507-0610eb14ba6a","Type":"ContainerDied","Data":"78a047e90cdd76085180fda35e0377841abfc696e11e908744150f7609b4fb55"} Jan 28 17:18:24 crc kubenswrapper[4811]: I0128 17:18:24.417662 4811 scope.go:117] "RemoveContainer" 
containerID="78f89a47d530238bc1e232d5499b43537ded37543ea9a3f7f0fad2b5e1e27318" Jan 28 17:18:24 crc kubenswrapper[4811]: I0128 17:18:24.417681 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bbc7f9489-jbcqx" Jan 28 17:18:24 crc kubenswrapper[4811]: I0128 17:18:24.441694 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bbc7f9489-jbcqx"] Jan 28 17:18:24 crc kubenswrapper[4811]: I0128 17:18:24.445332 4811 scope.go:117] "RemoveContainer" containerID="201d1e26dc391317a26f2a3e2673730e2449ec83e3aa362906b130802156941b" Jan 28 17:18:24 crc kubenswrapper[4811]: I0128 17:18:24.451373 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-bbc7f9489-jbcqx"] Jan 28 17:18:26 crc kubenswrapper[4811]: I0128 17:18:26.354233 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fde783c7-bbcb-4005-9507-0610eb14ba6a" path="/var/lib/kubelet/pods/fde783c7-bbcb-4005-9507-0610eb14ba6a/volumes" Jan 28 17:18:26 crc kubenswrapper[4811]: I0128 17:18:26.702388 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 28 17:18:26 crc kubenswrapper[4811]: I0128 17:18:26.702459 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 28 17:18:26 crc kubenswrapper[4811]: I0128 17:18:26.732106 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 28 17:18:26 crc kubenswrapper[4811]: I0128 17:18:26.739851 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 28 17:18:27 crc kubenswrapper[4811]: I0128 17:18:27.461156 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 28 17:18:27 crc kubenswrapper[4811]: I0128 17:18:27.462220 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 28 17:18:28 crc kubenswrapper[4811]: I0128 17:18:28.319993 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-bbc7f9489-jbcqx" podUID="fde783c7-bbcb-4005-9507-0610eb14ba6a" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.1.37:5353: i/o timeout" Jan 28 17:18:28 crc kubenswrapper[4811]: I0128 17:18:28.741241 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 28 17:18:28 crc kubenswrapper[4811]: I0128 17:18:28.741304 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 28 17:18:28 crc kubenswrapper[4811]: I0128 17:18:28.770485 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 28 17:18:28 crc kubenswrapper[4811]: I0128 17:18:28.788102 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 28 17:18:29 crc kubenswrapper[4811]: I0128 17:18:29.437090 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 28 17:18:29 crc kubenswrapper[4811]: I0128 17:18:29.480894 4811 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 28 17:18:29 crc kubenswrapper[4811]: I0128 17:18:29.481442 4811 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 28 17:18:29 crc kubenswrapper[4811]: I0128 17:18:29.481544 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 28 17:18:29 crc kubenswrapper[4811]: I0128 17:18:29.567193 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 28 17:18:30 crc kubenswrapper[4811]: I0128 17:18:30.642989 4811 scope.go:117] "RemoveContainer" containerID="07a8b98e5d1fd06278747cf9f48a130627fa149684ffb0fc255c9a572ac53f91" Jan 28 17:18:31 crc kubenswrapper[4811]: I0128 17:18:31.496455 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 28 17:18:31 crc kubenswrapper[4811]: I0128 17:18:31.502777 4811 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 28 17:18:31 crc kubenswrapper[4811]: I0128 17:18:31.578889 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 28 17:18:33 crc kubenswrapper[4811]: I0128 17:18:33.087249 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 17:18:33 crc kubenswrapper[4811]: I0128 17:18:33.087336 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 17:18:39 crc kubenswrapper[4811]: I0128 17:18:39.647264 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-h8njg"] Jan 28 17:18:39 crc kubenswrapper[4811]: E0128 17:18:39.648239 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fde783c7-bbcb-4005-9507-0610eb14ba6a" containerName="init" Jan 28 17:18:39 crc kubenswrapper[4811]: I0128 17:18:39.648256 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="fde783c7-bbcb-4005-9507-0610eb14ba6a" containerName="init" Jan 28 17:18:39 crc kubenswrapper[4811]: E0128 17:18:39.648268 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fde783c7-bbcb-4005-9507-0610eb14ba6a" containerName="dnsmasq-dns" Jan 28 17:18:39 crc kubenswrapper[4811]: I0128 17:18:39.648276 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="fde783c7-bbcb-4005-9507-0610eb14ba6a" containerName="dnsmasq-dns" Jan 28 17:18:39 crc kubenswrapper[4811]: I0128 17:18:39.648514 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="fde783c7-bbcb-4005-9507-0610eb14ba6a" containerName="dnsmasq-dns" Jan 28 17:18:39 crc kubenswrapper[4811]: I0128 17:18:39.649275 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-h8njg" Jan 28 17:18:39 crc kubenswrapper[4811]: I0128 17:18:39.657933 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-h8njg"] Jan 28 17:18:39 crc kubenswrapper[4811]: I0128 17:18:39.685826 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-dd96-account-create-update-vw7j6"] Jan 28 17:18:39 crc kubenswrapper[4811]: I0128 17:18:39.687296 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-dd96-account-create-update-vw7j6" Jan 28 17:18:39 crc kubenswrapper[4811]: I0128 17:18:39.693767 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Jan 28 17:18:39 crc kubenswrapper[4811]: I0128 17:18:39.700741 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-dd96-account-create-update-vw7j6"] Jan 28 17:18:39 crc kubenswrapper[4811]: I0128 17:18:39.734588 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtmjd\" (UniqueName: \"kubernetes.io/projected/6f6c7f6b-5422-45d6-b899-c145e27f5010-kube-api-access-vtmjd\") pod \"placement-db-create-h8njg\" (UID: \"6f6c7f6b-5422-45d6-b899-c145e27f5010\") " pod="openstack/placement-db-create-h8njg" Jan 28 17:18:39 crc kubenswrapper[4811]: I0128 17:18:39.734673 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/14f5c029-7f55-4c4a-a496-a068e9e96521-operator-scripts\") pod \"placement-dd96-account-create-update-vw7j6\" (UID: \"14f5c029-7f55-4c4a-a496-a068e9e96521\") " pod="openstack/placement-dd96-account-create-update-vw7j6" Jan 28 17:18:39 crc kubenswrapper[4811]: I0128 17:18:39.734794 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6f6c7f6b-5422-45d6-b899-c145e27f5010-operator-scripts\") pod \"placement-db-create-h8njg\" (UID: \"6f6c7f6b-5422-45d6-b899-c145e27f5010\") " pod="openstack/placement-db-create-h8njg" Jan 28 17:18:39 crc kubenswrapper[4811]: I0128 17:18:39.734828 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4bmp\" (UniqueName: \"kubernetes.io/projected/14f5c029-7f55-4c4a-a496-a068e9e96521-kube-api-access-d4bmp\") pod \"placement-dd96-account-create-update-vw7j6\" (UID: \"14f5c029-7f55-4c4a-a496-a068e9e96521\") " pod="openstack/placement-dd96-account-create-update-vw7j6" Jan 28 17:18:39 crc kubenswrapper[4811]: I0128 17:18:39.836034 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/14f5c029-7f55-4c4a-a496-a068e9e96521-operator-scripts\") pod \"placement-dd96-account-create-update-vw7j6\" (UID: \"14f5c029-7f55-4c4a-a496-a068e9e96521\") " pod="openstack/placement-dd96-account-create-update-vw7j6" Jan 28 17:18:39 crc kubenswrapper[4811]: I0128 17:18:39.836871 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6f6c7f6b-5422-45d6-b899-c145e27f5010-operator-scripts\") pod \"placement-db-create-h8njg\" (UID: \"6f6c7f6b-5422-45d6-b899-c145e27f5010\") " pod="openstack/placement-db-create-h8njg" Jan 28 17:18:39 crc kubenswrapper[4811]: I0128 17:18:39.836903 4811 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4bmp\" (UniqueName: \"kubernetes.io/projected/14f5c029-7f55-4c4a-a496-a068e9e96521-kube-api-access-d4bmp\") pod \"placement-dd96-account-create-update-vw7j6\" (UID: \"14f5c029-7f55-4c4a-a496-a068e9e96521\") " pod="openstack/placement-dd96-account-create-update-vw7j6" Jan 28 17:18:39 crc kubenswrapper[4811]: I0128 17:18:39.836943 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vtmjd\" (UniqueName: \"kubernetes.io/projected/6f6c7f6b-5422-45d6-b899-c145e27f5010-kube-api-access-vtmjd\") pod \"placement-db-create-h8njg\" (UID: \"6f6c7f6b-5422-45d6-b899-c145e27f5010\") " pod="openstack/placement-db-create-h8njg" Jan 28 17:18:39 crc kubenswrapper[4811]: I0128 17:18:39.836770 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/14f5c029-7f55-4c4a-a496-a068e9e96521-operator-scripts\") pod \"placement-dd96-account-create-update-vw7j6\" (UID: \"14f5c029-7f55-4c4a-a496-a068e9e96521\") " pod="openstack/placement-dd96-account-create-update-vw7j6" Jan 28 17:18:39 crc kubenswrapper[4811]: I0128 17:18:39.837819 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6f6c7f6b-5422-45d6-b899-c145e27f5010-operator-scripts\") pod \"placement-db-create-h8njg\" (UID: \"6f6c7f6b-5422-45d6-b899-c145e27f5010\") " pod="openstack/placement-db-create-h8njg" Jan 28 17:18:39 crc kubenswrapper[4811]: I0128 17:18:39.856251 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vtmjd\" (UniqueName: \"kubernetes.io/projected/6f6c7f6b-5422-45d6-b899-c145e27f5010-kube-api-access-vtmjd\") pod \"placement-db-create-h8njg\" (UID: \"6f6c7f6b-5422-45d6-b899-c145e27f5010\") " pod="openstack/placement-db-create-h8njg" Jan 28 17:18:39 crc kubenswrapper[4811]: I0128 17:18:39.856797 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d4bmp\" (UniqueName: \"kubernetes.io/projected/14f5c029-7f55-4c4a-a496-a068e9e96521-kube-api-access-d4bmp\") pod \"placement-dd96-account-create-update-vw7j6\" (UID: \"14f5c029-7f55-4c4a-a496-a068e9e96521\") " pod="openstack/placement-dd96-account-create-update-vw7j6" Jan 28 17:18:39 crc kubenswrapper[4811]: I0128 17:18:39.971418 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-h8njg" Jan 28 17:18:40 crc kubenswrapper[4811]: I0128 17:18:40.024269 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-dd96-account-create-update-vw7j6" Jan 28 17:18:40 crc kubenswrapper[4811]: I0128 17:18:40.411008 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-h8njg"] Jan 28 17:18:40 crc kubenswrapper[4811]: W0128 17:18:40.411170 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6f6c7f6b_5422_45d6_b899_c145e27f5010.slice/crio-dc4836d142a60a5d6093bf466020967f32bd74b9a3573eeedddb67c8c21d32f9 WatchSource:0}: Error finding container dc4836d142a60a5d6093bf466020967f32bd74b9a3573eeedddb67c8c21d32f9: Status 404 returned error can't find the container with id dc4836d142a60a5d6093bf466020967f32bd74b9a3573eeedddb67c8c21d32f9 Jan 28 17:18:40 crc kubenswrapper[4811]: I0128 17:18:40.517764 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-dd96-account-create-update-vw7j6"] Jan 28 17:18:40 crc kubenswrapper[4811]: W0128 17:18:40.518910 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod14f5c029_7f55_4c4a_a496_a068e9e96521.slice/crio-55b6b6ce7affdbcb2bb068d0515807fc1a1e8d093d076df57296dedb3328b8ff WatchSource:0}: Error finding container 55b6b6ce7affdbcb2bb068d0515807fc1a1e8d093d076df57296dedb3328b8ff: Status 404 returned error can't find the container with id 55b6b6ce7affdbcb2bb068d0515807fc1a1e8d093d076df57296dedb3328b8ff Jan 28 17:18:40 crc kubenswrapper[4811]: I0128 17:18:40.580241 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-h8njg" event={"ID":"6f6c7f6b-5422-45d6-b899-c145e27f5010","Type":"ContainerStarted","Data":"dc4836d142a60a5d6093bf466020967f32bd74b9a3573eeedddb67c8c21d32f9"} Jan 28 17:18:40 crc kubenswrapper[4811]: I0128 17:18:40.582113 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-dd96-account-create-update-vw7j6" event={"ID":"14f5c029-7f55-4c4a-a496-a068e9e96521","Type":"ContainerStarted","Data":"55b6b6ce7affdbcb2bb068d0515807fc1a1e8d093d076df57296dedb3328b8ff"} Jan 28 17:18:41 crc kubenswrapper[4811]: I0128 17:18:41.594983 4811 generic.go:334] "Generic (PLEG): container finished" podID="6f6c7f6b-5422-45d6-b899-c145e27f5010" containerID="f5d84c12962758b719211ea28916931d7d11691f91dee475568b847d80c457ca" exitCode=0 Jan 28 17:18:41 crc kubenswrapper[4811]: I0128 17:18:41.595244 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-h8njg" event={"ID":"6f6c7f6b-5422-45d6-b899-c145e27f5010","Type":"ContainerDied","Data":"f5d84c12962758b719211ea28916931d7d11691f91dee475568b847d80c457ca"} Jan 28 17:18:41 crc kubenswrapper[4811]: I0128 17:18:41.597915 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-dd96-account-create-update-vw7j6" event={"ID":"14f5c029-7f55-4c4a-a496-a068e9e96521","Type":"ContainerStarted","Data":"7e21d6857763ae5b856c90b2d58f5d0619ea75ce87ea67c4aee02eb8127fb13f"} Jan 28 17:18:41 crc kubenswrapper[4811]: I0128 17:18:41.636484 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-dd96-account-create-update-vw7j6" podStartSLOduration=2.636458335 podStartE2EDuration="2.636458335s" podCreationTimestamp="2026-01-28 17:18:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:18:41.630954295 +0000 UTC m=+5614.385317878" 
watchObservedRunningTime="2026-01-28 17:18:41.636458335 +0000 UTC m=+5614.390821918" Jan 28 17:18:42 crc kubenswrapper[4811]: I0128 17:18:42.608339 4811 generic.go:334] "Generic (PLEG): container finished" podID="14f5c029-7f55-4c4a-a496-a068e9e96521" containerID="7e21d6857763ae5b856c90b2d58f5d0619ea75ce87ea67c4aee02eb8127fb13f" exitCode=0 Jan 28 17:18:42 crc kubenswrapper[4811]: I0128 17:18:42.608468 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-dd96-account-create-update-vw7j6" event={"ID":"14f5c029-7f55-4c4a-a496-a068e9e96521","Type":"ContainerDied","Data":"7e21d6857763ae5b856c90b2d58f5d0619ea75ce87ea67c4aee02eb8127fb13f"} Jan 28 17:18:42 crc kubenswrapper[4811]: I0128 17:18:42.927601 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-h8njg" Jan 28 17:18:43 crc kubenswrapper[4811]: I0128 17:18:43.007918 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6f6c7f6b-5422-45d6-b899-c145e27f5010-operator-scripts\") pod \"6f6c7f6b-5422-45d6-b899-c145e27f5010\" (UID: \"6f6c7f6b-5422-45d6-b899-c145e27f5010\") " Jan 28 17:18:43 crc kubenswrapper[4811]: I0128 17:18:43.008001 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vtmjd\" (UniqueName: \"kubernetes.io/projected/6f6c7f6b-5422-45d6-b899-c145e27f5010-kube-api-access-vtmjd\") pod \"6f6c7f6b-5422-45d6-b899-c145e27f5010\" (UID: \"6f6c7f6b-5422-45d6-b899-c145e27f5010\") " Jan 28 17:18:43 crc kubenswrapper[4811]: I0128 17:18:43.008417 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6f6c7f6b-5422-45d6-b899-c145e27f5010-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6f6c7f6b-5422-45d6-b899-c145e27f5010" (UID: "6f6c7f6b-5422-45d6-b899-c145e27f5010"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:18:43 crc kubenswrapper[4811]: I0128 17:18:43.013115 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f6c7f6b-5422-45d6-b899-c145e27f5010-kube-api-access-vtmjd" (OuterVolumeSpecName: "kube-api-access-vtmjd") pod "6f6c7f6b-5422-45d6-b899-c145e27f5010" (UID: "6f6c7f6b-5422-45d6-b899-c145e27f5010"). InnerVolumeSpecName "kube-api-access-vtmjd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:18:43 crc kubenswrapper[4811]: I0128 17:18:43.110861 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6f6c7f6b-5422-45d6-b899-c145e27f5010-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:43 crc kubenswrapper[4811]: I0128 17:18:43.110929 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vtmjd\" (UniqueName: \"kubernetes.io/projected/6f6c7f6b-5422-45d6-b899-c145e27f5010-kube-api-access-vtmjd\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:43 crc kubenswrapper[4811]: I0128 17:18:43.618474 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-h8njg" event={"ID":"6f6c7f6b-5422-45d6-b899-c145e27f5010","Type":"ContainerDied","Data":"dc4836d142a60a5d6093bf466020967f32bd74b9a3573eeedddb67c8c21d32f9"} Jan 28 17:18:43 crc kubenswrapper[4811]: I0128 17:18:43.620621 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dc4836d142a60a5d6093bf466020967f32bd74b9a3573eeedddb67c8c21d32f9" Jan 28 17:18:43 crc kubenswrapper[4811]: I0128 17:18:43.618493 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-h8njg" Jan 28 17:18:43 crc kubenswrapper[4811]: I0128 17:18:43.940132 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-dd96-account-create-update-vw7j6" Jan 28 17:18:44 crc kubenswrapper[4811]: I0128 17:18:44.026174 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/14f5c029-7f55-4c4a-a496-a068e9e96521-operator-scripts\") pod \"14f5c029-7f55-4c4a-a496-a068e9e96521\" (UID: \"14f5c029-7f55-4c4a-a496-a068e9e96521\") " Jan 28 17:18:44 crc kubenswrapper[4811]: I0128 17:18:44.027479 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4bmp\" (UniqueName: \"kubernetes.io/projected/14f5c029-7f55-4c4a-a496-a068e9e96521-kube-api-access-d4bmp\") pod \"14f5c029-7f55-4c4a-a496-a068e9e96521\" (UID: \"14f5c029-7f55-4c4a-a496-a068e9e96521\") " Jan 28 17:18:44 crc kubenswrapper[4811]: I0128 17:18:44.027324 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14f5c029-7f55-4c4a-a496-a068e9e96521-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "14f5c029-7f55-4c4a-a496-a068e9e96521" (UID: "14f5c029-7f55-4c4a-a496-a068e9e96521"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:18:44 crc kubenswrapper[4811]: I0128 17:18:44.029216 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/14f5c029-7f55-4c4a-a496-a068e9e96521-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:44 crc kubenswrapper[4811]: I0128 17:18:44.042747 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14f5c029-7f55-4c4a-a496-a068e9e96521-kube-api-access-d4bmp" (OuterVolumeSpecName: "kube-api-access-d4bmp") pod "14f5c029-7f55-4c4a-a496-a068e9e96521" (UID: "14f5c029-7f55-4c4a-a496-a068e9e96521"). InnerVolumeSpecName "kube-api-access-d4bmp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:18:44 crc kubenswrapper[4811]: I0128 17:18:44.131242 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4bmp\" (UniqueName: \"kubernetes.io/projected/14f5c029-7f55-4c4a-a496-a068e9e96521-kube-api-access-d4bmp\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:44 crc kubenswrapper[4811]: I0128 17:18:44.628583 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-dd96-account-create-update-vw7j6" event={"ID":"14f5c029-7f55-4c4a-a496-a068e9e96521","Type":"ContainerDied","Data":"55b6b6ce7affdbcb2bb068d0515807fc1a1e8d093d076df57296dedb3328b8ff"} Jan 28 17:18:44 crc kubenswrapper[4811]: I0128 17:18:44.628644 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="55b6b6ce7affdbcb2bb068d0515807fc1a1e8d093d076df57296dedb3328b8ff" Jan 28 17:18:44 crc kubenswrapper[4811]: I0128 17:18:44.628821 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-dd96-account-create-update-vw7j6" Jan 28 17:18:49 crc kubenswrapper[4811]: I0128 17:18:49.987657 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6dbc457d5-xbhz5"] Jan 28 17:18:49 crc kubenswrapper[4811]: E0128 17:18:49.988664 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14f5c029-7f55-4c4a-a496-a068e9e96521" containerName="mariadb-account-create-update" Jan 28 17:18:49 crc kubenswrapper[4811]: I0128 17:18:49.988681 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="14f5c029-7f55-4c4a-a496-a068e9e96521" containerName="mariadb-account-create-update" Jan 28 17:18:49 crc kubenswrapper[4811]: E0128 17:18:49.988700 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f6c7f6b-5422-45d6-b899-c145e27f5010" containerName="mariadb-database-create" Jan 28 17:18:49 crc kubenswrapper[4811]: I0128 17:18:49.988708 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f6c7f6b-5422-45d6-b899-c145e27f5010" containerName="mariadb-database-create" Jan 28 17:18:49 crc kubenswrapper[4811]: I0128 17:18:49.988925 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="14f5c029-7f55-4c4a-a496-a068e9e96521" containerName="mariadb-account-create-update" Jan 28 17:18:49 crc kubenswrapper[4811]: I0128 17:18:49.988953 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f6c7f6b-5422-45d6-b899-c145e27f5010" containerName="mariadb-database-create" Jan 28 17:18:49 crc kubenswrapper[4811]: I0128 17:18:49.990009 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6dbc457d5-xbhz5" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.004492 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6dbc457d5-xbhz5"] Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.037142 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-zrw2c"] Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.038718 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-zrw2c" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.044094 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/225db1e0-fddb-4f42-95fe-2a44bdbec853-ovsdbserver-nb\") pod \"dnsmasq-dns-6dbc457d5-xbhz5\" (UID: \"225db1e0-fddb-4f42-95fe-2a44bdbec853\") " pod="openstack/dnsmasq-dns-6dbc457d5-xbhz5" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.044167 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5w4v9\" (UniqueName: \"kubernetes.io/projected/225db1e0-fddb-4f42-95fe-2a44bdbec853-kube-api-access-5w4v9\") pod \"dnsmasq-dns-6dbc457d5-xbhz5\" (UID: \"225db1e0-fddb-4f42-95fe-2a44bdbec853\") " pod="openstack/dnsmasq-dns-6dbc457d5-xbhz5" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.044278 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/225db1e0-fddb-4f42-95fe-2a44bdbec853-config\") pod \"dnsmasq-dns-6dbc457d5-xbhz5\" (UID: \"225db1e0-fddb-4f42-95fe-2a44bdbec853\") " pod="openstack/dnsmasq-dns-6dbc457d5-xbhz5" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.044361 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/225db1e0-fddb-4f42-95fe-2a44bdbec853-dns-svc\") pod \"dnsmasq-dns-6dbc457d5-xbhz5\" (UID: \"225db1e0-fddb-4f42-95fe-2a44bdbec853\") " pod="openstack/dnsmasq-dns-6dbc457d5-xbhz5" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.044391 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/225db1e0-fddb-4f42-95fe-2a44bdbec853-ovsdbserver-sb\") pod \"dnsmasq-dns-6dbc457d5-xbhz5\" (UID: \"225db1e0-fddb-4f42-95fe-2a44bdbec853\") " pod="openstack/dnsmasq-dns-6dbc457d5-xbhz5" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.046835 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.047153 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.047310 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-kt5gp" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.082144 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-zrw2c"] Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.145704 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwntr\" (UniqueName: \"kubernetes.io/projected/e3320608-2d14-467b-9fc6-5aece23f7f84-kube-api-access-vwntr\") pod \"placement-db-sync-zrw2c\" (UID: \"e3320608-2d14-467b-9fc6-5aece23f7f84\") " pod="openstack/placement-db-sync-zrw2c" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.145939 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3320608-2d14-467b-9fc6-5aece23f7f84-scripts\") pod \"placement-db-sync-zrw2c\" (UID: \"e3320608-2d14-467b-9fc6-5aece23f7f84\") " pod="openstack/placement-db-sync-zrw2c" Jan 28 17:18:50 
crc kubenswrapper[4811]: I0128 17:18:50.146069 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/225db1e0-fddb-4f42-95fe-2a44bdbec853-config\") pod \"dnsmasq-dns-6dbc457d5-xbhz5\" (UID: \"225db1e0-fddb-4f42-95fe-2a44bdbec853\") " pod="openstack/dnsmasq-dns-6dbc457d5-xbhz5" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.146157 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3320608-2d14-467b-9fc6-5aece23f7f84-config-data\") pod \"placement-db-sync-zrw2c\" (UID: \"e3320608-2d14-467b-9fc6-5aece23f7f84\") " pod="openstack/placement-db-sync-zrw2c" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.146234 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3320608-2d14-467b-9fc6-5aece23f7f84-logs\") pod \"placement-db-sync-zrw2c\" (UID: \"e3320608-2d14-467b-9fc6-5aece23f7f84\") " pod="openstack/placement-db-sync-zrw2c" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.146313 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3320608-2d14-467b-9fc6-5aece23f7f84-combined-ca-bundle\") pod \"placement-db-sync-zrw2c\" (UID: \"e3320608-2d14-467b-9fc6-5aece23f7f84\") " pod="openstack/placement-db-sync-zrw2c" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.146391 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/225db1e0-fddb-4f42-95fe-2a44bdbec853-dns-svc\") pod \"dnsmasq-dns-6dbc457d5-xbhz5\" (UID: \"225db1e0-fddb-4f42-95fe-2a44bdbec853\") " pod="openstack/dnsmasq-dns-6dbc457d5-xbhz5" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.146528 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/225db1e0-fddb-4f42-95fe-2a44bdbec853-ovsdbserver-sb\") pod \"dnsmasq-dns-6dbc457d5-xbhz5\" (UID: \"225db1e0-fddb-4f42-95fe-2a44bdbec853\") " pod="openstack/dnsmasq-dns-6dbc457d5-xbhz5" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.146629 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/225db1e0-fddb-4f42-95fe-2a44bdbec853-ovsdbserver-nb\") pod \"dnsmasq-dns-6dbc457d5-xbhz5\" (UID: \"225db1e0-fddb-4f42-95fe-2a44bdbec853\") " pod="openstack/dnsmasq-dns-6dbc457d5-xbhz5" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.146704 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5w4v9\" (UniqueName: \"kubernetes.io/projected/225db1e0-fddb-4f42-95fe-2a44bdbec853-kube-api-access-5w4v9\") pod \"dnsmasq-dns-6dbc457d5-xbhz5\" (UID: \"225db1e0-fddb-4f42-95fe-2a44bdbec853\") " pod="openstack/dnsmasq-dns-6dbc457d5-xbhz5" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.148040 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/225db1e0-fddb-4f42-95fe-2a44bdbec853-dns-svc\") pod \"dnsmasq-dns-6dbc457d5-xbhz5\" (UID: \"225db1e0-fddb-4f42-95fe-2a44bdbec853\") " pod="openstack/dnsmasq-dns-6dbc457d5-xbhz5" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.148274 4811 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/225db1e0-fddb-4f42-95fe-2a44bdbec853-ovsdbserver-sb\") pod \"dnsmasq-dns-6dbc457d5-xbhz5\" (UID: \"225db1e0-fddb-4f42-95fe-2a44bdbec853\") " pod="openstack/dnsmasq-dns-6dbc457d5-xbhz5" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.148741 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/225db1e0-fddb-4f42-95fe-2a44bdbec853-ovsdbserver-nb\") pod \"dnsmasq-dns-6dbc457d5-xbhz5\" (UID: \"225db1e0-fddb-4f42-95fe-2a44bdbec853\") " pod="openstack/dnsmasq-dns-6dbc457d5-xbhz5" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.149506 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/225db1e0-fddb-4f42-95fe-2a44bdbec853-config\") pod \"dnsmasq-dns-6dbc457d5-xbhz5\" (UID: \"225db1e0-fddb-4f42-95fe-2a44bdbec853\") " pod="openstack/dnsmasq-dns-6dbc457d5-xbhz5" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.167380 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5w4v9\" (UniqueName: \"kubernetes.io/projected/225db1e0-fddb-4f42-95fe-2a44bdbec853-kube-api-access-5w4v9\") pod \"dnsmasq-dns-6dbc457d5-xbhz5\" (UID: \"225db1e0-fddb-4f42-95fe-2a44bdbec853\") " pod="openstack/dnsmasq-dns-6dbc457d5-xbhz5" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.249437 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3320608-2d14-467b-9fc6-5aece23f7f84-scripts\") pod \"placement-db-sync-zrw2c\" (UID: \"e3320608-2d14-467b-9fc6-5aece23f7f84\") " pod="openstack/placement-db-sync-zrw2c" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.249709 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3320608-2d14-467b-9fc6-5aece23f7f84-config-data\") pod \"placement-db-sync-zrw2c\" (UID: \"e3320608-2d14-467b-9fc6-5aece23f7f84\") " pod="openstack/placement-db-sync-zrw2c" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.249789 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3320608-2d14-467b-9fc6-5aece23f7f84-logs\") pod \"placement-db-sync-zrw2c\" (UID: \"e3320608-2d14-467b-9fc6-5aece23f7f84\") " pod="openstack/placement-db-sync-zrw2c" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.249867 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3320608-2d14-467b-9fc6-5aece23f7f84-combined-ca-bundle\") pod \"placement-db-sync-zrw2c\" (UID: \"e3320608-2d14-467b-9fc6-5aece23f7f84\") " pod="openstack/placement-db-sync-zrw2c" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.249997 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwntr\" (UniqueName: \"kubernetes.io/projected/e3320608-2d14-467b-9fc6-5aece23f7f84-kube-api-access-vwntr\") pod \"placement-db-sync-zrw2c\" (UID: \"e3320608-2d14-467b-9fc6-5aece23f7f84\") " pod="openstack/placement-db-sync-zrw2c" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.250827 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3320608-2d14-467b-9fc6-5aece23f7f84-logs\") pod \"placement-db-sync-zrw2c\" (UID: 
\"e3320608-2d14-467b-9fc6-5aece23f7f84\") " pod="openstack/placement-db-sync-zrw2c" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.253229 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3320608-2d14-467b-9fc6-5aece23f7f84-scripts\") pod \"placement-db-sync-zrw2c\" (UID: \"e3320608-2d14-467b-9fc6-5aece23f7f84\") " pod="openstack/placement-db-sync-zrw2c" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.260025 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3320608-2d14-467b-9fc6-5aece23f7f84-combined-ca-bundle\") pod \"placement-db-sync-zrw2c\" (UID: \"e3320608-2d14-467b-9fc6-5aece23f7f84\") " pod="openstack/placement-db-sync-zrw2c" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.261993 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3320608-2d14-467b-9fc6-5aece23f7f84-config-data\") pod \"placement-db-sync-zrw2c\" (UID: \"e3320608-2d14-467b-9fc6-5aece23f7f84\") " pod="openstack/placement-db-sync-zrw2c" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.267722 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwntr\" (UniqueName: \"kubernetes.io/projected/e3320608-2d14-467b-9fc6-5aece23f7f84-kube-api-access-vwntr\") pod \"placement-db-sync-zrw2c\" (UID: \"e3320608-2d14-467b-9fc6-5aece23f7f84\") " pod="openstack/placement-db-sync-zrw2c" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.323963 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6dbc457d5-xbhz5" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.370403 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-zrw2c" Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.778469 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6dbc457d5-xbhz5"] Jan 28 17:18:50 crc kubenswrapper[4811]: I0128 17:18:50.906979 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-zrw2c"] Jan 28 17:18:51 crc kubenswrapper[4811]: I0128 17:18:51.690303 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-zrw2c" event={"ID":"e3320608-2d14-467b-9fc6-5aece23f7f84","Type":"ContainerStarted","Data":"f90a5112f9c65145397f562ff177bbdd19806b46d91120f6746b150f0c72fc99"} Jan 28 17:18:51 crc kubenswrapper[4811]: I0128 17:18:51.691748 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-zrw2c" event={"ID":"e3320608-2d14-467b-9fc6-5aece23f7f84","Type":"ContainerStarted","Data":"c841b1f4b2228b593bc6670419402ece45fac831835660fce133c131491bbefd"} Jan 28 17:18:51 crc kubenswrapper[4811]: I0128 17:18:51.693278 4811 generic.go:334] "Generic (PLEG): container finished" podID="225db1e0-fddb-4f42-95fe-2a44bdbec853" containerID="8e969f58d02be96aa11a7d54549708823f2ea7384b663c92d0f65e22646658be" exitCode=0 Jan 28 17:18:51 crc kubenswrapper[4811]: I0128 17:18:51.693318 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dbc457d5-xbhz5" event={"ID":"225db1e0-fddb-4f42-95fe-2a44bdbec853","Type":"ContainerDied","Data":"8e969f58d02be96aa11a7d54549708823f2ea7384b663c92d0f65e22646658be"} Jan 28 17:18:51 crc kubenswrapper[4811]: I0128 17:18:51.693343 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dbc457d5-xbhz5" event={"ID":"225db1e0-fddb-4f42-95fe-2a44bdbec853","Type":"ContainerStarted","Data":"b0b3e818a7b8aabdc9339e7e171b369db51d5db81d5746b9fe87408b1ca22215"} Jan 28 17:18:51 crc kubenswrapper[4811]: I0128 17:18:51.712841 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-zrw2c" podStartSLOduration=2.712818305 podStartE2EDuration="2.712818305s" podCreationTimestamp="2026-01-28 17:18:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:18:51.708741624 +0000 UTC m=+5624.463105207" watchObservedRunningTime="2026-01-28 17:18:51.712818305 +0000 UTC m=+5624.467181888" Jan 28 17:18:52 crc kubenswrapper[4811]: I0128 17:18:52.703840 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dbc457d5-xbhz5" event={"ID":"225db1e0-fddb-4f42-95fe-2a44bdbec853","Type":"ContainerStarted","Data":"dadb37bfcbd13c8eef53a93b7d3f1d952582b86dca11c3ba39b9c9460fb73108"} Jan 28 17:18:52 crc kubenswrapper[4811]: I0128 17:18:52.704201 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6dbc457d5-xbhz5" Jan 28 17:18:52 crc kubenswrapper[4811]: I0128 17:18:52.706297 4811 generic.go:334] "Generic (PLEG): container finished" podID="e3320608-2d14-467b-9fc6-5aece23f7f84" containerID="f90a5112f9c65145397f562ff177bbdd19806b46d91120f6746b150f0c72fc99" exitCode=0 Jan 28 17:18:52 crc kubenswrapper[4811]: I0128 17:18:52.706360 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-zrw2c" event={"ID":"e3320608-2d14-467b-9fc6-5aece23f7f84","Type":"ContainerDied","Data":"f90a5112f9c65145397f562ff177bbdd19806b46d91120f6746b150f0c72fc99"} Jan 28 17:18:52 crc kubenswrapper[4811]: I0128 
17:18:52.727284 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6dbc457d5-xbhz5" podStartSLOduration=3.727260244 podStartE2EDuration="3.727260244s" podCreationTimestamp="2026-01-28 17:18:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:18:52.722499834 +0000 UTC m=+5625.476863447" watchObservedRunningTime="2026-01-28 17:18:52.727260244 +0000 UTC m=+5625.481623827" Jan 28 17:18:54 crc kubenswrapper[4811]: I0128 17:18:54.101208 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-zrw2c" Jan 28 17:18:54 crc kubenswrapper[4811]: I0128 17:18:54.230106 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3320608-2d14-467b-9fc6-5aece23f7f84-logs\") pod \"e3320608-2d14-467b-9fc6-5aece23f7f84\" (UID: \"e3320608-2d14-467b-9fc6-5aece23f7f84\") " Jan 28 17:18:54 crc kubenswrapper[4811]: I0128 17:18:54.230299 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3320608-2d14-467b-9fc6-5aece23f7f84-config-data\") pod \"e3320608-2d14-467b-9fc6-5aece23f7f84\" (UID: \"e3320608-2d14-467b-9fc6-5aece23f7f84\") " Jan 28 17:18:54 crc kubenswrapper[4811]: I0128 17:18:54.230331 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vwntr\" (UniqueName: \"kubernetes.io/projected/e3320608-2d14-467b-9fc6-5aece23f7f84-kube-api-access-vwntr\") pod \"e3320608-2d14-467b-9fc6-5aece23f7f84\" (UID: \"e3320608-2d14-467b-9fc6-5aece23f7f84\") " Jan 28 17:18:54 crc kubenswrapper[4811]: I0128 17:18:54.230396 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3320608-2d14-467b-9fc6-5aece23f7f84-scripts\") pod \"e3320608-2d14-467b-9fc6-5aece23f7f84\" (UID: \"e3320608-2d14-467b-9fc6-5aece23f7f84\") " Jan 28 17:18:54 crc kubenswrapper[4811]: I0128 17:18:54.230462 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3320608-2d14-467b-9fc6-5aece23f7f84-combined-ca-bundle\") pod \"e3320608-2d14-467b-9fc6-5aece23f7f84\" (UID: \"e3320608-2d14-467b-9fc6-5aece23f7f84\") " Jan 28 17:18:54 crc kubenswrapper[4811]: I0128 17:18:54.231608 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e3320608-2d14-467b-9fc6-5aece23f7f84-logs" (OuterVolumeSpecName: "logs") pod "e3320608-2d14-467b-9fc6-5aece23f7f84" (UID: "e3320608-2d14-467b-9fc6-5aece23f7f84"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:18:54 crc kubenswrapper[4811]: I0128 17:18:54.236579 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3320608-2d14-467b-9fc6-5aece23f7f84-kube-api-access-vwntr" (OuterVolumeSpecName: "kube-api-access-vwntr") pod "e3320608-2d14-467b-9fc6-5aece23f7f84" (UID: "e3320608-2d14-467b-9fc6-5aece23f7f84"). InnerVolumeSpecName "kube-api-access-vwntr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:18:54 crc kubenswrapper[4811]: I0128 17:18:54.237666 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3320608-2d14-467b-9fc6-5aece23f7f84-scripts" (OuterVolumeSpecName: "scripts") pod "e3320608-2d14-467b-9fc6-5aece23f7f84" (UID: "e3320608-2d14-467b-9fc6-5aece23f7f84"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:18:54 crc kubenswrapper[4811]: I0128 17:18:54.254522 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3320608-2d14-467b-9fc6-5aece23f7f84-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e3320608-2d14-467b-9fc6-5aece23f7f84" (UID: "e3320608-2d14-467b-9fc6-5aece23f7f84"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:18:54 crc kubenswrapper[4811]: I0128 17:18:54.260013 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3320608-2d14-467b-9fc6-5aece23f7f84-config-data" (OuterVolumeSpecName: "config-data") pod "e3320608-2d14-467b-9fc6-5aece23f7f84" (UID: "e3320608-2d14-467b-9fc6-5aece23f7f84"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:18:54 crc kubenswrapper[4811]: I0128 17:18:54.332348 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3320608-2d14-467b-9fc6-5aece23f7f84-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:54 crc kubenswrapper[4811]: I0128 17:18:54.332394 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3320608-2d14-467b-9fc6-5aece23f7f84-logs\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:54 crc kubenswrapper[4811]: I0128 17:18:54.332403 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3320608-2d14-467b-9fc6-5aece23f7f84-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:54 crc kubenswrapper[4811]: I0128 17:18:54.332411 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vwntr\" (UniqueName: \"kubernetes.io/projected/e3320608-2d14-467b-9fc6-5aece23f7f84-kube-api-access-vwntr\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:54 crc kubenswrapper[4811]: I0128 17:18:54.332422 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3320608-2d14-467b-9fc6-5aece23f7f84-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:18:54 crc kubenswrapper[4811]: I0128 17:18:54.727945 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-zrw2c" event={"ID":"e3320608-2d14-467b-9fc6-5aece23f7f84","Type":"ContainerDied","Data":"c841b1f4b2228b593bc6670419402ece45fac831835660fce133c131491bbefd"} Jan 28 17:18:54 crc kubenswrapper[4811]: I0128 17:18:54.727988 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-zrw2c" Jan 28 17:18:54 crc kubenswrapper[4811]: I0128 17:18:54.727995 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c841b1f4b2228b593bc6670419402ece45fac831835660fce133c131491bbefd" Jan 28 17:18:54 crc kubenswrapper[4811]: I0128 17:18:54.892485 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-7f44878d9d-49bh8"] Jan 28 17:18:54 crc kubenswrapper[4811]: E0128 17:18:54.893040 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3320608-2d14-467b-9fc6-5aece23f7f84" containerName="placement-db-sync" Jan 28 17:18:54 crc kubenswrapper[4811]: I0128 17:18:54.893066 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3320608-2d14-467b-9fc6-5aece23f7f84" containerName="placement-db-sync" Jan 28 17:18:54 crc kubenswrapper[4811]: I0128 17:18:54.893295 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3320608-2d14-467b-9fc6-5aece23f7f84" containerName="placement-db-sync" Jan 28 17:18:54 crc kubenswrapper[4811]: I0128 17:18:54.894710 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-7f44878d9d-49bh8" Jan 28 17:18:54 crc kubenswrapper[4811]: I0128 17:18:54.897473 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Jan 28 17:18:54 crc kubenswrapper[4811]: I0128 17:18:54.897790 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Jan 28 17:18:54 crc kubenswrapper[4811]: I0128 17:18:54.899163 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-kt5gp" Jan 28 17:18:54 crc kubenswrapper[4811]: I0128 17:18:54.913090 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-7f44878d9d-49bh8"] Jan 28 17:18:55 crc kubenswrapper[4811]: I0128 17:18:55.050352 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2fede79a-e5e1-4a4d-93d0-de61e60c2e43-logs\") pod \"placement-7f44878d9d-49bh8\" (UID: \"2fede79a-e5e1-4a4d-93d0-de61e60c2e43\") " pod="openstack/placement-7f44878d9d-49bh8" Jan 28 17:18:55 crc kubenswrapper[4811]: I0128 17:18:55.051135 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2fede79a-e5e1-4a4d-93d0-de61e60c2e43-scripts\") pod \"placement-7f44878d9d-49bh8\" (UID: \"2fede79a-e5e1-4a4d-93d0-de61e60c2e43\") " pod="openstack/placement-7f44878d9d-49bh8" Jan 28 17:18:55 crc kubenswrapper[4811]: I0128 17:18:55.051289 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fede79a-e5e1-4a4d-93d0-de61e60c2e43-config-data\") pod \"placement-7f44878d9d-49bh8\" (UID: \"2fede79a-e5e1-4a4d-93d0-de61e60c2e43\") " pod="openstack/placement-7f44878d9d-49bh8" Jan 28 17:18:55 crc kubenswrapper[4811]: I0128 17:18:55.051485 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2fede79a-e5e1-4a4d-93d0-de61e60c2e43-combined-ca-bundle\") pod \"placement-7f44878d9d-49bh8\" (UID: \"2fede79a-e5e1-4a4d-93d0-de61e60c2e43\") " pod="openstack/placement-7f44878d9d-49bh8" Jan 28 17:18:55 crc kubenswrapper[4811]: I0128 17:18:55.051664 4811 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6bj5\" (UniqueName: \"kubernetes.io/projected/2fede79a-e5e1-4a4d-93d0-de61e60c2e43-kube-api-access-f6bj5\") pod \"placement-7f44878d9d-49bh8\" (UID: \"2fede79a-e5e1-4a4d-93d0-de61e60c2e43\") " pod="openstack/placement-7f44878d9d-49bh8" Jan 28 17:18:55 crc kubenswrapper[4811]: I0128 17:18:55.153875 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6bj5\" (UniqueName: \"kubernetes.io/projected/2fede79a-e5e1-4a4d-93d0-de61e60c2e43-kube-api-access-f6bj5\") pod \"placement-7f44878d9d-49bh8\" (UID: \"2fede79a-e5e1-4a4d-93d0-de61e60c2e43\") " pod="openstack/placement-7f44878d9d-49bh8" Jan 28 17:18:55 crc kubenswrapper[4811]: I0128 17:18:55.153936 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2fede79a-e5e1-4a4d-93d0-de61e60c2e43-logs\") pod \"placement-7f44878d9d-49bh8\" (UID: \"2fede79a-e5e1-4a4d-93d0-de61e60c2e43\") " pod="openstack/placement-7f44878d9d-49bh8" Jan 28 17:18:55 crc kubenswrapper[4811]: I0128 17:18:55.154051 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2fede79a-e5e1-4a4d-93d0-de61e60c2e43-scripts\") pod \"placement-7f44878d9d-49bh8\" (UID: \"2fede79a-e5e1-4a4d-93d0-de61e60c2e43\") " pod="openstack/placement-7f44878d9d-49bh8" Jan 28 17:18:55 crc kubenswrapper[4811]: I0128 17:18:55.154080 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fede79a-e5e1-4a4d-93d0-de61e60c2e43-config-data\") pod \"placement-7f44878d9d-49bh8\" (UID: \"2fede79a-e5e1-4a4d-93d0-de61e60c2e43\") " pod="openstack/placement-7f44878d9d-49bh8" Jan 28 17:18:55 crc kubenswrapper[4811]: I0128 17:18:55.154107 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2fede79a-e5e1-4a4d-93d0-de61e60c2e43-combined-ca-bundle\") pod \"placement-7f44878d9d-49bh8\" (UID: \"2fede79a-e5e1-4a4d-93d0-de61e60c2e43\") " pod="openstack/placement-7f44878d9d-49bh8" Jan 28 17:18:55 crc kubenswrapper[4811]: I0128 17:18:55.154469 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2fede79a-e5e1-4a4d-93d0-de61e60c2e43-logs\") pod \"placement-7f44878d9d-49bh8\" (UID: \"2fede79a-e5e1-4a4d-93d0-de61e60c2e43\") " pod="openstack/placement-7f44878d9d-49bh8" Jan 28 17:18:55 crc kubenswrapper[4811]: I0128 17:18:55.160505 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fede79a-e5e1-4a4d-93d0-de61e60c2e43-config-data\") pod \"placement-7f44878d9d-49bh8\" (UID: \"2fede79a-e5e1-4a4d-93d0-de61e60c2e43\") " pod="openstack/placement-7f44878d9d-49bh8" Jan 28 17:18:55 crc kubenswrapper[4811]: I0128 17:18:55.163915 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2fede79a-e5e1-4a4d-93d0-de61e60c2e43-combined-ca-bundle\") pod \"placement-7f44878d9d-49bh8\" (UID: \"2fede79a-e5e1-4a4d-93d0-de61e60c2e43\") " pod="openstack/placement-7f44878d9d-49bh8" Jan 28 17:18:55 crc kubenswrapper[4811]: I0128 17:18:55.164238 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/2fede79a-e5e1-4a4d-93d0-de61e60c2e43-scripts\") pod \"placement-7f44878d9d-49bh8\" (UID: \"2fede79a-e5e1-4a4d-93d0-de61e60c2e43\") " pod="openstack/placement-7f44878d9d-49bh8" Jan 28 17:18:55 crc kubenswrapper[4811]: I0128 17:18:55.172880 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6bj5\" (UniqueName: \"kubernetes.io/projected/2fede79a-e5e1-4a4d-93d0-de61e60c2e43-kube-api-access-f6bj5\") pod \"placement-7f44878d9d-49bh8\" (UID: \"2fede79a-e5e1-4a4d-93d0-de61e60c2e43\") " pod="openstack/placement-7f44878d9d-49bh8" Jan 28 17:18:55 crc kubenswrapper[4811]: I0128 17:18:55.215826 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-7f44878d9d-49bh8" Jan 28 17:18:55 crc kubenswrapper[4811]: I0128 17:18:55.602929 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-7f44878d9d-49bh8"] Jan 28 17:18:55 crc kubenswrapper[4811]: W0128 17:18:55.644476 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2fede79a_e5e1_4a4d_93d0_de61e60c2e43.slice/crio-6279f6ca05d271d1c1f7c0a9dfe7eeccbe75621a9871db42ef2de66bbb65160f WatchSource:0}: Error finding container 6279f6ca05d271d1c1f7c0a9dfe7eeccbe75621a9871db42ef2de66bbb65160f: Status 404 returned error can't find the container with id 6279f6ca05d271d1c1f7c0a9dfe7eeccbe75621a9871db42ef2de66bbb65160f Jan 28 17:18:55 crc kubenswrapper[4811]: I0128 17:18:55.737817 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7f44878d9d-49bh8" event={"ID":"2fede79a-e5e1-4a4d-93d0-de61e60c2e43","Type":"ContainerStarted","Data":"6279f6ca05d271d1c1f7c0a9dfe7eeccbe75621a9871db42ef2de66bbb65160f"} Jan 28 17:18:56 crc kubenswrapper[4811]: I0128 17:18:56.749392 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7f44878d9d-49bh8" event={"ID":"2fede79a-e5e1-4a4d-93d0-de61e60c2e43","Type":"ContainerStarted","Data":"dba50f861fc8d8763eeee4a9c25a60cfe691c6138e7911d2852d48ba65def12c"} Jan 28 17:18:56 crc kubenswrapper[4811]: I0128 17:18:56.749612 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7f44878d9d-49bh8" event={"ID":"2fede79a-e5e1-4a4d-93d0-de61e60c2e43","Type":"ContainerStarted","Data":"3c8f366e480700da74f615d3572d29a1e4209a475e20ed112be7acc1c93ae4b2"} Jan 28 17:18:56 crc kubenswrapper[4811]: I0128 17:18:56.780892 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-7f44878d9d-49bh8" podStartSLOduration=2.780863814 podStartE2EDuration="2.780863814s" podCreationTimestamp="2026-01-28 17:18:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:18:56.768017934 +0000 UTC m=+5629.522381517" watchObservedRunningTime="2026-01-28 17:18:56.780863814 +0000 UTC m=+5629.535227407" Jan 28 17:18:57 crc kubenswrapper[4811]: I0128 17:18:57.757922 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-7f44878d9d-49bh8" Jan 28 17:18:57 crc kubenswrapper[4811]: I0128 17:18:57.758295 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-7f44878d9d-49bh8" Jan 28 17:19:00 crc kubenswrapper[4811]: I0128 17:19:00.326812 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6dbc457d5-xbhz5" Jan 28 17:19:00 crc kubenswrapper[4811]: 
Jan 28 17:19:00 crc kubenswrapper[4811]: I0128 17:19:00.398072 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-857b556495-wb2l9"]
Jan 28 17:19:00 crc kubenswrapper[4811]: I0128 17:19:00.398361 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-857b556495-wb2l9" podUID="2cd0bfee-b71c-4898-bc28-8ec123f17177" containerName="dnsmasq-dns" containerID="cri-o://941e02617f279548eec7d269deda6b51a6325179296377fe72f3286b85dc5c98" gracePeriod=10
Jan 28 17:19:00 crc kubenswrapper[4811]: I0128 17:19:00.807693 4811 generic.go:334] "Generic (PLEG): container finished" podID="2cd0bfee-b71c-4898-bc28-8ec123f17177" containerID="941e02617f279548eec7d269deda6b51a6325179296377fe72f3286b85dc5c98" exitCode=0
Jan 28 17:19:00 crc kubenswrapper[4811]: I0128 17:19:00.807874 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-857b556495-wb2l9" event={"ID":"2cd0bfee-b71c-4898-bc28-8ec123f17177","Type":"ContainerDied","Data":"941e02617f279548eec7d269deda6b51a6325179296377fe72f3286b85dc5c98"}
Jan 28 17:19:00 crc kubenswrapper[4811]: I0128 17:19:00.936074 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-857b556495-wb2l9"
Jan 28 17:19:01 crc kubenswrapper[4811]: I0128 17:19:01.081306 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2cd0bfee-b71c-4898-bc28-8ec123f17177-ovsdbserver-nb\") pod \"2cd0bfee-b71c-4898-bc28-8ec123f17177\" (UID: \"2cd0bfee-b71c-4898-bc28-8ec123f17177\") "
Jan 28 17:19:01 crc kubenswrapper[4811]: I0128 17:19:01.081835 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2cd0bfee-b71c-4898-bc28-8ec123f17177-ovsdbserver-sb\") pod \"2cd0bfee-b71c-4898-bc28-8ec123f17177\" (UID: \"2cd0bfee-b71c-4898-bc28-8ec123f17177\") "
Jan 28 17:19:01 crc kubenswrapper[4811]: I0128 17:19:01.082418 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2cd0bfee-b71c-4898-bc28-8ec123f17177-config\") pod \"2cd0bfee-b71c-4898-bc28-8ec123f17177\" (UID: \"2cd0bfee-b71c-4898-bc28-8ec123f17177\") "
Jan 28 17:19:01 crc kubenswrapper[4811]: I0128 17:19:01.082586 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pc7hs\" (UniqueName: \"kubernetes.io/projected/2cd0bfee-b71c-4898-bc28-8ec123f17177-kube-api-access-pc7hs\") pod \"2cd0bfee-b71c-4898-bc28-8ec123f17177\" (UID: \"2cd0bfee-b71c-4898-bc28-8ec123f17177\") "
Jan 28 17:19:01 crc kubenswrapper[4811]: I0128 17:19:01.082714 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2cd0bfee-b71c-4898-bc28-8ec123f17177-dns-svc\") pod \"2cd0bfee-b71c-4898-bc28-8ec123f17177\" (UID: \"2cd0bfee-b71c-4898-bc28-8ec123f17177\") "
Jan 28 17:19:01 crc kubenswrapper[4811]: I0128 17:19:01.088567 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2cd0bfee-b71c-4898-bc28-8ec123f17177-kube-api-access-pc7hs" (OuterVolumeSpecName: "kube-api-access-pc7hs") pod "2cd0bfee-b71c-4898-bc28-8ec123f17177" (UID: "2cd0bfee-b71c-4898-bc28-8ec123f17177"). InnerVolumeSpecName "kube-api-access-pc7hs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 17:19:01 crc kubenswrapper[4811]: I0128 17:19:01.130876 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2cd0bfee-b71c-4898-bc28-8ec123f17177-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2cd0bfee-b71c-4898-bc28-8ec123f17177" (UID: "2cd0bfee-b71c-4898-bc28-8ec123f17177"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 17:19:01 crc kubenswrapper[4811]: I0128 17:19:01.133166 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2cd0bfee-b71c-4898-bc28-8ec123f17177-config" (OuterVolumeSpecName: "config") pod "2cd0bfee-b71c-4898-bc28-8ec123f17177" (UID: "2cd0bfee-b71c-4898-bc28-8ec123f17177"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 17:19:01 crc kubenswrapper[4811]: I0128 17:19:01.139787 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2cd0bfee-b71c-4898-bc28-8ec123f17177-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2cd0bfee-b71c-4898-bc28-8ec123f17177" (UID: "2cd0bfee-b71c-4898-bc28-8ec123f17177"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 17:19:01 crc kubenswrapper[4811]: I0128 17:19:01.141818 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2cd0bfee-b71c-4898-bc28-8ec123f17177-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2cd0bfee-b71c-4898-bc28-8ec123f17177" (UID: "2cd0bfee-b71c-4898-bc28-8ec123f17177"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 17:19:01 crc kubenswrapper[4811]: I0128 17:19:01.184961 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2cd0bfee-b71c-4898-bc28-8ec123f17177-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Jan 28 17:19:01 crc kubenswrapper[4811]: I0128 17:19:01.184992 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2cd0bfee-b71c-4898-bc28-8ec123f17177-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Jan 28 17:19:01 crc kubenswrapper[4811]: I0128 17:19:01.185001 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2cd0bfee-b71c-4898-bc28-8ec123f17177-config\") on node \"crc\" DevicePath \"\""
Jan 28 17:19:01 crc kubenswrapper[4811]: I0128 17:19:01.185011 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pc7hs\" (UniqueName: \"kubernetes.io/projected/2cd0bfee-b71c-4898-bc28-8ec123f17177-kube-api-access-pc7hs\") on node \"crc\" DevicePath \"\""
Jan 28 17:19:01 crc kubenswrapper[4811]: I0128 17:19:01.185020 4811 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2cd0bfee-b71c-4898-bc28-8ec123f17177-dns-svc\") on node \"crc\" DevicePath \"\""
Jan 28 17:19:01 crc kubenswrapper[4811]: I0128 17:19:01.823862 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-857b556495-wb2l9" event={"ID":"2cd0bfee-b71c-4898-bc28-8ec123f17177","Type":"ContainerDied","Data":"bd4173b431976562d2121291119c48d6872d1ef0a171b1327ca8df43d66afbf9"}
Jan 28 17:19:01 crc kubenswrapper[4811]: I0128 17:19:01.823899 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-857b556495-wb2l9"
Jan 28 17:19:01 crc kubenswrapper[4811]: I0128 17:19:01.823914 4811 scope.go:117] "RemoveContainer" containerID="941e02617f279548eec7d269deda6b51a6325179296377fe72f3286b85dc5c98"
Jan 28 17:19:01 crc kubenswrapper[4811]: I0128 17:19:01.846669 4811 scope.go:117] "RemoveContainer" containerID="f3845bba5fe644274db6165bfc0fc888af923a5d6f4b01677f5543aa37e2c3df"
Jan 28 17:19:01 crc kubenswrapper[4811]: I0128 17:19:01.855931 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-857b556495-wb2l9"]
Jan 28 17:19:01 crc kubenswrapper[4811]: I0128 17:19:01.863331 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-857b556495-wb2l9"]
Jan 28 17:19:02 crc kubenswrapper[4811]: I0128 17:19:02.356777 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2cd0bfee-b71c-4898-bc28-8ec123f17177" path="/var/lib/kubelet/pods/2cd0bfee-b71c-4898-bc28-8ec123f17177/volumes"
Jan 28 17:19:03 crc kubenswrapper[4811]: I0128 17:19:03.087634 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 17:19:03 crc kubenswrapper[4811]: I0128 17:19:03.088278 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 17:19:03 crc kubenswrapper[4811]: I0128 17:19:03.088337 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6"
Jan 28 17:19:03 crc kubenswrapper[4811]: I0128 17:19:03.090584 4811 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3abd2ca52dcea4f0bad2eccd9e9279be836bcb0c1530a2433f35225ded0da091"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 28 17:19:03 crc kubenswrapper[4811]: I0128 17:19:03.090652 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://3abd2ca52dcea4f0bad2eccd9e9279be836bcb0c1530a2433f35225ded0da091" gracePeriod=600
Jan 28 17:19:03 crc kubenswrapper[4811]: E0128 17:19:03.226023 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:19:03 crc kubenswrapper[4811]: I0128 17:19:03.844187 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="3abd2ca52dcea4f0bad2eccd9e9279be836bcb0c1530a2433f35225ded0da091" exitCode=0
Jan 28 17:19:03 crc kubenswrapper[4811]: I0128 17:19:03.844243 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"3abd2ca52dcea4f0bad2eccd9e9279be836bcb0c1530a2433f35225ded0da091"}
Jan 28 17:19:03 crc kubenswrapper[4811]: I0128 17:19:03.844283 4811 scope.go:117] "RemoveContainer" containerID="aa24462d3859954f62e518a2517851c0125e8860de36202094bb80d1adc65f98"
Jan 28 17:19:03 crc kubenswrapper[4811]: I0128 17:19:03.844957 4811 scope.go:117] "RemoveContainer" containerID="3abd2ca52dcea4f0bad2eccd9e9279be836bcb0c1530a2433f35225ded0da091"
Jan 28 17:19:03 crc kubenswrapper[4811]: E0128 17:19:03.845281 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:19:16 crc kubenswrapper[4811]: I0128 17:19:16.340151 4811 scope.go:117] "RemoveContainer" containerID="3abd2ca52dcea4f0bad2eccd9e9279be836bcb0c1530a2433f35225ded0da091"
Jan 28 17:19:16 crc kubenswrapper[4811]: E0128 17:19:16.340908 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:19:26 crc kubenswrapper[4811]: I0128 17:19:26.246706 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-7f44878d9d-49bh8"
Jan 28 17:19:26 crc kubenswrapper[4811]: I0128 17:19:26.247936 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-7f44878d9d-49bh8"
Jan 28 17:19:27 crc kubenswrapper[4811]: I0128 17:19:27.349383 4811 scope.go:117] "RemoveContainer" containerID="3abd2ca52dcea4f0bad2eccd9e9279be836bcb0c1530a2433f35225ded0da091"
Jan 28 17:19:27 crc kubenswrapper[4811]: E0128 17:19:27.349944 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:19:30 crc kubenswrapper[4811]: I0128 17:19:30.745966 4811 scope.go:117] "RemoveContainer" containerID="d595342cfabbc9b91f4fdbf9dc72ea898825c6fd1e6f1419adb950fe2bedc8fd"
Jan 28 17:19:30 crc kubenswrapper[4811]: I0128 17:19:30.770119 4811 scope.go:117] "RemoveContainer" containerID="4622ad9afdb33a29347f45c62674af5be3a37f9892085baf08f97403153ff8a1"
Jan 28 17:19:30 crc kubenswrapper[4811]: I0128 17:19:30.797386 4811 scope.go:117] "RemoveContainer" containerID="1a6dad311808ca0727fae58882ce830a86a4ad82dea6db2166930f8d37c5afc1"
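From here the machine-config-daemon sits in CrashLoopBackOff: each restart attempt at 17:19:03, 17:19:16, 17:19:27 and later 17:19:40 is refused with "back-off 5m0s restarting failed container". Kubelet's restart delay roughly doubles per crash up to a cap; the 5m0s cap is visible in the log, while the initial delay and doubling factor below are assumptions of the sketch, not taken from this log:

    package main

    import (
        "fmt"
        "time"
    )

    // Assumed parameters: the crash-loop delay starts small and doubles per
    // restart until it reaches the cap seen in the log ("back-off 5m0s").
    func backoff(restarts int) time.Duration {
        d := 10 * time.Second // assumed initial delay
        for i := 0; i < restarts; i++ {
            d *= 2
            if d >= 5*time.Minute {
                return 5 * time.Minute
            }
        }
        return d
    }

    func main() {
        for r := 0; r <= 6; r++ {
            fmt.Println(r, backoff(r)) // 10s 20s 40s 1m20s 2m40s 5m0s 5m0s
        }
    }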
pods=["openshift-marketplace/redhat-marketplace-9d2b8"] Jan 28 17:19:33 crc kubenswrapper[4811]: E0128 17:19:33.069126 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cd0bfee-b71c-4898-bc28-8ec123f17177" containerName="dnsmasq-dns" Jan 28 17:19:33 crc kubenswrapper[4811]: I0128 17:19:33.069145 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cd0bfee-b71c-4898-bc28-8ec123f17177" containerName="dnsmasq-dns" Jan 28 17:19:33 crc kubenswrapper[4811]: E0128 17:19:33.069176 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cd0bfee-b71c-4898-bc28-8ec123f17177" containerName="init" Jan 28 17:19:33 crc kubenswrapper[4811]: I0128 17:19:33.069183 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cd0bfee-b71c-4898-bc28-8ec123f17177" containerName="init" Jan 28 17:19:33 crc kubenswrapper[4811]: I0128 17:19:33.069393 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="2cd0bfee-b71c-4898-bc28-8ec123f17177" containerName="dnsmasq-dns" Jan 28 17:19:33 crc kubenswrapper[4811]: I0128 17:19:33.071034 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9d2b8" Jan 28 17:19:33 crc kubenswrapper[4811]: I0128 17:19:33.080853 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9d2b8"] Jan 28 17:19:33 crc kubenswrapper[4811]: I0128 17:19:33.169413 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd038518-b6df-4f2f-9246-6b03645aa0a6-utilities\") pod \"redhat-marketplace-9d2b8\" (UID: \"fd038518-b6df-4f2f-9246-6b03645aa0a6\") " pod="openshift-marketplace/redhat-marketplace-9d2b8" Jan 28 17:19:33 crc kubenswrapper[4811]: I0128 17:19:33.169671 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lplvl\" (UniqueName: \"kubernetes.io/projected/fd038518-b6df-4f2f-9246-6b03645aa0a6-kube-api-access-lplvl\") pod \"redhat-marketplace-9d2b8\" (UID: \"fd038518-b6df-4f2f-9246-6b03645aa0a6\") " pod="openshift-marketplace/redhat-marketplace-9d2b8" Jan 28 17:19:33 crc kubenswrapper[4811]: I0128 17:19:33.169709 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd038518-b6df-4f2f-9246-6b03645aa0a6-catalog-content\") pod \"redhat-marketplace-9d2b8\" (UID: \"fd038518-b6df-4f2f-9246-6b03645aa0a6\") " pod="openshift-marketplace/redhat-marketplace-9d2b8" Jan 28 17:19:33 crc kubenswrapper[4811]: I0128 17:19:33.271215 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lplvl\" (UniqueName: \"kubernetes.io/projected/fd038518-b6df-4f2f-9246-6b03645aa0a6-kube-api-access-lplvl\") pod \"redhat-marketplace-9d2b8\" (UID: \"fd038518-b6df-4f2f-9246-6b03645aa0a6\") " pod="openshift-marketplace/redhat-marketplace-9d2b8" Jan 28 17:19:33 crc kubenswrapper[4811]: I0128 17:19:33.271269 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd038518-b6df-4f2f-9246-6b03645aa0a6-catalog-content\") pod \"redhat-marketplace-9d2b8\" (UID: \"fd038518-b6df-4f2f-9246-6b03645aa0a6\") " pod="openshift-marketplace/redhat-marketplace-9d2b8" Jan 28 17:19:33 crc kubenswrapper[4811]: I0128 17:19:33.271340 4811 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd038518-b6df-4f2f-9246-6b03645aa0a6-utilities\") pod \"redhat-marketplace-9d2b8\" (UID: \"fd038518-b6df-4f2f-9246-6b03645aa0a6\") " pod="openshift-marketplace/redhat-marketplace-9d2b8" Jan 28 17:19:33 crc kubenswrapper[4811]: I0128 17:19:33.272019 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd038518-b6df-4f2f-9246-6b03645aa0a6-catalog-content\") pod \"redhat-marketplace-9d2b8\" (UID: \"fd038518-b6df-4f2f-9246-6b03645aa0a6\") " pod="openshift-marketplace/redhat-marketplace-9d2b8" Jan 28 17:19:33 crc kubenswrapper[4811]: I0128 17:19:33.272031 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd038518-b6df-4f2f-9246-6b03645aa0a6-utilities\") pod \"redhat-marketplace-9d2b8\" (UID: \"fd038518-b6df-4f2f-9246-6b03645aa0a6\") " pod="openshift-marketplace/redhat-marketplace-9d2b8" Jan 28 17:19:33 crc kubenswrapper[4811]: I0128 17:19:33.295595 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lplvl\" (UniqueName: \"kubernetes.io/projected/fd038518-b6df-4f2f-9246-6b03645aa0a6-kube-api-access-lplvl\") pod \"redhat-marketplace-9d2b8\" (UID: \"fd038518-b6df-4f2f-9246-6b03645aa0a6\") " pod="openshift-marketplace/redhat-marketplace-9d2b8" Jan 28 17:19:33 crc kubenswrapper[4811]: I0128 17:19:33.391669 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9d2b8" Jan 28 17:19:33 crc kubenswrapper[4811]: I0128 17:19:33.855131 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9d2b8"] Jan 28 17:19:34 crc kubenswrapper[4811]: I0128 17:19:34.120188 4811 generic.go:334] "Generic (PLEG): container finished" podID="fd038518-b6df-4f2f-9246-6b03645aa0a6" containerID="c3611ddea2dbe0dbc8ca42a6c54565c50f720f57b84f103c675e5e75169fc2f9" exitCode=0 Jan 28 17:19:34 crc kubenswrapper[4811]: I0128 17:19:34.120346 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9d2b8" event={"ID":"fd038518-b6df-4f2f-9246-6b03645aa0a6","Type":"ContainerDied","Data":"c3611ddea2dbe0dbc8ca42a6c54565c50f720f57b84f103c675e5e75169fc2f9"} Jan 28 17:19:34 crc kubenswrapper[4811]: I0128 17:19:34.120486 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9d2b8" event={"ID":"fd038518-b6df-4f2f-9246-6b03645aa0a6","Type":"ContainerStarted","Data":"8f036244994e08fbed75d81f927fd9896fd0850bb7a188d65f001fc9cd30b93f"} Jan 28 17:19:34 crc kubenswrapper[4811]: I0128 17:19:34.122450 4811 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 28 17:19:35 crc kubenswrapper[4811]: I0128 17:19:35.130896 4811 generic.go:334] "Generic (PLEG): container finished" podID="fd038518-b6df-4f2f-9246-6b03645aa0a6" containerID="1188a332b9ca1cac97148dfdd166f00f8049c99f50e55226c47f6adf30ff65d8" exitCode=0 Jan 28 17:19:35 crc kubenswrapper[4811]: I0128 17:19:35.131002 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9d2b8" event={"ID":"fd038518-b6df-4f2f-9246-6b03645aa0a6","Type":"ContainerDied","Data":"1188a332b9ca1cac97148dfdd166f00f8049c99f50e55226c47f6adf30ff65d8"} Jan 28 17:19:36 crc kubenswrapper[4811]: I0128 17:19:36.140589 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-9d2b8" event={"ID":"fd038518-b6df-4f2f-9246-6b03645aa0a6","Type":"ContainerStarted","Data":"caa3aad65deb8957ae02a0a4d1e8e75e16b767ce805413c4348c2616fc829cce"} Jan 28 17:19:36 crc kubenswrapper[4811]: I0128 17:19:36.165676 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-9d2b8" podStartSLOduration=1.6827512919999998 podStartE2EDuration="3.165655159s" podCreationTimestamp="2026-01-28 17:19:33 +0000 UTC" firstStartedPulling="2026-01-28 17:19:34.122151685 +0000 UTC m=+5666.876515268" lastFinishedPulling="2026-01-28 17:19:35.605055552 +0000 UTC m=+5668.359419135" observedRunningTime="2026-01-28 17:19:36.15907938 +0000 UTC m=+5668.913442973" watchObservedRunningTime="2026-01-28 17:19:36.165655159 +0000 UTC m=+5668.920018742" Jan 28 17:19:40 crc kubenswrapper[4811]: I0128 17:19:40.340012 4811 scope.go:117] "RemoveContainer" containerID="3abd2ca52dcea4f0bad2eccd9e9279be836bcb0c1530a2433f35225ded0da091" Jan 28 17:19:40 crc kubenswrapper[4811]: E0128 17:19:40.340710 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:19:43 crc kubenswrapper[4811]: I0128 17:19:43.391911 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-9d2b8" Jan 28 17:19:43 crc kubenswrapper[4811]: I0128 17:19:43.392286 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-9d2b8" Jan 28 17:19:43 crc kubenswrapper[4811]: I0128 17:19:43.442200 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-9d2b8" Jan 28 17:19:44 crc kubenswrapper[4811]: I0128 17:19:44.249224 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-9d2b8" Jan 28 17:19:44 crc kubenswrapper[4811]: I0128 17:19:44.301206 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9d2b8"] Jan 28 17:19:46 crc kubenswrapper[4811]: I0128 17:19:46.219622 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-9d2b8" podUID="fd038518-b6df-4f2f-9246-6b03645aa0a6" containerName="registry-server" containerID="cri-o://caa3aad65deb8957ae02a0a4d1e8e75e16b767ce805413c4348c2616fc829cce" gracePeriod=2 Jan 28 17:19:46 crc kubenswrapper[4811]: I0128 17:19:46.675163 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9d2b8" Jan 28 17:19:46 crc kubenswrapper[4811]: I0128 17:19:46.694002 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lplvl\" (UniqueName: \"kubernetes.io/projected/fd038518-b6df-4f2f-9246-6b03645aa0a6-kube-api-access-lplvl\") pod \"fd038518-b6df-4f2f-9246-6b03645aa0a6\" (UID: \"fd038518-b6df-4f2f-9246-6b03645aa0a6\") " Jan 28 17:19:46 crc kubenswrapper[4811]: I0128 17:19:46.694121 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd038518-b6df-4f2f-9246-6b03645aa0a6-utilities\") pod \"fd038518-b6df-4f2f-9246-6b03645aa0a6\" (UID: \"fd038518-b6df-4f2f-9246-6b03645aa0a6\") " Jan 28 17:19:46 crc kubenswrapper[4811]: I0128 17:19:46.694160 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd038518-b6df-4f2f-9246-6b03645aa0a6-catalog-content\") pod \"fd038518-b6df-4f2f-9246-6b03645aa0a6\" (UID: \"fd038518-b6df-4f2f-9246-6b03645aa0a6\") " Jan 28 17:19:46 crc kubenswrapper[4811]: I0128 17:19:46.696347 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd038518-b6df-4f2f-9246-6b03645aa0a6-utilities" (OuterVolumeSpecName: "utilities") pod "fd038518-b6df-4f2f-9246-6b03645aa0a6" (UID: "fd038518-b6df-4f2f-9246-6b03645aa0a6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:19:46 crc kubenswrapper[4811]: I0128 17:19:46.701865 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd038518-b6df-4f2f-9246-6b03645aa0a6-kube-api-access-lplvl" (OuterVolumeSpecName: "kube-api-access-lplvl") pod "fd038518-b6df-4f2f-9246-6b03645aa0a6" (UID: "fd038518-b6df-4f2f-9246-6b03645aa0a6"). InnerVolumeSpecName "kube-api-access-lplvl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:19:46 crc kubenswrapper[4811]: I0128 17:19:46.732221 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd038518-b6df-4f2f-9246-6b03645aa0a6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fd038518-b6df-4f2f-9246-6b03645aa0a6" (UID: "fd038518-b6df-4f2f-9246-6b03645aa0a6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:19:46 crc kubenswrapper[4811]: I0128 17:19:46.795258 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lplvl\" (UniqueName: \"kubernetes.io/projected/fd038518-b6df-4f2f-9246-6b03645aa0a6-kube-api-access-lplvl\") on node \"crc\" DevicePath \"\"" Jan 28 17:19:46 crc kubenswrapper[4811]: I0128 17:19:46.795304 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd038518-b6df-4f2f-9246-6b03645aa0a6-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 17:19:46 crc kubenswrapper[4811]: I0128 17:19:46.795316 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd038518-b6df-4f2f-9246-6b03645aa0a6-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 17:19:47 crc kubenswrapper[4811]: I0128 17:19:47.228299 4811 generic.go:334] "Generic (PLEG): container finished" podID="fd038518-b6df-4f2f-9246-6b03645aa0a6" containerID="caa3aad65deb8957ae02a0a4d1e8e75e16b767ce805413c4348c2616fc829cce" exitCode=0 Jan 28 17:19:47 crc kubenswrapper[4811]: I0128 17:19:47.228366 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9d2b8" event={"ID":"fd038518-b6df-4f2f-9246-6b03645aa0a6","Type":"ContainerDied","Data":"caa3aad65deb8957ae02a0a4d1e8e75e16b767ce805413c4348c2616fc829cce"} Jan 28 17:19:47 crc kubenswrapper[4811]: I0128 17:19:47.228425 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9d2b8" event={"ID":"fd038518-b6df-4f2f-9246-6b03645aa0a6","Type":"ContainerDied","Data":"8f036244994e08fbed75d81f927fd9896fd0850bb7a188d65f001fc9cd30b93f"} Jan 28 17:19:47 crc kubenswrapper[4811]: I0128 17:19:47.228464 4811 scope.go:117] "RemoveContainer" containerID="caa3aad65deb8957ae02a0a4d1e8e75e16b767ce805413c4348c2616fc829cce" Jan 28 17:19:47 crc kubenswrapper[4811]: I0128 17:19:47.228378 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9d2b8" Jan 28 17:19:47 crc kubenswrapper[4811]: I0128 17:19:47.258397 4811 scope.go:117] "RemoveContainer" containerID="1188a332b9ca1cac97148dfdd166f00f8049c99f50e55226c47f6adf30ff65d8" Jan 28 17:19:47 crc kubenswrapper[4811]: I0128 17:19:47.259456 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9d2b8"] Jan 28 17:19:47 crc kubenswrapper[4811]: I0128 17:19:47.269694 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-9d2b8"] Jan 28 17:19:47 crc kubenswrapper[4811]: I0128 17:19:47.301924 4811 scope.go:117] "RemoveContainer" containerID="c3611ddea2dbe0dbc8ca42a6c54565c50f720f57b84f103c675e5e75169fc2f9" Jan 28 17:19:47 crc kubenswrapper[4811]: I0128 17:19:47.342065 4811 scope.go:117] "RemoveContainer" containerID="caa3aad65deb8957ae02a0a4d1e8e75e16b767ce805413c4348c2616fc829cce" Jan 28 17:19:47 crc kubenswrapper[4811]: E0128 17:19:47.342411 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"caa3aad65deb8957ae02a0a4d1e8e75e16b767ce805413c4348c2616fc829cce\": container with ID starting with caa3aad65deb8957ae02a0a4d1e8e75e16b767ce805413c4348c2616fc829cce not found: ID does not exist" containerID="caa3aad65deb8957ae02a0a4d1e8e75e16b767ce805413c4348c2616fc829cce" Jan 28 17:19:47 crc kubenswrapper[4811]: I0128 17:19:47.342482 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"caa3aad65deb8957ae02a0a4d1e8e75e16b767ce805413c4348c2616fc829cce"} err="failed to get container status \"caa3aad65deb8957ae02a0a4d1e8e75e16b767ce805413c4348c2616fc829cce\": rpc error: code = NotFound desc = could not find container \"caa3aad65deb8957ae02a0a4d1e8e75e16b767ce805413c4348c2616fc829cce\": container with ID starting with caa3aad65deb8957ae02a0a4d1e8e75e16b767ce805413c4348c2616fc829cce not found: ID does not exist" Jan 28 17:19:47 crc kubenswrapper[4811]: I0128 17:19:47.342516 4811 scope.go:117] "RemoveContainer" containerID="1188a332b9ca1cac97148dfdd166f00f8049c99f50e55226c47f6adf30ff65d8" Jan 28 17:19:47 crc kubenswrapper[4811]: E0128 17:19:47.342848 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1188a332b9ca1cac97148dfdd166f00f8049c99f50e55226c47f6adf30ff65d8\": container with ID starting with 1188a332b9ca1cac97148dfdd166f00f8049c99f50e55226c47f6adf30ff65d8 not found: ID does not exist" containerID="1188a332b9ca1cac97148dfdd166f00f8049c99f50e55226c47f6adf30ff65d8" Jan 28 17:19:47 crc kubenswrapper[4811]: I0128 17:19:47.342876 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1188a332b9ca1cac97148dfdd166f00f8049c99f50e55226c47f6adf30ff65d8"} err="failed to get container status \"1188a332b9ca1cac97148dfdd166f00f8049c99f50e55226c47f6adf30ff65d8\": rpc error: code = NotFound desc = could not find container \"1188a332b9ca1cac97148dfdd166f00f8049c99f50e55226c47f6adf30ff65d8\": container with ID starting with 1188a332b9ca1cac97148dfdd166f00f8049c99f50e55226c47f6adf30ff65d8 not found: ID does not exist" Jan 28 17:19:47 crc kubenswrapper[4811]: I0128 17:19:47.342898 4811 scope.go:117] "RemoveContainer" containerID="c3611ddea2dbe0dbc8ca42a6c54565c50f720f57b84f103c675e5e75169fc2f9" Jan 28 17:19:47 crc kubenswrapper[4811]: E0128 17:19:47.343208 4811 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"c3611ddea2dbe0dbc8ca42a6c54565c50f720f57b84f103c675e5e75169fc2f9\": container with ID starting with c3611ddea2dbe0dbc8ca42a6c54565c50f720f57b84f103c675e5e75169fc2f9 not found: ID does not exist" containerID="c3611ddea2dbe0dbc8ca42a6c54565c50f720f57b84f103c675e5e75169fc2f9" Jan 28 17:19:47 crc kubenswrapper[4811]: I0128 17:19:47.343228 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3611ddea2dbe0dbc8ca42a6c54565c50f720f57b84f103c675e5e75169fc2f9"} err="failed to get container status \"c3611ddea2dbe0dbc8ca42a6c54565c50f720f57b84f103c675e5e75169fc2f9\": rpc error: code = NotFound desc = could not find container \"c3611ddea2dbe0dbc8ca42a6c54565c50f720f57b84f103c675e5e75169fc2f9\": container with ID starting with c3611ddea2dbe0dbc8ca42a6c54565c50f720f57b84f103c675e5e75169fc2f9 not found: ID does not exist" Jan 28 17:19:47 crc kubenswrapper[4811]: I0128 17:19:47.902165 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-6nhjc"] Jan 28 17:19:47 crc kubenswrapper[4811]: E0128 17:19:47.902944 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd038518-b6df-4f2f-9246-6b03645aa0a6" containerName="registry-server" Jan 28 17:19:47 crc kubenswrapper[4811]: I0128 17:19:47.902965 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd038518-b6df-4f2f-9246-6b03645aa0a6" containerName="registry-server" Jan 28 17:19:47 crc kubenswrapper[4811]: E0128 17:19:47.902978 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd038518-b6df-4f2f-9246-6b03645aa0a6" containerName="extract-content" Jan 28 17:19:47 crc kubenswrapper[4811]: I0128 17:19:47.902986 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd038518-b6df-4f2f-9246-6b03645aa0a6" containerName="extract-content" Jan 28 17:19:47 crc kubenswrapper[4811]: E0128 17:19:47.903028 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd038518-b6df-4f2f-9246-6b03645aa0a6" containerName="extract-utilities" Jan 28 17:19:47 crc kubenswrapper[4811]: I0128 17:19:47.903036 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd038518-b6df-4f2f-9246-6b03645aa0a6" containerName="extract-utilities" Jan 28 17:19:47 crc kubenswrapper[4811]: I0128 17:19:47.903237 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd038518-b6df-4f2f-9246-6b03645aa0a6" containerName="registry-server" Jan 28 17:19:47 crc kubenswrapper[4811]: I0128 17:19:47.903954 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-6nhjc" Jan 28 17:19:47 crc kubenswrapper[4811]: I0128 17:19:47.914304 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tq6tz\" (UniqueName: \"kubernetes.io/projected/1017da74-ab67-432c-8eb9-8b6b8b7a1aac-kube-api-access-tq6tz\") pod \"nova-api-db-create-6nhjc\" (UID: \"1017da74-ab67-432c-8eb9-8b6b8b7a1aac\") " pod="openstack/nova-api-db-create-6nhjc" Jan 28 17:19:47 crc kubenswrapper[4811]: I0128 17:19:47.914407 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1017da74-ab67-432c-8eb9-8b6b8b7a1aac-operator-scripts\") pod \"nova-api-db-create-6nhjc\" (UID: \"1017da74-ab67-432c-8eb9-8b6b8b7a1aac\") " pod="openstack/nova-api-db-create-6nhjc" Jan 28 17:19:47 crc kubenswrapper[4811]: I0128 17:19:47.916182 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-6nhjc"] Jan 28 17:19:47 crc kubenswrapper[4811]: I0128 17:19:47.990290 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-8gn9v"] Jan 28 17:19:47 crc kubenswrapper[4811]: I0128 17:19:47.991343 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-8gn9v" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.007211 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-8gn9v"] Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.015468 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tq6tz\" (UniqueName: \"kubernetes.io/projected/1017da74-ab67-432c-8eb9-8b6b8b7a1aac-kube-api-access-tq6tz\") pod \"nova-api-db-create-6nhjc\" (UID: \"1017da74-ab67-432c-8eb9-8b6b8b7a1aac\") " pod="openstack/nova-api-db-create-6nhjc" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.015547 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7xqnq\" (UniqueName: \"kubernetes.io/projected/63e69842-46ee-4ad6-a516-1d9a9b9e96c5-kube-api-access-7xqnq\") pod \"nova-cell0-db-create-8gn9v\" (UID: \"63e69842-46ee-4ad6-a516-1d9a9b9e96c5\") " pod="openstack/nova-cell0-db-create-8gn9v" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.015600 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1017da74-ab67-432c-8eb9-8b6b8b7a1aac-operator-scripts\") pod \"nova-api-db-create-6nhjc\" (UID: \"1017da74-ab67-432c-8eb9-8b6b8b7a1aac\") " pod="openstack/nova-api-db-create-6nhjc" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.015787 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63e69842-46ee-4ad6-a516-1d9a9b9e96c5-operator-scripts\") pod \"nova-cell0-db-create-8gn9v\" (UID: \"63e69842-46ee-4ad6-a516-1d9a9b9e96c5\") " pod="openstack/nova-cell0-db-create-8gn9v" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.016278 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1017da74-ab67-432c-8eb9-8b6b8b7a1aac-operator-scripts\") pod \"nova-api-db-create-6nhjc\" (UID: \"1017da74-ab67-432c-8eb9-8b6b8b7a1aac\") " pod="openstack/nova-api-db-create-6nhjc" Jan 28 17:19:48 crc 
kubenswrapper[4811]: I0128 17:19:48.038105 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tq6tz\" (UniqueName: \"kubernetes.io/projected/1017da74-ab67-432c-8eb9-8b6b8b7a1aac-kube-api-access-tq6tz\") pod \"nova-api-db-create-6nhjc\" (UID: \"1017da74-ab67-432c-8eb9-8b6b8b7a1aac\") " pod="openstack/nova-api-db-create-6nhjc" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.110233 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-3bdf-account-create-update-hgqfn"] Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.122425 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-3bdf-account-create-update-hgqfn" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.123464 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63e69842-46ee-4ad6-a516-1d9a9b9e96c5-operator-scripts\") pod \"nova-cell0-db-create-8gn9v\" (UID: \"63e69842-46ee-4ad6-a516-1d9a9b9e96c5\") " pod="openstack/nova-cell0-db-create-8gn9v" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.123574 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7xqnq\" (UniqueName: \"kubernetes.io/projected/63e69842-46ee-4ad6-a516-1d9a9b9e96c5-kube-api-access-7xqnq\") pod \"nova-cell0-db-create-8gn9v\" (UID: \"63e69842-46ee-4ad6-a516-1d9a9b9e96c5\") " pod="openstack/nova-cell0-db-create-8gn9v" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.125228 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63e69842-46ee-4ad6-a516-1d9a9b9e96c5-operator-scripts\") pod \"nova-cell0-db-create-8gn9v\" (UID: \"63e69842-46ee-4ad6-a516-1d9a9b9e96c5\") " pod="openstack/nova-cell0-db-create-8gn9v" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.125529 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.126450 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-3bdf-account-create-update-hgqfn"] Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.160016 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7xqnq\" (UniqueName: \"kubernetes.io/projected/63e69842-46ee-4ad6-a516-1d9a9b9e96c5-kube-api-access-7xqnq\") pod \"nova-cell0-db-create-8gn9v\" (UID: \"63e69842-46ee-4ad6-a516-1d9a9b9e96c5\") " pod="openstack/nova-cell0-db-create-8gn9v" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.202940 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-99rvc"] Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.204117 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-99rvc" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.211558 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-99rvc"] Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.225249 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-6nhjc" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.225582 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1940218-22f9-4af9-b2ff-eb1b19e318f4-operator-scripts\") pod \"nova-api-3bdf-account-create-update-hgqfn\" (UID: \"b1940218-22f9-4af9-b2ff-eb1b19e318f4\") " pod="openstack/nova-api-3bdf-account-create-update-hgqfn" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.225656 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fd8tl\" (UniqueName: \"kubernetes.io/projected/b1940218-22f9-4af9-b2ff-eb1b19e318f4-kube-api-access-fd8tl\") pod \"nova-api-3bdf-account-create-update-hgqfn\" (UID: \"b1940218-22f9-4af9-b2ff-eb1b19e318f4\") " pod="openstack/nova-api-3bdf-account-create-update-hgqfn" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.308873 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-8gn9v" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.323108 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-ec47-account-create-update-dnqjz"] Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.324468 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-ec47-account-create-update-dnqjz" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.329503 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1940218-22f9-4af9-b2ff-eb1b19e318f4-operator-scripts\") pod \"nova-api-3bdf-account-create-update-hgqfn\" (UID: \"b1940218-22f9-4af9-b2ff-eb1b19e318f4\") " pod="openstack/nova-api-3bdf-account-create-update-hgqfn" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.329568 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/470404eb-f10d-46f3-af1a-a173e2fbae0c-operator-scripts\") pod \"nova-cell1-db-create-99rvc\" (UID: \"470404eb-f10d-46f3-af1a-a173e2fbae0c\") " pod="openstack/nova-cell1-db-create-99rvc" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.329603 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fd8tl\" (UniqueName: \"kubernetes.io/projected/b1940218-22f9-4af9-b2ff-eb1b19e318f4-kube-api-access-fd8tl\") pod \"nova-api-3bdf-account-create-update-hgqfn\" (UID: \"b1940218-22f9-4af9-b2ff-eb1b19e318f4\") " pod="openstack/nova-api-3bdf-account-create-update-hgqfn" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.329632 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rv5qz\" (UniqueName: \"kubernetes.io/projected/470404eb-f10d-46f3-af1a-a173e2fbae0c-kube-api-access-rv5qz\") pod \"nova-cell1-db-create-99rvc\" (UID: \"470404eb-f10d-46f3-af1a-a173e2fbae0c\") " pod="openstack/nova-cell1-db-create-99rvc" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.330488 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.330700 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/b1940218-22f9-4af9-b2ff-eb1b19e318f4-operator-scripts\") pod \"nova-api-3bdf-account-create-update-hgqfn\" (UID: \"b1940218-22f9-4af9-b2ff-eb1b19e318f4\") " pod="openstack/nova-api-3bdf-account-create-update-hgqfn" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.336205 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-ec47-account-create-update-dnqjz"] Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.367754 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd038518-b6df-4f2f-9246-6b03645aa0a6" path="/var/lib/kubelet/pods/fd038518-b6df-4f2f-9246-6b03645aa0a6/volumes" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.369158 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fd8tl\" (UniqueName: \"kubernetes.io/projected/b1940218-22f9-4af9-b2ff-eb1b19e318f4-kube-api-access-fd8tl\") pod \"nova-api-3bdf-account-create-update-hgqfn\" (UID: \"b1940218-22f9-4af9-b2ff-eb1b19e318f4\") " pod="openstack/nova-api-3bdf-account-create-update-hgqfn" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.431500 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bl9d4\" (UniqueName: \"kubernetes.io/projected/e1cae5db-f32e-4625-8331-a2e38a0eaa6b-kube-api-access-bl9d4\") pod \"nova-cell0-ec47-account-create-update-dnqjz\" (UID: \"e1cae5db-f32e-4625-8331-a2e38a0eaa6b\") " pod="openstack/nova-cell0-ec47-account-create-update-dnqjz" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.431589 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/470404eb-f10d-46f3-af1a-a173e2fbae0c-operator-scripts\") pod \"nova-cell1-db-create-99rvc\" (UID: \"470404eb-f10d-46f3-af1a-a173e2fbae0c\") " pod="openstack/nova-cell1-db-create-99rvc" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.431632 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rv5qz\" (UniqueName: \"kubernetes.io/projected/470404eb-f10d-46f3-af1a-a173e2fbae0c-kube-api-access-rv5qz\") pod \"nova-cell1-db-create-99rvc\" (UID: \"470404eb-f10d-46f3-af1a-a173e2fbae0c\") " pod="openstack/nova-cell1-db-create-99rvc" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.431702 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e1cae5db-f32e-4625-8331-a2e38a0eaa6b-operator-scripts\") pod \"nova-cell0-ec47-account-create-update-dnqjz\" (UID: \"e1cae5db-f32e-4625-8331-a2e38a0eaa6b\") " pod="openstack/nova-cell0-ec47-account-create-update-dnqjz" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.432557 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/470404eb-f10d-46f3-af1a-a173e2fbae0c-operator-scripts\") pod \"nova-cell1-db-create-99rvc\" (UID: \"470404eb-f10d-46f3-af1a-a173e2fbae0c\") " pod="openstack/nova-cell1-db-create-99rvc" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.448945 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-3bdf-account-create-update-hgqfn" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.472684 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rv5qz\" (UniqueName: \"kubernetes.io/projected/470404eb-f10d-46f3-af1a-a173e2fbae0c-kube-api-access-rv5qz\") pod \"nova-cell1-db-create-99rvc\" (UID: \"470404eb-f10d-46f3-af1a-a173e2fbae0c\") " pod="openstack/nova-cell1-db-create-99rvc" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.513232 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-eddd-account-create-update-zkh4b"] Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.514390 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-eddd-account-create-update-zkh4b" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.517859 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.521086 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-eddd-account-create-update-zkh4b"] Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.533060 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e1cae5db-f32e-4625-8331-a2e38a0eaa6b-operator-scripts\") pod \"nova-cell0-ec47-account-create-update-dnqjz\" (UID: \"e1cae5db-f32e-4625-8331-a2e38a0eaa6b\") " pod="openstack/nova-cell0-ec47-account-create-update-dnqjz" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.533174 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bl9d4\" (UniqueName: \"kubernetes.io/projected/e1cae5db-f32e-4625-8331-a2e38a0eaa6b-kube-api-access-bl9d4\") pod \"nova-cell0-ec47-account-create-update-dnqjz\" (UID: \"e1cae5db-f32e-4625-8331-a2e38a0eaa6b\") " pod="openstack/nova-cell0-ec47-account-create-update-dnqjz" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.534217 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e1cae5db-f32e-4625-8331-a2e38a0eaa6b-operator-scripts\") pod \"nova-cell0-ec47-account-create-update-dnqjz\" (UID: \"e1cae5db-f32e-4625-8331-a2e38a0eaa6b\") " pod="openstack/nova-cell0-ec47-account-create-update-dnqjz" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.540264 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-99rvc" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.551284 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bl9d4\" (UniqueName: \"kubernetes.io/projected/e1cae5db-f32e-4625-8331-a2e38a0eaa6b-kube-api-access-bl9d4\") pod \"nova-cell0-ec47-account-create-update-dnqjz\" (UID: \"e1cae5db-f32e-4625-8331-a2e38a0eaa6b\") " pod="openstack/nova-cell0-ec47-account-create-update-dnqjz" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.635303 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fdc78eb0-da28-4fa6-92a4-bb049a6e6196-operator-scripts\") pod \"nova-cell1-eddd-account-create-update-zkh4b\" (UID: \"fdc78eb0-da28-4fa6-92a4-bb049a6e6196\") " pod="openstack/nova-cell1-eddd-account-create-update-zkh4b" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.635876 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6tw9d\" (UniqueName: \"kubernetes.io/projected/fdc78eb0-da28-4fa6-92a4-bb049a6e6196-kube-api-access-6tw9d\") pod \"nova-cell1-eddd-account-create-update-zkh4b\" (UID: \"fdc78eb0-da28-4fa6-92a4-bb049a6e6196\") " pod="openstack/nova-cell1-eddd-account-create-update-zkh4b" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.663916 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-ec47-account-create-update-dnqjz" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.737905 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fdc78eb0-da28-4fa6-92a4-bb049a6e6196-operator-scripts\") pod \"nova-cell1-eddd-account-create-update-zkh4b\" (UID: \"fdc78eb0-da28-4fa6-92a4-bb049a6e6196\") " pod="openstack/nova-cell1-eddd-account-create-update-zkh4b" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.738295 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6tw9d\" (UniqueName: \"kubernetes.io/projected/fdc78eb0-da28-4fa6-92a4-bb049a6e6196-kube-api-access-6tw9d\") pod \"nova-cell1-eddd-account-create-update-zkh4b\" (UID: \"fdc78eb0-da28-4fa6-92a4-bb049a6e6196\") " pod="openstack/nova-cell1-eddd-account-create-update-zkh4b" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.739661 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fdc78eb0-da28-4fa6-92a4-bb049a6e6196-operator-scripts\") pod \"nova-cell1-eddd-account-create-update-zkh4b\" (UID: \"fdc78eb0-da28-4fa6-92a4-bb049a6e6196\") " pod="openstack/nova-cell1-eddd-account-create-update-zkh4b" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.757528 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6tw9d\" (UniqueName: \"kubernetes.io/projected/fdc78eb0-da28-4fa6-92a4-bb049a6e6196-kube-api-access-6tw9d\") pod \"nova-cell1-eddd-account-create-update-zkh4b\" (UID: \"fdc78eb0-da28-4fa6-92a4-bb049a6e6196\") " pod="openstack/nova-cell1-eddd-account-create-update-zkh4b" Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.872326 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-6nhjc"] Jan 28 17:19:48 crc kubenswrapper[4811]: I0128 17:19:48.938877 4811 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/nova-cell1-eddd-account-create-update-zkh4b" Jan 28 17:19:49 crc kubenswrapper[4811]: I0128 17:19:49.042887 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-8gn9v"] Jan 28 17:19:49 crc kubenswrapper[4811]: I0128 17:19:49.063655 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-ec47-account-create-update-dnqjz"] Jan 28 17:19:49 crc kubenswrapper[4811]: I0128 17:19:49.081333 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-3bdf-account-create-update-hgqfn"] Jan 28 17:19:49 crc kubenswrapper[4811]: W0128 17:19:49.138563 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode1cae5db_f32e_4625_8331_a2e38a0eaa6b.slice/crio-0ae756027afaad7a900c5dfa4ca450e903d7f58355a1b3346766d13773b9cb7b WatchSource:0}: Error finding container 0ae756027afaad7a900c5dfa4ca450e903d7f58355a1b3346766d13773b9cb7b: Status 404 returned error can't find the container with id 0ae756027afaad7a900c5dfa4ca450e903d7f58355a1b3346766d13773b9cb7b Jan 28 17:19:49 crc kubenswrapper[4811]: I0128 17:19:49.207901 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-99rvc"] Jan 28 17:19:49 crc kubenswrapper[4811]: I0128 17:19:49.301672 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-3bdf-account-create-update-hgqfn" event={"ID":"b1940218-22f9-4af9-b2ff-eb1b19e318f4","Type":"ContainerStarted","Data":"853c954723dcb332e11ccced170802a1573a8c96dd632f38044cb374b6e90fdf"} Jan 28 17:19:49 crc kubenswrapper[4811]: I0128 17:19:49.303496 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-6nhjc" event={"ID":"1017da74-ab67-432c-8eb9-8b6b8b7a1aac","Type":"ContainerStarted","Data":"afd332a95cc20873bec1981d3b147860bb73bcc23c8c8ed783cf1de713d3f315"} Jan 28 17:19:49 crc kubenswrapper[4811]: I0128 17:19:49.305304 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-ec47-account-create-update-dnqjz" event={"ID":"e1cae5db-f32e-4625-8331-a2e38a0eaa6b","Type":"ContainerStarted","Data":"0ae756027afaad7a900c5dfa4ca450e903d7f58355a1b3346766d13773b9cb7b"} Jan 28 17:19:49 crc kubenswrapper[4811]: I0128 17:19:49.307995 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-8gn9v" event={"ID":"63e69842-46ee-4ad6-a516-1d9a9b9e96c5","Type":"ContainerStarted","Data":"6dfb88355e2595f774b9d9d0efb80aa5d6acff184c53711d9a465e9652123d19"} Jan 28 17:19:49 crc kubenswrapper[4811]: I0128 17:19:49.655910 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-eddd-account-create-update-zkh4b"] Jan 28 17:19:50 crc kubenswrapper[4811]: I0128 17:19:50.319867 4811 generic.go:334] "Generic (PLEG): container finished" podID="fdc78eb0-da28-4fa6-92a4-bb049a6e6196" containerID="37703212b593fd9ae93897220f10c170c22fb81bf9d081d708c345f6d43c5545" exitCode=0 Jan 28 17:19:50 crc kubenswrapper[4811]: I0128 17:19:50.320197 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-eddd-account-create-update-zkh4b" event={"ID":"fdc78eb0-da28-4fa6-92a4-bb049a6e6196","Type":"ContainerDied","Data":"37703212b593fd9ae93897220f10c170c22fb81bf9d081d708c345f6d43c5545"} Jan 28 17:19:50 crc kubenswrapper[4811]: I0128 17:19:50.320226 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-eddd-account-create-update-zkh4b" 
event={"ID":"fdc78eb0-da28-4fa6-92a4-bb049a6e6196","Type":"ContainerStarted","Data":"5cf0ac1a52e3c501ae8f88ae94f12051be775b380bc08c5243ef1b0764ce1436"} Jan 28 17:19:50 crc kubenswrapper[4811]: I0128 17:19:50.323301 4811 generic.go:334] "Generic (PLEG): container finished" podID="e1cae5db-f32e-4625-8331-a2e38a0eaa6b" containerID="5283b1d61528f78cba45ccc77e3c2d3739cb9c697ef7448f7615cb23e66ed4a4" exitCode=0 Jan 28 17:19:50 crc kubenswrapper[4811]: I0128 17:19:50.323335 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-ec47-account-create-update-dnqjz" event={"ID":"e1cae5db-f32e-4625-8331-a2e38a0eaa6b","Type":"ContainerDied","Data":"5283b1d61528f78cba45ccc77e3c2d3739cb9c697ef7448f7615cb23e66ed4a4"} Jan 28 17:19:50 crc kubenswrapper[4811]: I0128 17:19:50.325638 4811 generic.go:334] "Generic (PLEG): container finished" podID="63e69842-46ee-4ad6-a516-1d9a9b9e96c5" containerID="8b68c13448507fcffa8c820fd66bbb75e52b3b9f22b1191581477d98731338db" exitCode=0 Jan 28 17:19:50 crc kubenswrapper[4811]: I0128 17:19:50.325685 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-8gn9v" event={"ID":"63e69842-46ee-4ad6-a516-1d9a9b9e96c5","Type":"ContainerDied","Data":"8b68c13448507fcffa8c820fd66bbb75e52b3b9f22b1191581477d98731338db"} Jan 28 17:19:50 crc kubenswrapper[4811]: I0128 17:19:50.328645 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-99rvc" event={"ID":"470404eb-f10d-46f3-af1a-a173e2fbae0c","Type":"ContainerDied","Data":"8f9b11063f5240f101ecc491c17e4609c152174ccc2d7ffd20494b56a2bb4f67"} Jan 28 17:19:50 crc kubenswrapper[4811]: I0128 17:19:50.328554 4811 generic.go:334] "Generic (PLEG): container finished" podID="470404eb-f10d-46f3-af1a-a173e2fbae0c" containerID="8f9b11063f5240f101ecc491c17e4609c152174ccc2d7ffd20494b56a2bb4f67" exitCode=0 Jan 28 17:19:50 crc kubenswrapper[4811]: I0128 17:19:50.328926 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-99rvc" event={"ID":"470404eb-f10d-46f3-af1a-a173e2fbae0c","Type":"ContainerStarted","Data":"30481ce13c0675b75c2e2ff6e94770f9693a462d36225220cbc14f2f4d08a9ed"} Jan 28 17:19:50 crc kubenswrapper[4811]: I0128 17:19:50.330953 4811 generic.go:334] "Generic (PLEG): container finished" podID="b1940218-22f9-4af9-b2ff-eb1b19e318f4" containerID="ef428f512bc211ec40f2a4a7c632a7c7929aedff48882995beb79cb7f80d9007" exitCode=0 Jan 28 17:19:50 crc kubenswrapper[4811]: I0128 17:19:50.331041 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-3bdf-account-create-update-hgqfn" event={"ID":"b1940218-22f9-4af9-b2ff-eb1b19e318f4","Type":"ContainerDied","Data":"ef428f512bc211ec40f2a4a7c632a7c7929aedff48882995beb79cb7f80d9007"} Jan 28 17:19:50 crc kubenswrapper[4811]: I0128 17:19:50.332715 4811 generic.go:334] "Generic (PLEG): container finished" podID="1017da74-ab67-432c-8eb9-8b6b8b7a1aac" containerID="1a70d8d63e884eab873fec90dbd70e786e8aa501847ddf8c245860a58e535d77" exitCode=0 Jan 28 17:19:50 crc kubenswrapper[4811]: I0128 17:19:50.332753 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-6nhjc" event={"ID":"1017da74-ab67-432c-8eb9-8b6b8b7a1aac","Type":"ContainerDied","Data":"1a70d8d63e884eab873fec90dbd70e786e8aa501847ddf8c245860a58e535d77"} Jan 28 17:19:51 crc kubenswrapper[4811]: I0128 17:19:51.746570 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-8gn9v" Jan 28 17:19:51 crc kubenswrapper[4811]: I0128 17:19:51.807659 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7xqnq\" (UniqueName: \"kubernetes.io/projected/63e69842-46ee-4ad6-a516-1d9a9b9e96c5-kube-api-access-7xqnq\") pod \"63e69842-46ee-4ad6-a516-1d9a9b9e96c5\" (UID: \"63e69842-46ee-4ad6-a516-1d9a9b9e96c5\") " Jan 28 17:19:51 crc kubenswrapper[4811]: I0128 17:19:51.807764 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63e69842-46ee-4ad6-a516-1d9a9b9e96c5-operator-scripts\") pod \"63e69842-46ee-4ad6-a516-1d9a9b9e96c5\" (UID: \"63e69842-46ee-4ad6-a516-1d9a9b9e96c5\") " Jan 28 17:19:51 crc kubenswrapper[4811]: I0128 17:19:51.808761 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63e69842-46ee-4ad6-a516-1d9a9b9e96c5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "63e69842-46ee-4ad6-a516-1d9a9b9e96c5" (UID: "63e69842-46ee-4ad6-a516-1d9a9b9e96c5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:19:51 crc kubenswrapper[4811]: I0128 17:19:51.838819 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63e69842-46ee-4ad6-a516-1d9a9b9e96c5-kube-api-access-7xqnq" (OuterVolumeSpecName: "kube-api-access-7xqnq") pod "63e69842-46ee-4ad6-a516-1d9a9b9e96c5" (UID: "63e69842-46ee-4ad6-a516-1d9a9b9e96c5"). InnerVolumeSpecName "kube-api-access-7xqnq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:19:51 crc kubenswrapper[4811]: I0128 17:19:51.910463 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7xqnq\" (UniqueName: \"kubernetes.io/projected/63e69842-46ee-4ad6-a516-1d9a9b9e96c5-kube-api-access-7xqnq\") on node \"crc\" DevicePath \"\"" Jan 28 17:19:51 crc kubenswrapper[4811]: I0128 17:19:51.910501 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63e69842-46ee-4ad6-a516-1d9a9b9e96c5-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:19:51 crc kubenswrapper[4811]: I0128 17:19:51.995920 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-eddd-account-create-update-zkh4b" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.001496 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-99rvc" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.020098 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-3bdf-account-create-update-hgqfn" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.024974 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-ec47-account-create-update-dnqjz" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.038338 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-6nhjc" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.113688 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1940218-22f9-4af9-b2ff-eb1b19e318f4-operator-scripts\") pod \"b1940218-22f9-4af9-b2ff-eb1b19e318f4\" (UID: \"b1940218-22f9-4af9-b2ff-eb1b19e318f4\") " Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.113751 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/470404eb-f10d-46f3-af1a-a173e2fbae0c-operator-scripts\") pod \"470404eb-f10d-46f3-af1a-a173e2fbae0c\" (UID: \"470404eb-f10d-46f3-af1a-a173e2fbae0c\") " Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.113798 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fdc78eb0-da28-4fa6-92a4-bb049a6e6196-operator-scripts\") pod \"fdc78eb0-da28-4fa6-92a4-bb049a6e6196\" (UID: \"fdc78eb0-da28-4fa6-92a4-bb049a6e6196\") " Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.113823 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fd8tl\" (UniqueName: \"kubernetes.io/projected/b1940218-22f9-4af9-b2ff-eb1b19e318f4-kube-api-access-fd8tl\") pod \"b1940218-22f9-4af9-b2ff-eb1b19e318f4\" (UID: \"b1940218-22f9-4af9-b2ff-eb1b19e318f4\") " Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.113842 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tq6tz\" (UniqueName: \"kubernetes.io/projected/1017da74-ab67-432c-8eb9-8b6b8b7a1aac-kube-api-access-tq6tz\") pod \"1017da74-ab67-432c-8eb9-8b6b8b7a1aac\" (UID: \"1017da74-ab67-432c-8eb9-8b6b8b7a1aac\") " Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.113867 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rv5qz\" (UniqueName: \"kubernetes.io/projected/470404eb-f10d-46f3-af1a-a173e2fbae0c-kube-api-access-rv5qz\") pod \"470404eb-f10d-46f3-af1a-a173e2fbae0c\" (UID: \"470404eb-f10d-46f3-af1a-a173e2fbae0c\") " Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.113888 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e1cae5db-f32e-4625-8331-a2e38a0eaa6b-operator-scripts\") pod \"e1cae5db-f32e-4625-8331-a2e38a0eaa6b\" (UID: \"e1cae5db-f32e-4625-8331-a2e38a0eaa6b\") " Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.113933 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1017da74-ab67-432c-8eb9-8b6b8b7a1aac-operator-scripts\") pod \"1017da74-ab67-432c-8eb9-8b6b8b7a1aac\" (UID: \"1017da74-ab67-432c-8eb9-8b6b8b7a1aac\") " Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.113950 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6tw9d\" (UniqueName: \"kubernetes.io/projected/fdc78eb0-da28-4fa6-92a4-bb049a6e6196-kube-api-access-6tw9d\") pod \"fdc78eb0-da28-4fa6-92a4-bb049a6e6196\" (UID: \"fdc78eb0-da28-4fa6-92a4-bb049a6e6196\") " Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.114004 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bl9d4\" (UniqueName: 
\"kubernetes.io/projected/e1cae5db-f32e-4625-8331-a2e38a0eaa6b-kube-api-access-bl9d4\") pod \"e1cae5db-f32e-4625-8331-a2e38a0eaa6b\" (UID: \"e1cae5db-f32e-4625-8331-a2e38a0eaa6b\") " Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.119158 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1017da74-ab67-432c-8eb9-8b6b8b7a1aac-kube-api-access-tq6tz" (OuterVolumeSpecName: "kube-api-access-tq6tz") pod "1017da74-ab67-432c-8eb9-8b6b8b7a1aac" (UID: "1017da74-ab67-432c-8eb9-8b6b8b7a1aac"). InnerVolumeSpecName "kube-api-access-tq6tz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.119586 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b1940218-22f9-4af9-b2ff-eb1b19e318f4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b1940218-22f9-4af9-b2ff-eb1b19e318f4" (UID: "b1940218-22f9-4af9-b2ff-eb1b19e318f4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.120005 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/470404eb-f10d-46f3-af1a-a173e2fbae0c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "470404eb-f10d-46f3-af1a-a173e2fbae0c" (UID: "470404eb-f10d-46f3-af1a-a173e2fbae0c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.120421 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fdc78eb0-da28-4fa6-92a4-bb049a6e6196-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fdc78eb0-da28-4fa6-92a4-bb049a6e6196" (UID: "fdc78eb0-da28-4fa6-92a4-bb049a6e6196"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.122676 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1cae5db-f32e-4625-8331-a2e38a0eaa6b-kube-api-access-bl9d4" (OuterVolumeSpecName: "kube-api-access-bl9d4") pod "e1cae5db-f32e-4625-8331-a2e38a0eaa6b" (UID: "e1cae5db-f32e-4625-8331-a2e38a0eaa6b"). InnerVolumeSpecName "kube-api-access-bl9d4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.122797 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1017da74-ab67-432c-8eb9-8b6b8b7a1aac-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1017da74-ab67-432c-8eb9-8b6b8b7a1aac" (UID: "1017da74-ab67-432c-8eb9-8b6b8b7a1aac"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.123507 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1cae5db-f32e-4625-8331-a2e38a0eaa6b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e1cae5db-f32e-4625-8331-a2e38a0eaa6b" (UID: "e1cae5db-f32e-4625-8331-a2e38a0eaa6b"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.127991 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/470404eb-f10d-46f3-af1a-a173e2fbae0c-kube-api-access-rv5qz" (OuterVolumeSpecName: "kube-api-access-rv5qz") pod "470404eb-f10d-46f3-af1a-a173e2fbae0c" (UID: "470404eb-f10d-46f3-af1a-a173e2fbae0c"). InnerVolumeSpecName "kube-api-access-rv5qz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.129194 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1940218-22f9-4af9-b2ff-eb1b19e318f4-kube-api-access-fd8tl" (OuterVolumeSpecName: "kube-api-access-fd8tl") pod "b1940218-22f9-4af9-b2ff-eb1b19e318f4" (UID: "b1940218-22f9-4af9-b2ff-eb1b19e318f4"). InnerVolumeSpecName "kube-api-access-fd8tl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.129374 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fdc78eb0-da28-4fa6-92a4-bb049a6e6196-kube-api-access-6tw9d" (OuterVolumeSpecName: "kube-api-access-6tw9d") pod "fdc78eb0-da28-4fa6-92a4-bb049a6e6196" (UID: "fdc78eb0-da28-4fa6-92a4-bb049a6e6196"). InnerVolumeSpecName "kube-api-access-6tw9d". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.215890 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e1cae5db-f32e-4625-8331-a2e38a0eaa6b-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.215931 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1017da74-ab67-432c-8eb9-8b6b8b7a1aac-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.215941 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6tw9d\" (UniqueName: \"kubernetes.io/projected/fdc78eb0-da28-4fa6-92a4-bb049a6e6196-kube-api-access-6tw9d\") on node \"crc\" DevicePath \"\"" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.215951 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bl9d4\" (UniqueName: \"kubernetes.io/projected/e1cae5db-f32e-4625-8331-a2e38a0eaa6b-kube-api-access-bl9d4\") on node \"crc\" DevicePath \"\"" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.215961 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1940218-22f9-4af9-b2ff-eb1b19e318f4-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.215971 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/470404eb-f10d-46f3-af1a-a173e2fbae0c-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.215980 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fdc78eb0-da28-4fa6-92a4-bb049a6e6196-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.215989 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fd8tl\" (UniqueName: 
\"kubernetes.io/projected/b1940218-22f9-4af9-b2ff-eb1b19e318f4-kube-api-access-fd8tl\") on node \"crc\" DevicePath \"\"" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.215998 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tq6tz\" (UniqueName: \"kubernetes.io/projected/1017da74-ab67-432c-8eb9-8b6b8b7a1aac-kube-api-access-tq6tz\") on node \"crc\" DevicePath \"\"" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.216006 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rv5qz\" (UniqueName: \"kubernetes.io/projected/470404eb-f10d-46f3-af1a-a173e2fbae0c-kube-api-access-rv5qz\") on node \"crc\" DevicePath \"\"" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.361276 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-99rvc" event={"ID":"470404eb-f10d-46f3-af1a-a173e2fbae0c","Type":"ContainerDied","Data":"30481ce13c0675b75c2e2ff6e94770f9693a462d36225220cbc14f2f4d08a9ed"} Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.361273 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-99rvc" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.361328 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="30481ce13c0675b75c2e2ff6e94770f9693a462d36225220cbc14f2f4d08a9ed" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.366011 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-3bdf-account-create-update-hgqfn" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.365998 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-3bdf-account-create-update-hgqfn" event={"ID":"b1940218-22f9-4af9-b2ff-eb1b19e318f4","Type":"ContainerDied","Data":"853c954723dcb332e11ccced170802a1573a8c96dd632f38044cb374b6e90fdf"} Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.366094 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="853c954723dcb332e11ccced170802a1573a8c96dd632f38044cb374b6e90fdf" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.367862 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-6nhjc" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.367850 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-6nhjc" event={"ID":"1017da74-ab67-432c-8eb9-8b6b8b7a1aac","Type":"ContainerDied","Data":"afd332a95cc20873bec1981d3b147860bb73bcc23c8c8ed783cf1de713d3f315"} Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.368008 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="afd332a95cc20873bec1981d3b147860bb73bcc23c8c8ed783cf1de713d3f315" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.369166 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-eddd-account-create-update-zkh4b" event={"ID":"fdc78eb0-da28-4fa6-92a4-bb049a6e6196","Type":"ContainerDied","Data":"5cf0ac1a52e3c501ae8f88ae94f12051be775b380bc08c5243ef1b0764ce1436"} Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.369191 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5cf0ac1a52e3c501ae8f88ae94f12051be775b380bc08c5243ef1b0764ce1436" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.369175 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-eddd-account-create-update-zkh4b" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.370561 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-ec47-account-create-update-dnqjz" event={"ID":"e1cae5db-f32e-4625-8331-a2e38a0eaa6b","Type":"ContainerDied","Data":"0ae756027afaad7a900c5dfa4ca450e903d7f58355a1b3346766d13773b9cb7b"} Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.370594 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0ae756027afaad7a900c5dfa4ca450e903d7f58355a1b3346766d13773b9cb7b" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.370576 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-ec47-account-create-update-dnqjz" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.372087 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-8gn9v" event={"ID":"63e69842-46ee-4ad6-a516-1d9a9b9e96c5","Type":"ContainerDied","Data":"6dfb88355e2595f774b9d9d0efb80aa5d6acff184c53711d9a465e9652123d19"} Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.372191 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6dfb88355e2595f774b9d9d0efb80aa5d6acff184c53711d9a465e9652123d19" Jan 28 17:19:52 crc kubenswrapper[4811]: I0128 17:19:52.372128 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-8gn9v" Jan 28 17:19:53 crc kubenswrapper[4811]: I0128 17:19:53.540210 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-bk8c2"] Jan 28 17:19:53 crc kubenswrapper[4811]: E0128 17:19:53.540648 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1017da74-ab67-432c-8eb9-8b6b8b7a1aac" containerName="mariadb-database-create" Jan 28 17:19:53 crc kubenswrapper[4811]: I0128 17:19:53.540669 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="1017da74-ab67-432c-8eb9-8b6b8b7a1aac" containerName="mariadb-database-create" Jan 28 17:19:53 crc kubenswrapper[4811]: E0128 17:19:53.540685 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1940218-22f9-4af9-b2ff-eb1b19e318f4" containerName="mariadb-account-create-update" Jan 28 17:19:53 crc kubenswrapper[4811]: I0128 17:19:53.540691 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1940218-22f9-4af9-b2ff-eb1b19e318f4" containerName="mariadb-account-create-update" Jan 28 17:19:53 crc kubenswrapper[4811]: E0128 17:19:53.540708 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdc78eb0-da28-4fa6-92a4-bb049a6e6196" containerName="mariadb-account-create-update" Jan 28 17:19:53 crc kubenswrapper[4811]: I0128 17:19:53.540715 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdc78eb0-da28-4fa6-92a4-bb049a6e6196" containerName="mariadb-account-create-update" Jan 28 17:19:53 crc kubenswrapper[4811]: E0128 17:19:53.540726 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1cae5db-f32e-4625-8331-a2e38a0eaa6b" containerName="mariadb-account-create-update" Jan 28 17:19:53 crc kubenswrapper[4811]: I0128 17:19:53.540732 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1cae5db-f32e-4625-8331-a2e38a0eaa6b" containerName="mariadb-account-create-update" Jan 28 17:19:53 crc kubenswrapper[4811]: E0128 17:19:53.540745 4811 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="63e69842-46ee-4ad6-a516-1d9a9b9e96c5" containerName="mariadb-database-create" Jan 28 17:19:53 crc kubenswrapper[4811]: I0128 17:19:53.540750 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="63e69842-46ee-4ad6-a516-1d9a9b9e96c5" containerName="mariadb-database-create" Jan 28 17:19:53 crc kubenswrapper[4811]: E0128 17:19:53.540764 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="470404eb-f10d-46f3-af1a-a173e2fbae0c" containerName="mariadb-database-create" Jan 28 17:19:53 crc kubenswrapper[4811]: I0128 17:19:53.540771 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="470404eb-f10d-46f3-af1a-a173e2fbae0c" containerName="mariadb-database-create" Jan 28 17:19:53 crc kubenswrapper[4811]: I0128 17:19:53.540915 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="1017da74-ab67-432c-8eb9-8b6b8b7a1aac" containerName="mariadb-database-create" Jan 28 17:19:53 crc kubenswrapper[4811]: I0128 17:19:53.540931 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1940218-22f9-4af9-b2ff-eb1b19e318f4" containerName="mariadb-account-create-update" Jan 28 17:19:53 crc kubenswrapper[4811]: I0128 17:19:53.540940 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="63e69842-46ee-4ad6-a516-1d9a9b9e96c5" containerName="mariadb-database-create" Jan 28 17:19:53 crc kubenswrapper[4811]: I0128 17:19:53.540956 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdc78eb0-da28-4fa6-92a4-bb049a6e6196" containerName="mariadb-account-create-update" Jan 28 17:19:53 crc kubenswrapper[4811]: I0128 17:19:53.540967 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1cae5db-f32e-4625-8331-a2e38a0eaa6b" containerName="mariadb-account-create-update" Jan 28 17:19:53 crc kubenswrapper[4811]: I0128 17:19:53.540975 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="470404eb-f10d-46f3-af1a-a173e2fbae0c" containerName="mariadb-database-create" Jan 28 17:19:53 crc kubenswrapper[4811]: I0128 17:19:53.541551 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-bk8c2" Jan 28 17:19:53 crc kubenswrapper[4811]: I0128 17:19:53.551620 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-2rnmf" Jan 28 17:19:53 crc kubenswrapper[4811]: I0128 17:19:53.551831 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Jan 28 17:19:53 crc kubenswrapper[4811]: I0128 17:19:53.560705 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Jan 28 17:19:53 crc kubenswrapper[4811]: I0128 17:19:53.571363 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-bk8c2"] Jan 28 17:19:53 crc kubenswrapper[4811]: I0128 17:19:53.646378 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d57f8a25-7690-4908-88e0-f228e953daac-scripts\") pod \"nova-cell0-conductor-db-sync-bk8c2\" (UID: \"d57f8a25-7690-4908-88e0-f228e953daac\") " pod="openstack/nova-cell0-conductor-db-sync-bk8c2" Jan 28 17:19:53 crc kubenswrapper[4811]: I0128 17:19:53.646477 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d57f8a25-7690-4908-88e0-f228e953daac-config-data\") pod \"nova-cell0-conductor-db-sync-bk8c2\" (UID: \"d57f8a25-7690-4908-88e0-f228e953daac\") " pod="openstack/nova-cell0-conductor-db-sync-bk8c2" Jan 28 17:19:53 crc kubenswrapper[4811]: I0128 17:19:53.646516 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhw57\" (UniqueName: \"kubernetes.io/projected/d57f8a25-7690-4908-88e0-f228e953daac-kube-api-access-vhw57\") pod \"nova-cell0-conductor-db-sync-bk8c2\" (UID: \"d57f8a25-7690-4908-88e0-f228e953daac\") " pod="openstack/nova-cell0-conductor-db-sync-bk8c2" Jan 28 17:19:53 crc kubenswrapper[4811]: I0128 17:19:53.646557 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d57f8a25-7690-4908-88e0-f228e953daac-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-bk8c2\" (UID: \"d57f8a25-7690-4908-88e0-f228e953daac\") " pod="openstack/nova-cell0-conductor-db-sync-bk8c2" Jan 28 17:19:53 crc kubenswrapper[4811]: I0128 17:19:53.748070 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d57f8a25-7690-4908-88e0-f228e953daac-scripts\") pod \"nova-cell0-conductor-db-sync-bk8c2\" (UID: \"d57f8a25-7690-4908-88e0-f228e953daac\") " pod="openstack/nova-cell0-conductor-db-sync-bk8c2" Jan 28 17:19:53 crc kubenswrapper[4811]: I0128 17:19:53.748131 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d57f8a25-7690-4908-88e0-f228e953daac-config-data\") pod \"nova-cell0-conductor-db-sync-bk8c2\" (UID: \"d57f8a25-7690-4908-88e0-f228e953daac\") " pod="openstack/nova-cell0-conductor-db-sync-bk8c2" Jan 28 17:19:53 crc kubenswrapper[4811]: I0128 17:19:53.748153 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhw57\" (UniqueName: \"kubernetes.io/projected/d57f8a25-7690-4908-88e0-f228e953daac-kube-api-access-vhw57\") pod \"nova-cell0-conductor-db-sync-bk8c2\" (UID: 
\"d57f8a25-7690-4908-88e0-f228e953daac\") " pod="openstack/nova-cell0-conductor-db-sync-bk8c2" Jan 28 17:19:53 crc kubenswrapper[4811]: I0128 17:19:53.748180 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d57f8a25-7690-4908-88e0-f228e953daac-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-bk8c2\" (UID: \"d57f8a25-7690-4908-88e0-f228e953daac\") " pod="openstack/nova-cell0-conductor-db-sync-bk8c2" Jan 28 17:19:53 crc kubenswrapper[4811]: I0128 17:19:53.753182 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d57f8a25-7690-4908-88e0-f228e953daac-scripts\") pod \"nova-cell0-conductor-db-sync-bk8c2\" (UID: \"d57f8a25-7690-4908-88e0-f228e953daac\") " pod="openstack/nova-cell0-conductor-db-sync-bk8c2" Jan 28 17:19:53 crc kubenswrapper[4811]: I0128 17:19:53.753326 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d57f8a25-7690-4908-88e0-f228e953daac-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-bk8c2\" (UID: \"d57f8a25-7690-4908-88e0-f228e953daac\") " pod="openstack/nova-cell0-conductor-db-sync-bk8c2" Jan 28 17:19:53 crc kubenswrapper[4811]: I0128 17:19:53.753948 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d57f8a25-7690-4908-88e0-f228e953daac-config-data\") pod \"nova-cell0-conductor-db-sync-bk8c2\" (UID: \"d57f8a25-7690-4908-88e0-f228e953daac\") " pod="openstack/nova-cell0-conductor-db-sync-bk8c2" Jan 28 17:19:53 crc kubenswrapper[4811]: I0128 17:19:53.765018 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhw57\" (UniqueName: \"kubernetes.io/projected/d57f8a25-7690-4908-88e0-f228e953daac-kube-api-access-vhw57\") pod \"nova-cell0-conductor-db-sync-bk8c2\" (UID: \"d57f8a25-7690-4908-88e0-f228e953daac\") " pod="openstack/nova-cell0-conductor-db-sync-bk8c2" Jan 28 17:19:53 crc kubenswrapper[4811]: I0128 17:19:53.863155 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-bk8c2" Jan 28 17:19:54 crc kubenswrapper[4811]: I0128 17:19:54.330992 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-bk8c2"] Jan 28 17:19:54 crc kubenswrapper[4811]: I0128 17:19:54.392679 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-bk8c2" event={"ID":"d57f8a25-7690-4908-88e0-f228e953daac","Type":"ContainerStarted","Data":"aab63325eae249026f870cdacd99aeb1f93b0b7c490aecaeae096150c406e796"} Jan 28 17:19:55 crc kubenswrapper[4811]: I0128 17:19:55.339304 4811 scope.go:117] "RemoveContainer" containerID="3abd2ca52dcea4f0bad2eccd9e9279be836bcb0c1530a2433f35225ded0da091" Jan 28 17:19:55 crc kubenswrapper[4811]: E0128 17:19:55.339909 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:19:55 crc kubenswrapper[4811]: I0128 17:19:55.417855 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-bk8c2" event={"ID":"d57f8a25-7690-4908-88e0-f228e953daac","Type":"ContainerStarted","Data":"34dc44328ff7640bf0d1d622839ae48b0134339e8f707e6a0c34eba85e6ae89a"} Jan 28 17:19:55 crc kubenswrapper[4811]: I0128 17:19:55.437561 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-bk8c2" podStartSLOduration=2.4375392590000002 podStartE2EDuration="2.437539259s" podCreationTimestamp="2026-01-28 17:19:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:19:55.431175476 +0000 UTC m=+5688.185539059" watchObservedRunningTime="2026-01-28 17:19:55.437539259 +0000 UTC m=+5688.191902842" Jan 28 17:20:00 crc kubenswrapper[4811]: I0128 17:20:00.498206 4811 generic.go:334] "Generic (PLEG): container finished" podID="d57f8a25-7690-4908-88e0-f228e953daac" containerID="34dc44328ff7640bf0d1d622839ae48b0134339e8f707e6a0c34eba85e6ae89a" exitCode=0 Jan 28 17:20:00 crc kubenswrapper[4811]: I0128 17:20:00.498540 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-bk8c2" event={"ID":"d57f8a25-7690-4908-88e0-f228e953daac","Type":"ContainerDied","Data":"34dc44328ff7640bf0d1d622839ae48b0134339e8f707e6a0c34eba85e6ae89a"} Jan 28 17:20:01 crc kubenswrapper[4811]: I0128 17:20:01.886853 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-bk8c2" Jan 28 17:20:02 crc kubenswrapper[4811]: I0128 17:20:02.037515 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d57f8a25-7690-4908-88e0-f228e953daac-scripts\") pod \"d57f8a25-7690-4908-88e0-f228e953daac\" (UID: \"d57f8a25-7690-4908-88e0-f228e953daac\") " Jan 28 17:20:02 crc kubenswrapper[4811]: I0128 17:20:02.037730 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d57f8a25-7690-4908-88e0-f228e953daac-combined-ca-bundle\") pod \"d57f8a25-7690-4908-88e0-f228e953daac\" (UID: \"d57f8a25-7690-4908-88e0-f228e953daac\") " Jan 28 17:20:02 crc kubenswrapper[4811]: I0128 17:20:02.038515 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d57f8a25-7690-4908-88e0-f228e953daac-config-data\") pod \"d57f8a25-7690-4908-88e0-f228e953daac\" (UID: \"d57f8a25-7690-4908-88e0-f228e953daac\") " Jan 28 17:20:02 crc kubenswrapper[4811]: I0128 17:20:02.038569 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vhw57\" (UniqueName: \"kubernetes.io/projected/d57f8a25-7690-4908-88e0-f228e953daac-kube-api-access-vhw57\") pod \"d57f8a25-7690-4908-88e0-f228e953daac\" (UID: \"d57f8a25-7690-4908-88e0-f228e953daac\") " Jan 28 17:20:02 crc kubenswrapper[4811]: I0128 17:20:02.043515 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d57f8a25-7690-4908-88e0-f228e953daac-scripts" (OuterVolumeSpecName: "scripts") pod "d57f8a25-7690-4908-88e0-f228e953daac" (UID: "d57f8a25-7690-4908-88e0-f228e953daac"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:20:02 crc kubenswrapper[4811]: I0128 17:20:02.044259 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d57f8a25-7690-4908-88e0-f228e953daac-kube-api-access-vhw57" (OuterVolumeSpecName: "kube-api-access-vhw57") pod "d57f8a25-7690-4908-88e0-f228e953daac" (UID: "d57f8a25-7690-4908-88e0-f228e953daac"). InnerVolumeSpecName "kube-api-access-vhw57". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:20:02 crc kubenswrapper[4811]: I0128 17:20:02.062022 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d57f8a25-7690-4908-88e0-f228e953daac-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d57f8a25-7690-4908-88e0-f228e953daac" (UID: "d57f8a25-7690-4908-88e0-f228e953daac"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:20:02 crc kubenswrapper[4811]: I0128 17:20:02.067797 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d57f8a25-7690-4908-88e0-f228e953daac-config-data" (OuterVolumeSpecName: "config-data") pod "d57f8a25-7690-4908-88e0-f228e953daac" (UID: "d57f8a25-7690-4908-88e0-f228e953daac"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:20:02 crc kubenswrapper[4811]: I0128 17:20:02.142222 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d57f8a25-7690-4908-88e0-f228e953daac-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 17:20:02 crc kubenswrapper[4811]: I0128 17:20:02.142252 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vhw57\" (UniqueName: \"kubernetes.io/projected/d57f8a25-7690-4908-88e0-f228e953daac-kube-api-access-vhw57\") on node \"crc\" DevicePath \"\"" Jan 28 17:20:02 crc kubenswrapper[4811]: I0128 17:20:02.142266 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d57f8a25-7690-4908-88e0-f228e953daac-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:20:02 crc kubenswrapper[4811]: I0128 17:20:02.142298 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d57f8a25-7690-4908-88e0-f228e953daac-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 17:20:02 crc kubenswrapper[4811]: I0128 17:20:02.520349 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-bk8c2" event={"ID":"d57f8a25-7690-4908-88e0-f228e953daac","Type":"ContainerDied","Data":"aab63325eae249026f870cdacd99aeb1f93b0b7c490aecaeae096150c406e796"} Jan 28 17:20:02 crc kubenswrapper[4811]: I0128 17:20:02.520394 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aab63325eae249026f870cdacd99aeb1f93b0b7c490aecaeae096150c406e796" Jan 28 17:20:02 crc kubenswrapper[4811]: I0128 17:20:02.520422 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-bk8c2" Jan 28 17:20:02 crc kubenswrapper[4811]: I0128 17:20:02.615870 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 28 17:20:02 crc kubenswrapper[4811]: E0128 17:20:02.616342 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d57f8a25-7690-4908-88e0-f228e953daac" containerName="nova-cell0-conductor-db-sync" Jan 28 17:20:02 crc kubenswrapper[4811]: I0128 17:20:02.616364 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d57f8a25-7690-4908-88e0-f228e953daac" containerName="nova-cell0-conductor-db-sync" Jan 28 17:20:02 crc kubenswrapper[4811]: I0128 17:20:02.616580 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d57f8a25-7690-4908-88e0-f228e953daac" containerName="nova-cell0-conductor-db-sync" Jan 28 17:20:02 crc kubenswrapper[4811]: I0128 17:20:02.617287 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 28 17:20:02 crc kubenswrapper[4811]: I0128 17:20:02.620094 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Jan 28 17:20:02 crc kubenswrapper[4811]: I0128 17:20:02.620798 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-2rnmf" Jan 28 17:20:02 crc kubenswrapper[4811]: I0128 17:20:02.636204 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 28 17:20:02 crc kubenswrapper[4811]: I0128 17:20:02.752796 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bf77b\" (UniqueName: \"kubernetes.io/projected/aedc4187-66e7-4af9-b508-5d6282f4a937-kube-api-access-bf77b\") pod \"nova-cell0-conductor-0\" (UID: \"aedc4187-66e7-4af9-b508-5d6282f4a937\") " pod="openstack/nova-cell0-conductor-0" Jan 28 17:20:02 crc kubenswrapper[4811]: I0128 17:20:02.752890 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aedc4187-66e7-4af9-b508-5d6282f4a937-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"aedc4187-66e7-4af9-b508-5d6282f4a937\") " pod="openstack/nova-cell0-conductor-0" Jan 28 17:20:02 crc kubenswrapper[4811]: I0128 17:20:02.753111 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aedc4187-66e7-4af9-b508-5d6282f4a937-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"aedc4187-66e7-4af9-b508-5d6282f4a937\") " pod="openstack/nova-cell0-conductor-0" Jan 28 17:20:02 crc kubenswrapper[4811]: I0128 17:20:02.854206 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bf77b\" (UniqueName: \"kubernetes.io/projected/aedc4187-66e7-4af9-b508-5d6282f4a937-kube-api-access-bf77b\") pod \"nova-cell0-conductor-0\" (UID: \"aedc4187-66e7-4af9-b508-5d6282f4a937\") " pod="openstack/nova-cell0-conductor-0" Jan 28 17:20:02 crc kubenswrapper[4811]: I0128 17:20:02.854259 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aedc4187-66e7-4af9-b508-5d6282f4a937-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"aedc4187-66e7-4af9-b508-5d6282f4a937\") " pod="openstack/nova-cell0-conductor-0" Jan 28 17:20:02 crc kubenswrapper[4811]: I0128 17:20:02.854336 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aedc4187-66e7-4af9-b508-5d6282f4a937-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"aedc4187-66e7-4af9-b508-5d6282f4a937\") " pod="openstack/nova-cell0-conductor-0" Jan 28 17:20:02 crc kubenswrapper[4811]: I0128 17:20:02.857999 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aedc4187-66e7-4af9-b508-5d6282f4a937-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"aedc4187-66e7-4af9-b508-5d6282f4a937\") " pod="openstack/nova-cell0-conductor-0" Jan 28 17:20:02 crc kubenswrapper[4811]: I0128 17:20:02.858287 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aedc4187-66e7-4af9-b508-5d6282f4a937-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" 
(UID: \"aedc4187-66e7-4af9-b508-5d6282f4a937\") " pod="openstack/nova-cell0-conductor-0" Jan 28 17:20:02 crc kubenswrapper[4811]: I0128 17:20:02.870146 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bf77b\" (UniqueName: \"kubernetes.io/projected/aedc4187-66e7-4af9-b508-5d6282f4a937-kube-api-access-bf77b\") pod \"nova-cell0-conductor-0\" (UID: \"aedc4187-66e7-4af9-b508-5d6282f4a937\") " pod="openstack/nova-cell0-conductor-0" Jan 28 17:20:02 crc kubenswrapper[4811]: I0128 17:20:02.936580 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 28 17:20:03 crc kubenswrapper[4811]: I0128 17:20:03.374800 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 28 17:20:03 crc kubenswrapper[4811]: W0128 17:20:03.377846 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaedc4187_66e7_4af9_b508_5d6282f4a937.slice/crio-caa2f50b99802eff3eed940ec29f6b935f7f179d713280592ec4eca81f5ba1fe WatchSource:0}: Error finding container caa2f50b99802eff3eed940ec29f6b935f7f179d713280592ec4eca81f5ba1fe: Status 404 returned error can't find the container with id caa2f50b99802eff3eed940ec29f6b935f7f179d713280592ec4eca81f5ba1fe Jan 28 17:20:03 crc kubenswrapper[4811]: I0128 17:20:03.529797 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"aedc4187-66e7-4af9-b508-5d6282f4a937","Type":"ContainerStarted","Data":"caa2f50b99802eff3eed940ec29f6b935f7f179d713280592ec4eca81f5ba1fe"} Jan 28 17:20:04 crc kubenswrapper[4811]: I0128 17:20:04.541514 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"aedc4187-66e7-4af9-b508-5d6282f4a937","Type":"ContainerStarted","Data":"9b65aefbe5958d180fb7ea04bfe17f85779eeaee70d605e4ca9ba2f51ffe4f87"} Jan 28 17:20:04 crc kubenswrapper[4811]: I0128 17:20:04.543093 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Jan 28 17:20:04 crc kubenswrapper[4811]: I0128 17:20:04.567930 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.567903555 podStartE2EDuration="2.567903555s" podCreationTimestamp="2026-01-28 17:20:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:20:04.557524203 +0000 UTC m=+5697.311887796" watchObservedRunningTime="2026-01-28 17:20:04.567903555 +0000 UTC m=+5697.322267148" Jan 28 17:20:07 crc kubenswrapper[4811]: I0128 17:20:07.340181 4811 scope.go:117] "RemoveContainer" containerID="3abd2ca52dcea4f0bad2eccd9e9279be836bcb0c1530a2433f35225ded0da091" Jan 28 17:20:07 crc kubenswrapper[4811]: E0128 17:20:07.340720 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:20:12 crc kubenswrapper[4811]: I0128 17:20:12.963343 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Jan 
28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.375136 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-5vhsh"] Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.376329 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-5vhsh" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.378297 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.379140 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.386041 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-5vhsh"] Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.475477 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fc79f2a-5b46-4089-aa7c-517621407f96-config-data\") pod \"nova-cell0-cell-mapping-5vhsh\" (UID: \"3fc79f2a-5b46-4089-aa7c-517621407f96\") " pod="openstack/nova-cell0-cell-mapping-5vhsh" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.475587 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fc79f2a-5b46-4089-aa7c-517621407f96-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-5vhsh\" (UID: \"3fc79f2a-5b46-4089-aa7c-517621407f96\") " pod="openstack/nova-cell0-cell-mapping-5vhsh" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.475615 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7sr4r\" (UniqueName: \"kubernetes.io/projected/3fc79f2a-5b46-4089-aa7c-517621407f96-kube-api-access-7sr4r\") pod \"nova-cell0-cell-mapping-5vhsh\" (UID: \"3fc79f2a-5b46-4089-aa7c-517621407f96\") " pod="openstack/nova-cell0-cell-mapping-5vhsh" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.475643 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fc79f2a-5b46-4089-aa7c-517621407f96-scripts\") pod \"nova-cell0-cell-mapping-5vhsh\" (UID: \"3fc79f2a-5b46-4089-aa7c-517621407f96\") " pod="openstack/nova-cell0-cell-mapping-5vhsh" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.499293 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.502453 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.505469 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.511325 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.577630 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fc79f2a-5b46-4089-aa7c-517621407f96-scripts\") pod \"nova-cell0-cell-mapping-5vhsh\" (UID: \"3fc79f2a-5b46-4089-aa7c-517621407f96\") " pod="openstack/nova-cell0-cell-mapping-5vhsh" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.578173 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fc79f2a-5b46-4089-aa7c-517621407f96-config-data\") pod \"nova-cell0-cell-mapping-5vhsh\" (UID: \"3fc79f2a-5b46-4089-aa7c-517621407f96\") " pod="openstack/nova-cell0-cell-mapping-5vhsh" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.578304 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fc79f2a-5b46-4089-aa7c-517621407f96-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-5vhsh\" (UID: \"3fc79f2a-5b46-4089-aa7c-517621407f96\") " pod="openstack/nova-cell0-cell-mapping-5vhsh" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.578654 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7sr4r\" (UniqueName: \"kubernetes.io/projected/3fc79f2a-5b46-4089-aa7c-517621407f96-kube-api-access-7sr4r\") pod \"nova-cell0-cell-mapping-5vhsh\" (UID: \"3fc79f2a-5b46-4089-aa7c-517621407f96\") " pod="openstack/nova-cell0-cell-mapping-5vhsh" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.584232 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fc79f2a-5b46-4089-aa7c-517621407f96-scripts\") pod \"nova-cell0-cell-mapping-5vhsh\" (UID: \"3fc79f2a-5b46-4089-aa7c-517621407f96\") " pod="openstack/nova-cell0-cell-mapping-5vhsh" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.584979 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.588722 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fc79f2a-5b46-4089-aa7c-517621407f96-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-5vhsh\" (UID: \"3fc79f2a-5b46-4089-aa7c-517621407f96\") " pod="openstack/nova-cell0-cell-mapping-5vhsh" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.596303 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.599650 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.627145 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fc79f2a-5b46-4089-aa7c-517621407f96-config-data\") pod \"nova-cell0-cell-mapping-5vhsh\" (UID: \"3fc79f2a-5b46-4089-aa7c-517621407f96\") " pod="openstack/nova-cell0-cell-mapping-5vhsh" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.680275 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7sr4r\" (UniqueName: \"kubernetes.io/projected/3fc79f2a-5b46-4089-aa7c-517621407f96-kube-api-access-7sr4r\") pod \"nova-cell0-cell-mapping-5vhsh\" (UID: \"3fc79f2a-5b46-4089-aa7c-517621407f96\") " pod="openstack/nova-cell0-cell-mapping-5vhsh" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.685673 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f-config-data\") pod \"nova-api-0\" (UID: \"300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f\") " pod="openstack/nova-api-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.686101 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4s5h7\" (UniqueName: \"kubernetes.io/projected/300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f-kube-api-access-4s5h7\") pod \"nova-api-0\" (UID: \"300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f\") " pod="openstack/nova-api-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.687835 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f\") " pod="openstack/nova-api-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.688118 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f-logs\") pod \"nova-api-0\" (UID: \"300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f\") " pod="openstack/nova-api-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.700955 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-5vhsh" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.713214 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.790625 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f-config-data\") pod \"nova-api-0\" (UID: \"300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f\") " pod="openstack/nova-api-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.790691 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5471f39-1950-4c21-a18d-b53f4659eb30-config-data\") pod \"nova-metadata-0\" (UID: \"d5471f39-1950-4c21-a18d-b53f4659eb30\") " pod="openstack/nova-metadata-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.790731 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4s5h7\" (UniqueName: \"kubernetes.io/projected/300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f-kube-api-access-4s5h7\") pod \"nova-api-0\" (UID: \"300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f\") " pod="openstack/nova-api-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.790760 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5471f39-1950-4c21-a18d-b53f4659eb30-logs\") pod \"nova-metadata-0\" (UID: \"d5471f39-1950-4c21-a18d-b53f4659eb30\") " pod="openstack/nova-metadata-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.790789 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f\") " pod="openstack/nova-api-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.790810 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f-logs\") pod \"nova-api-0\" (UID: \"300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f\") " pod="openstack/nova-api-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.790849 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5471f39-1950-4c21-a18d-b53f4659eb30-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d5471f39-1950-4c21-a18d-b53f4659eb30\") " pod="openstack/nova-metadata-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.790931 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dw7h\" (UniqueName: \"kubernetes.io/projected/d5471f39-1950-4c21-a18d-b53f4659eb30-kube-api-access-6dw7h\") pod \"nova-metadata-0\" (UID: \"d5471f39-1950-4c21-a18d-b53f4659eb30\") " pod="openstack/nova-metadata-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.793120 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f-logs\") pod \"nova-api-0\" (UID: \"300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f\") " pod="openstack/nova-api-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.793583 4811 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.795154 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.805287 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.821017 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f-config-data\") pod \"nova-api-0\" (UID: \"300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f\") " pod="openstack/nova-api-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.826039 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f\") " pod="openstack/nova-api-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.834402 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4s5h7\" (UniqueName: \"kubernetes.io/projected/300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f-kube-api-access-4s5h7\") pod \"nova-api-0\" (UID: \"300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f\") " pod="openstack/nova-api-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.838481 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.857755 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6b4c886967-f7ls6"] Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.860103 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b4c886967-f7ls6" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.870628 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b4c886967-f7ls6"] Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.883004 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.885179 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.888340 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.893053 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dw7h\" (UniqueName: \"kubernetes.io/projected/d5471f39-1950-4c21-a18d-b53f4659eb30-kube-api-access-6dw7h\") pod \"nova-metadata-0\" (UID: \"d5471f39-1950-4c21-a18d-b53f4659eb30\") " pod="openstack/nova-metadata-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.893093 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jj27z\" (UniqueName: \"kubernetes.io/projected/10fa4a6b-1328-48af-a375-ad6486d2eff2-kube-api-access-jj27z\") pod \"dnsmasq-dns-6b4c886967-f7ls6\" (UID: \"10fa4a6b-1328-48af-a375-ad6486d2eff2\") " pod="openstack/dnsmasq-dns-6b4c886967-f7ls6" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.893123 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/10fa4a6b-1328-48af-a375-ad6486d2eff2-dns-svc\") pod \"dnsmasq-dns-6b4c886967-f7ls6\" (UID: \"10fa4a6b-1328-48af-a375-ad6486d2eff2\") " pod="openstack/dnsmasq-dns-6b4c886967-f7ls6" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.893153 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83\") " pod="openstack/nova-scheduler-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.893185 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10fa4a6b-1328-48af-a375-ad6486d2eff2-config\") pod \"dnsmasq-dns-6b4c886967-f7ls6\" (UID: \"10fa4a6b-1328-48af-a375-ad6486d2eff2\") " pod="openstack/dnsmasq-dns-6b4c886967-f7ls6" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.893225 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0423b7af-d5f2-4cc2-88c8-da8756cbc46d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"0423b7af-d5f2-4cc2-88c8-da8756cbc46d\") " pod="openstack/nova-cell1-novncproxy-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.893258 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5471f39-1950-4c21-a18d-b53f4659eb30-config-data\") pod \"nova-metadata-0\" (UID: \"d5471f39-1950-4c21-a18d-b53f4659eb30\") " pod="openstack/nova-metadata-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.893295 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5471f39-1950-4c21-a18d-b53f4659eb30-logs\") pod \"nova-metadata-0\" (UID: \"d5471f39-1950-4c21-a18d-b53f4659eb30\") " pod="openstack/nova-metadata-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.893319 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/10fa4a6b-1328-48af-a375-ad6486d2eff2-ovsdbserver-sb\") pod \"dnsmasq-dns-6b4c886967-f7ls6\" (UID: \"10fa4a6b-1328-48af-a375-ad6486d2eff2\") " pod="openstack/dnsmasq-dns-6b4c886967-f7ls6" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.893353 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxb8p\" (UniqueName: \"kubernetes.io/projected/0423b7af-d5f2-4cc2-88c8-da8756cbc46d-kube-api-access-pxb8p\") pod \"nova-cell1-novncproxy-0\" (UID: \"0423b7af-d5f2-4cc2-88c8-da8756cbc46d\") " pod="openstack/nova-cell1-novncproxy-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.893381 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9nzwc\" (UniqueName: \"kubernetes.io/projected/794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83-kube-api-access-9nzwc\") pod \"nova-scheduler-0\" (UID: \"794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83\") " pod="openstack/nova-scheduler-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.893402 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83-config-data\") pod \"nova-scheduler-0\" (UID: \"794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83\") " pod="openstack/nova-scheduler-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.893502 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5471f39-1950-4c21-a18d-b53f4659eb30-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d5471f39-1950-4c21-a18d-b53f4659eb30\") " pod="openstack/nova-metadata-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.893545 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/10fa4a6b-1328-48af-a375-ad6486d2eff2-ovsdbserver-nb\") pod \"dnsmasq-dns-6b4c886967-f7ls6\" (UID: \"10fa4a6b-1328-48af-a375-ad6486d2eff2\") " pod="openstack/dnsmasq-dns-6b4c886967-f7ls6" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.893611 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0423b7af-d5f2-4cc2-88c8-da8756cbc46d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"0423b7af-d5f2-4cc2-88c8-da8756cbc46d\") " pod="openstack/nova-cell1-novncproxy-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.894223 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5471f39-1950-4c21-a18d-b53f4659eb30-logs\") pod \"nova-metadata-0\" (UID: \"d5471f39-1950-4c21-a18d-b53f4659eb30\") " pod="openstack/nova-metadata-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.906783 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.912130 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5471f39-1950-4c21-a18d-b53f4659eb30-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d5471f39-1950-4c21-a18d-b53f4659eb30\") " pod="openstack/nova-metadata-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.919509 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/d5471f39-1950-4c21-a18d-b53f4659eb30-config-data\") pod \"nova-metadata-0\" (UID: \"d5471f39-1950-4c21-a18d-b53f4659eb30\") " pod="openstack/nova-metadata-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.929849 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dw7h\" (UniqueName: \"kubernetes.io/projected/d5471f39-1950-4c21-a18d-b53f4659eb30-kube-api-access-6dw7h\") pod \"nova-metadata-0\" (UID: \"d5471f39-1950-4c21-a18d-b53f4659eb30\") " pod="openstack/nova-metadata-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.996898 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0423b7af-d5f2-4cc2-88c8-da8756cbc46d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"0423b7af-d5f2-4cc2-88c8-da8756cbc46d\") " pod="openstack/nova-cell1-novncproxy-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.996986 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/10fa4a6b-1328-48af-a375-ad6486d2eff2-ovsdbserver-sb\") pod \"dnsmasq-dns-6b4c886967-f7ls6\" (UID: \"10fa4a6b-1328-48af-a375-ad6486d2eff2\") " pod="openstack/dnsmasq-dns-6b4c886967-f7ls6" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.997015 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxb8p\" (UniqueName: \"kubernetes.io/projected/0423b7af-d5f2-4cc2-88c8-da8756cbc46d-kube-api-access-pxb8p\") pod \"nova-cell1-novncproxy-0\" (UID: \"0423b7af-d5f2-4cc2-88c8-da8756cbc46d\") " pod="openstack/nova-cell1-novncproxy-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.997048 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9nzwc\" (UniqueName: \"kubernetes.io/projected/794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83-kube-api-access-9nzwc\") pod \"nova-scheduler-0\" (UID: \"794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83\") " pod="openstack/nova-scheduler-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.997073 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83-config-data\") pod \"nova-scheduler-0\" (UID: \"794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83\") " pod="openstack/nova-scheduler-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.997116 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/10fa4a6b-1328-48af-a375-ad6486d2eff2-ovsdbserver-nb\") pod \"dnsmasq-dns-6b4c886967-f7ls6\" (UID: \"10fa4a6b-1328-48af-a375-ad6486d2eff2\") " pod="openstack/dnsmasq-dns-6b4c886967-f7ls6" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.997174 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0423b7af-d5f2-4cc2-88c8-da8756cbc46d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"0423b7af-d5f2-4cc2-88c8-da8756cbc46d\") " pod="openstack/nova-cell1-novncproxy-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.997206 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jj27z\" (UniqueName: \"kubernetes.io/projected/10fa4a6b-1328-48af-a375-ad6486d2eff2-kube-api-access-jj27z\") pod \"dnsmasq-dns-6b4c886967-f7ls6\" (UID: 
\"10fa4a6b-1328-48af-a375-ad6486d2eff2\") " pod="openstack/dnsmasq-dns-6b4c886967-f7ls6" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.997236 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/10fa4a6b-1328-48af-a375-ad6486d2eff2-dns-svc\") pod \"dnsmasq-dns-6b4c886967-f7ls6\" (UID: \"10fa4a6b-1328-48af-a375-ad6486d2eff2\") " pod="openstack/dnsmasq-dns-6b4c886967-f7ls6" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.997259 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83\") " pod="openstack/nova-scheduler-0" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.997293 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10fa4a6b-1328-48af-a375-ad6486d2eff2-config\") pod \"dnsmasq-dns-6b4c886967-f7ls6\" (UID: \"10fa4a6b-1328-48af-a375-ad6486d2eff2\") " pod="openstack/dnsmasq-dns-6b4c886967-f7ls6" Jan 28 17:20:13 crc kubenswrapper[4811]: I0128 17:20:13.998321 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10fa4a6b-1328-48af-a375-ad6486d2eff2-config\") pod \"dnsmasq-dns-6b4c886967-f7ls6\" (UID: \"10fa4a6b-1328-48af-a375-ad6486d2eff2\") " pod="openstack/dnsmasq-dns-6b4c886967-f7ls6" Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.002245 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/10fa4a6b-1328-48af-a375-ad6486d2eff2-ovsdbserver-nb\") pod \"dnsmasq-dns-6b4c886967-f7ls6\" (UID: \"10fa4a6b-1328-48af-a375-ad6486d2eff2\") " pod="openstack/dnsmasq-dns-6b4c886967-f7ls6" Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.004276 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/10fa4a6b-1328-48af-a375-ad6486d2eff2-dns-svc\") pod \"dnsmasq-dns-6b4c886967-f7ls6\" (UID: \"10fa4a6b-1328-48af-a375-ad6486d2eff2\") " pod="openstack/dnsmasq-dns-6b4c886967-f7ls6" Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.005034 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0423b7af-d5f2-4cc2-88c8-da8756cbc46d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"0423b7af-d5f2-4cc2-88c8-da8756cbc46d\") " pod="openstack/nova-cell1-novncproxy-0" Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.005508 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83\") " pod="openstack/nova-scheduler-0" Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.008305 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/10fa4a6b-1328-48af-a375-ad6486d2eff2-ovsdbserver-sb\") pod \"dnsmasq-dns-6b4c886967-f7ls6\" (UID: \"10fa4a6b-1328-48af-a375-ad6486d2eff2\") " pod="openstack/dnsmasq-dns-6b4c886967-f7ls6" Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.008896 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83-config-data\") pod \"nova-scheduler-0\" (UID: \"794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83\") " pod="openstack/nova-scheduler-0" Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.009979 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0423b7af-d5f2-4cc2-88c8-da8756cbc46d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"0423b7af-d5f2-4cc2-88c8-da8756cbc46d\") " pod="openstack/nova-cell1-novncproxy-0" Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.025174 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jj27z\" (UniqueName: \"kubernetes.io/projected/10fa4a6b-1328-48af-a375-ad6486d2eff2-kube-api-access-jj27z\") pod \"dnsmasq-dns-6b4c886967-f7ls6\" (UID: \"10fa4a6b-1328-48af-a375-ad6486d2eff2\") " pod="openstack/dnsmasq-dns-6b4c886967-f7ls6" Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.030988 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9nzwc\" (UniqueName: \"kubernetes.io/projected/794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83-kube-api-access-9nzwc\") pod \"nova-scheduler-0\" (UID: \"794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83\") " pod="openstack/nova-scheduler-0" Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.031716 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxb8p\" (UniqueName: \"kubernetes.io/projected/0423b7af-d5f2-4cc2-88c8-da8756cbc46d-kube-api-access-pxb8p\") pod \"nova-cell1-novncproxy-0\" (UID: \"0423b7af-d5f2-4cc2-88c8-da8756cbc46d\") " pod="openstack/nova-cell1-novncproxy-0" Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.124486 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.124640 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.154558 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.221036 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b4c886967-f7ls6" Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.243353 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.376989 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-5vhsh"] Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.501657 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-l4rz7"] Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.502795 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-l4rz7" Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.507463 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.507677 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.534058 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-l4rz7"] Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.609711 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d34259dd-463f-4e47-9284-da3824688952-scripts\") pod \"nova-cell1-conductor-db-sync-l4rz7\" (UID: \"d34259dd-463f-4e47-9284-da3824688952\") " pod="openstack/nova-cell1-conductor-db-sync-l4rz7" Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.610122 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d34259dd-463f-4e47-9284-da3824688952-config-data\") pod \"nova-cell1-conductor-db-sync-l4rz7\" (UID: \"d34259dd-463f-4e47-9284-da3824688952\") " pod="openstack/nova-cell1-conductor-db-sync-l4rz7" Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.610231 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kh872\" (UniqueName: \"kubernetes.io/projected/d34259dd-463f-4e47-9284-da3824688952-kube-api-access-kh872\") pod \"nova-cell1-conductor-db-sync-l4rz7\" (UID: \"d34259dd-463f-4e47-9284-da3824688952\") " pod="openstack/nova-cell1-conductor-db-sync-l4rz7" Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.610278 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d34259dd-463f-4e47-9284-da3824688952-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-l4rz7\" (UID: \"d34259dd-463f-4e47-9284-da3824688952\") " pod="openstack/nova-cell1-conductor-db-sync-l4rz7" Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.647056 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.671613 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 28 17:20:14 crc kubenswrapper[4811]: W0128 17:20:14.679630 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd5471f39_1950_4c21_a18d_b53f4659eb30.slice/crio-2ad4b153894722ac4c3327025008da0f5ff13a47187d3d682be3a98fd06124e7 WatchSource:0}: Error finding container 2ad4b153894722ac4c3327025008da0f5ff13a47187d3d682be3a98fd06124e7: Status 404 returned error can't find the container with id 2ad4b153894722ac4c3327025008da0f5ff13a47187d3d682be3a98fd06124e7 Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.691800 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-5vhsh" event={"ID":"3fc79f2a-5b46-4089-aa7c-517621407f96","Type":"ContainerStarted","Data":"108f4df49cd263d45463b70d92fc0237e0bc008a8bf3edc50c3d6c14782422d7"} Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.691847 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell0-cell-mapping-5vhsh" event={"ID":"3fc79f2a-5b46-4089-aa7c-517621407f96","Type":"ContainerStarted","Data":"f795e090022ed809e6a3cd9b0f0f45ca8c8f595591a558e5cf4582ff0145e43c"} Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.697094 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83","Type":"ContainerStarted","Data":"a0a050d4ab51ef691c9b067ab06d00fbfc90dfffc1ba2cb7ddde230784016607"} Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.712408 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kh872\" (UniqueName: \"kubernetes.io/projected/d34259dd-463f-4e47-9284-da3824688952-kube-api-access-kh872\") pod \"nova-cell1-conductor-db-sync-l4rz7\" (UID: \"d34259dd-463f-4e47-9284-da3824688952\") " pod="openstack/nova-cell1-conductor-db-sync-l4rz7" Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.712495 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d34259dd-463f-4e47-9284-da3824688952-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-l4rz7\" (UID: \"d34259dd-463f-4e47-9284-da3824688952\") " pod="openstack/nova-cell1-conductor-db-sync-l4rz7" Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.712533 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d34259dd-463f-4e47-9284-da3824688952-scripts\") pod \"nova-cell1-conductor-db-sync-l4rz7\" (UID: \"d34259dd-463f-4e47-9284-da3824688952\") " pod="openstack/nova-cell1-conductor-db-sync-l4rz7" Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.712628 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d34259dd-463f-4e47-9284-da3824688952-config-data\") pod \"nova-cell1-conductor-db-sync-l4rz7\" (UID: \"d34259dd-463f-4e47-9284-da3824688952\") " pod="openstack/nova-cell1-conductor-db-sync-l4rz7" Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.715794 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d34259dd-463f-4e47-9284-da3824688952-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-l4rz7\" (UID: \"d34259dd-463f-4e47-9284-da3824688952\") " pod="openstack/nova-cell1-conductor-db-sync-l4rz7" Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.716348 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d34259dd-463f-4e47-9284-da3824688952-config-data\") pod \"nova-cell1-conductor-db-sync-l4rz7\" (UID: \"d34259dd-463f-4e47-9284-da3824688952\") " pod="openstack/nova-cell1-conductor-db-sync-l4rz7" Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.717450 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d34259dd-463f-4e47-9284-da3824688952-scripts\") pod \"nova-cell1-conductor-db-sync-l4rz7\" (UID: \"d34259dd-463f-4e47-9284-da3824688952\") " pod="openstack/nova-cell1-conductor-db-sync-l4rz7" Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.729641 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-5vhsh" podStartSLOduration=1.7296139 podStartE2EDuration="1.7296139s" podCreationTimestamp="2026-01-28 17:20:13 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:20:14.706800239 +0000 UTC m=+5707.461163822" watchObservedRunningTime="2026-01-28 17:20:14.7296139 +0000 UTC m=+5707.483977483" Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.734298 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kh872\" (UniqueName: \"kubernetes.io/projected/d34259dd-463f-4e47-9284-da3824688952-kube-api-access-kh872\") pod \"nova-cell1-conductor-db-sync-l4rz7\" (UID: \"d34259dd-463f-4e47-9284-da3824688952\") " pod="openstack/nova-cell1-conductor-db-sync-l4rz7" Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.836614 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.842354 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-l4rz7" Jan 28 17:20:14 crc kubenswrapper[4811]: W0128 17:20:14.842664 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod300878e5_f3e4_4fc9_bcfc_2cb4ac95a72f.slice/crio-4662b600d21170ffc6bd0bd91dc82d876da5d35ba8942bb913f821d51155c251 WatchSource:0}: Error finding container 4662b600d21170ffc6bd0bd91dc82d876da5d35ba8942bb913f821d51155c251: Status 404 returned error can't find the container with id 4662b600d21170ffc6bd0bd91dc82d876da5d35ba8942bb913f821d51155c251 Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.932404 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b4c886967-f7ls6"] Jan 28 17:20:14 crc kubenswrapper[4811]: I0128 17:20:14.940110 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 28 17:20:15 crc kubenswrapper[4811]: I0128 17:20:15.165243 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-l4rz7"] Jan 28 17:20:15 crc kubenswrapper[4811]: I0128 17:20:15.708809 4811 generic.go:334] "Generic (PLEG): container finished" podID="10fa4a6b-1328-48af-a375-ad6486d2eff2" containerID="86d338e8030441eec65c067ec8b82a8500ea606f2f4fbfe17b6bad6421c0c01c" exitCode=0 Jan 28 17:20:15 crc kubenswrapper[4811]: I0128 17:20:15.708897 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b4c886967-f7ls6" event={"ID":"10fa4a6b-1328-48af-a375-ad6486d2eff2","Type":"ContainerDied","Data":"86d338e8030441eec65c067ec8b82a8500ea606f2f4fbfe17b6bad6421c0c01c"} Jan 28 17:20:15 crc kubenswrapper[4811]: I0128 17:20:15.709662 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b4c886967-f7ls6" event={"ID":"10fa4a6b-1328-48af-a375-ad6486d2eff2","Type":"ContainerStarted","Data":"f6e878a52839527008a6afbe866de054f02ebd4e77ecd6ea8e8f5f5a53eb4b99"} Jan 28 17:20:15 crc kubenswrapper[4811]: I0128 17:20:15.725784 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-l4rz7" event={"ID":"d34259dd-463f-4e47-9284-da3824688952","Type":"ContainerStarted","Data":"d0e2efe951dda41b33d59a9746a5bdeb2c3469772e8c06b657033b2b019dd94b"} Jan 28 17:20:15 crc kubenswrapper[4811]: I0128 17:20:15.725835 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-l4rz7" 
event={"ID":"d34259dd-463f-4e47-9284-da3824688952","Type":"ContainerStarted","Data":"5c065c304df3c9fc41e8d4d9f84f9347342e0aee97e21af547e2573e5aeb9912"} Jan 28 17:20:15 crc kubenswrapper[4811]: I0128 17:20:15.743085 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d5471f39-1950-4c21-a18d-b53f4659eb30","Type":"ContainerStarted","Data":"4787f4d3ce0691e844366dc599dd4b3ac3c1abb1d95d5a937601ff51b6068f37"} Jan 28 17:20:15 crc kubenswrapper[4811]: I0128 17:20:15.743136 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d5471f39-1950-4c21-a18d-b53f4659eb30","Type":"ContainerStarted","Data":"d4299378e2e3067cad879504d085872a6b360e143ecac7a372f73a578ec4cbe8"} Jan 28 17:20:15 crc kubenswrapper[4811]: I0128 17:20:15.743147 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d5471f39-1950-4c21-a18d-b53f4659eb30","Type":"ContainerStarted","Data":"2ad4b153894722ac4c3327025008da0f5ff13a47187d3d682be3a98fd06124e7"} Jan 28 17:20:15 crc kubenswrapper[4811]: I0128 17:20:15.756824 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83","Type":"ContainerStarted","Data":"0cccf4485a64ac7256d0e4def2e5bfe4625362794c4c3f1a7ec3bc44e604c2c9"} Jan 28 17:20:15 crc kubenswrapper[4811]: I0128 17:20:15.768550 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f","Type":"ContainerStarted","Data":"db7b9d774a2a95d509ab07492ec83659dd1c91271b51138d92bfb9771dc5e44f"} Jan 28 17:20:15 crc kubenswrapper[4811]: I0128 17:20:15.768615 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f","Type":"ContainerStarted","Data":"e2fb6fab55f0936e8fd3718eefd0191f700bc74af6b2df2cf0f9c4bf11440e1c"} Jan 28 17:20:15 crc kubenswrapper[4811]: I0128 17:20:15.768631 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f","Type":"ContainerStarted","Data":"4662b600d21170ffc6bd0bd91dc82d876da5d35ba8942bb913f821d51155c251"} Jan 28 17:20:15 crc kubenswrapper[4811]: I0128 17:20:15.783294 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-l4rz7" podStartSLOduration=1.783268796 podStartE2EDuration="1.783268796s" podCreationTimestamp="2026-01-28 17:20:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:20:15.753090624 +0000 UTC m=+5708.507454207" watchObservedRunningTime="2026-01-28 17:20:15.783268796 +0000 UTC m=+5708.537632379" Jan 28 17:20:15 crc kubenswrapper[4811]: I0128 17:20:15.793012 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.79298777 podStartE2EDuration="2.79298777s" podCreationTimestamp="2026-01-28 17:20:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:20:15.780735997 +0000 UTC m=+5708.535099580" watchObservedRunningTime="2026-01-28 17:20:15.79298777 +0000 UTC m=+5708.547351353" Jan 28 17:20:15 crc kubenswrapper[4811]: I0128 17:20:15.796679 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" 
event={"ID":"0423b7af-d5f2-4cc2-88c8-da8756cbc46d","Type":"ContainerStarted","Data":"faa821a59523ad6464873b49854f734017f689787c28474c13b09fb8e16e3785"} Jan 28 17:20:15 crc kubenswrapper[4811]: I0128 17:20:15.796748 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"0423b7af-d5f2-4cc2-88c8-da8756cbc46d","Type":"ContainerStarted","Data":"7c4136f04b7296cc35fd76b0ed5b2e973e50d7585ab3605250490fca4a46853b"} Jan 28 17:20:15 crc kubenswrapper[4811]: I0128 17:20:15.859977 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.859951144 podStartE2EDuration="2.859951144s" podCreationTimestamp="2026-01-28 17:20:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:20:15.807536757 +0000 UTC m=+5708.561900360" watchObservedRunningTime="2026-01-28 17:20:15.859951144 +0000 UTC m=+5708.614314727" Jan 28 17:20:15 crc kubenswrapper[4811]: I0128 17:20:15.878538 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.878506989 podStartE2EDuration="2.878506989s" podCreationTimestamp="2026-01-28 17:20:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:20:15.850492726 +0000 UTC m=+5708.604856309" watchObservedRunningTime="2026-01-28 17:20:15.878506989 +0000 UTC m=+5708.632870572" Jan 28 17:20:15 crc kubenswrapper[4811]: I0128 17:20:15.892222 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.892199233 podStartE2EDuration="2.892199233s" podCreationTimestamp="2026-01-28 17:20:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:20:15.86895784 +0000 UTC m=+5708.623321413" watchObservedRunningTime="2026-01-28 17:20:15.892199233 +0000 UTC m=+5708.646562816" Jan 28 17:20:16 crc kubenswrapper[4811]: I0128 17:20:16.808491 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b4c886967-f7ls6" event={"ID":"10fa4a6b-1328-48af-a375-ad6486d2eff2","Type":"ContainerStarted","Data":"a0f254f1d1cfdb701aa081f1de29bf741e92df4d4638fe1db765abd47b04f717"} Jan 28 17:20:16 crc kubenswrapper[4811]: I0128 17:20:16.810345 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6b4c886967-f7ls6" Jan 28 17:20:16 crc kubenswrapper[4811]: I0128 17:20:16.831318 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6b4c886967-f7ls6" podStartSLOduration=3.831295059 podStartE2EDuration="3.831295059s" podCreationTimestamp="2026-01-28 17:20:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:20:16.830123297 +0000 UTC m=+5709.584486890" watchObservedRunningTime="2026-01-28 17:20:16.831295059 +0000 UTC m=+5709.585658642" Jan 28 17:20:18 crc kubenswrapper[4811]: I0128 17:20:18.345946 4811 scope.go:117] "RemoveContainer" containerID="3abd2ca52dcea4f0bad2eccd9e9279be836bcb0c1530a2433f35225ded0da091" Jan 28 17:20:18 crc kubenswrapper[4811]: E0128 17:20:18.346131 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:20:19 crc kubenswrapper[4811]: I0128 17:20:19.125096 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 28 17:20:19 crc kubenswrapper[4811]: I0128 17:20:19.125160 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 28 17:20:19 crc kubenswrapper[4811]: I0128 17:20:19.155342 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Jan 28 17:20:19 crc kubenswrapper[4811]: I0128 17:20:19.243739 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Jan 28 17:20:19 crc kubenswrapper[4811]: I0128 17:20:19.831285 4811 generic.go:334] "Generic (PLEG): container finished" podID="d34259dd-463f-4e47-9284-da3824688952" containerID="d0e2efe951dda41b33d59a9746a5bdeb2c3469772e8c06b657033b2b019dd94b" exitCode=0 Jan 28 17:20:19 crc kubenswrapper[4811]: I0128 17:20:19.831340 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-l4rz7" event={"ID":"d34259dd-463f-4e47-9284-da3824688952","Type":"ContainerDied","Data":"d0e2efe951dda41b33d59a9746a5bdeb2c3469772e8c06b657033b2b019dd94b"} Jan 28 17:20:20 crc kubenswrapper[4811]: I0128 17:20:20.840923 4811 generic.go:334] "Generic (PLEG): container finished" podID="3fc79f2a-5b46-4089-aa7c-517621407f96" containerID="108f4df49cd263d45463b70d92fc0237e0bc008a8bf3edc50c3d6c14782422d7" exitCode=0 Jan 28 17:20:20 crc kubenswrapper[4811]: I0128 17:20:20.841079 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-5vhsh" event={"ID":"3fc79f2a-5b46-4089-aa7c-517621407f96","Type":"ContainerDied","Data":"108f4df49cd263d45463b70d92fc0237e0bc008a8bf3edc50c3d6c14782422d7"} Jan 28 17:20:21 crc kubenswrapper[4811]: I0128 17:20:21.210001 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-l4rz7"
Jan 28 17:20:21 crc kubenswrapper[4811]: I0128 17:20:21.272729 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d34259dd-463f-4e47-9284-da3824688952-scripts\") pod \"d34259dd-463f-4e47-9284-da3824688952\" (UID: \"d34259dd-463f-4e47-9284-da3824688952\") "
Jan 28 17:20:21 crc kubenswrapper[4811]: I0128 17:20:21.272806 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d34259dd-463f-4e47-9284-da3824688952-combined-ca-bundle\") pod \"d34259dd-463f-4e47-9284-da3824688952\" (UID: \"d34259dd-463f-4e47-9284-da3824688952\") "
Jan 28 17:20:21 crc kubenswrapper[4811]: I0128 17:20:21.272882 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d34259dd-463f-4e47-9284-da3824688952-config-data\") pod \"d34259dd-463f-4e47-9284-da3824688952\" (UID: \"d34259dd-463f-4e47-9284-da3824688952\") "
Jan 28 17:20:21 crc kubenswrapper[4811]: I0128 17:20:21.272917 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kh872\" (UniqueName: \"kubernetes.io/projected/d34259dd-463f-4e47-9284-da3824688952-kube-api-access-kh872\") pod \"d34259dd-463f-4e47-9284-da3824688952\" (UID: \"d34259dd-463f-4e47-9284-da3824688952\") "
Jan 28 17:20:21 crc kubenswrapper[4811]: I0128 17:20:21.279006 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d34259dd-463f-4e47-9284-da3824688952-scripts" (OuterVolumeSpecName: "scripts") pod "d34259dd-463f-4e47-9284-da3824688952" (UID: "d34259dd-463f-4e47-9284-da3824688952"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:20:21 crc kubenswrapper[4811]: I0128 17:20:21.297185 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d34259dd-463f-4e47-9284-da3824688952-kube-api-access-kh872" (OuterVolumeSpecName: "kube-api-access-kh872") pod "d34259dd-463f-4e47-9284-da3824688952" (UID: "d34259dd-463f-4e47-9284-da3824688952"). InnerVolumeSpecName "kube-api-access-kh872". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 17:20:21 crc kubenswrapper[4811]: I0128 17:20:21.299396 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d34259dd-463f-4e47-9284-da3824688952-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d34259dd-463f-4e47-9284-da3824688952" (UID: "d34259dd-463f-4e47-9284-da3824688952"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:20:21 crc kubenswrapper[4811]: I0128 17:20:21.306945 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d34259dd-463f-4e47-9284-da3824688952-config-data" (OuterVolumeSpecName: "config-data") pod "d34259dd-463f-4e47-9284-da3824688952" (UID: "d34259dd-463f-4e47-9284-da3824688952"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:20:21 crc kubenswrapper[4811]: I0128 17:20:21.375766 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d34259dd-463f-4e47-9284-da3824688952-scripts\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:21 crc kubenswrapper[4811]: I0128 17:20:21.375811 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d34259dd-463f-4e47-9284-da3824688952-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:21 crc kubenswrapper[4811]: I0128 17:20:21.375825 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d34259dd-463f-4e47-9284-da3824688952-config-data\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:21 crc kubenswrapper[4811]: I0128 17:20:21.375837 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kh872\" (UniqueName: \"kubernetes.io/projected/d34259dd-463f-4e47-9284-da3824688952-kube-api-access-kh872\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:21 crc kubenswrapper[4811]: I0128 17:20:21.859871 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-l4rz7" event={"ID":"d34259dd-463f-4e47-9284-da3824688952","Type":"ContainerDied","Data":"5c065c304df3c9fc41e8d4d9f84f9347342e0aee97e21af547e2573e5aeb9912"}
Jan 28 17:20:21 crc kubenswrapper[4811]: I0128 17:20:21.859920 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5c065c304df3c9fc41e8d4d9f84f9347342e0aee97e21af547e2573e5aeb9912"
Jan 28 17:20:21 crc kubenswrapper[4811]: I0128 17:20:21.859950 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-l4rz7"
Jan 28 17:20:21 crc kubenswrapper[4811]: I0128 17:20:21.959680 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"]
Jan 28 17:20:21 crc kubenswrapper[4811]: E0128 17:20:21.960160 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d34259dd-463f-4e47-9284-da3824688952" containerName="nova-cell1-conductor-db-sync"
Jan 28 17:20:21 crc kubenswrapper[4811]: I0128 17:20:21.960240 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d34259dd-463f-4e47-9284-da3824688952" containerName="nova-cell1-conductor-db-sync"
Jan 28 17:20:21 crc kubenswrapper[4811]: I0128 17:20:21.960490 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d34259dd-463f-4e47-9284-da3824688952" containerName="nova-cell1-conductor-db-sync"
Jan 28 17:20:21 crc kubenswrapper[4811]: I0128 17:20:21.961227 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Jan 28 17:20:21 crc kubenswrapper[4811]: I0128 17:20:21.964387 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Jan 28 17:20:21 crc kubenswrapper[4811]: I0128 17:20:21.978920 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Jan 28 17:20:21 crc kubenswrapper[4811]: I0128 17:20:21.984719 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/633327c1-9418-4e00-b72f-3c742b26243c-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"633327c1-9418-4e00-b72f-3c742b26243c\") " pod="openstack/nova-cell1-conductor-0"
Jan 28 17:20:21 crc kubenswrapper[4811]: I0128 17:20:21.984860 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/633327c1-9418-4e00-b72f-3c742b26243c-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"633327c1-9418-4e00-b72f-3c742b26243c\") " pod="openstack/nova-cell1-conductor-0"
Jan 28 17:20:21 crc kubenswrapper[4811]: I0128 17:20:21.984893 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjp5m\" (UniqueName: \"kubernetes.io/projected/633327c1-9418-4e00-b72f-3c742b26243c-kube-api-access-cjp5m\") pod \"nova-cell1-conductor-0\" (UID: \"633327c1-9418-4e00-b72f-3c742b26243c\") " pod="openstack/nova-cell1-conductor-0"
Jan 28 17:20:22 crc kubenswrapper[4811]: I0128 17:20:22.086124 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/633327c1-9418-4e00-b72f-3c742b26243c-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"633327c1-9418-4e00-b72f-3c742b26243c\") " pod="openstack/nova-cell1-conductor-0"
Jan 28 17:20:22 crc kubenswrapper[4811]: I0128 17:20:22.086193 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjp5m\" (UniqueName: \"kubernetes.io/projected/633327c1-9418-4e00-b72f-3c742b26243c-kube-api-access-cjp5m\") pod \"nova-cell1-conductor-0\" (UID: \"633327c1-9418-4e00-b72f-3c742b26243c\") " pod="openstack/nova-cell1-conductor-0"
Jan 28 17:20:22 crc kubenswrapper[4811]: I0128 17:20:22.086300 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/633327c1-9418-4e00-b72f-3c742b26243c-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"633327c1-9418-4e00-b72f-3c742b26243c\") " pod="openstack/nova-cell1-conductor-0"
Jan 28 17:20:22 crc kubenswrapper[4811]: I0128 17:20:22.092218 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/633327c1-9418-4e00-b72f-3c742b26243c-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"633327c1-9418-4e00-b72f-3c742b26243c\") " pod="openstack/nova-cell1-conductor-0"
Jan 28 17:20:22 crc kubenswrapper[4811]: I0128 17:20:22.108109 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cjp5m\" (UniqueName: \"kubernetes.io/projected/633327c1-9418-4e00-b72f-3c742b26243c-kube-api-access-cjp5m\") pod \"nova-cell1-conductor-0\" (UID: \"633327c1-9418-4e00-b72f-3c742b26243c\") " pod="openstack/nova-cell1-conductor-0"
Jan 28 17:20:22 crc kubenswrapper[4811]: I0128 17:20:22.109458 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/633327c1-9418-4e00-b72f-3c742b26243c-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"633327c1-9418-4e00-b72f-3c742b26243c\") " pod="openstack/nova-cell1-conductor-0"
Jan 28 17:20:22 crc kubenswrapper[4811]: I0128 17:20:22.215094 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-5vhsh"
Jan 28 17:20:22 crc kubenswrapper[4811]: I0128 17:20:22.286164 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Jan 28 17:20:22 crc kubenswrapper[4811]: I0128 17:20:22.289201 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fc79f2a-5b46-4089-aa7c-517621407f96-scripts\") pod \"3fc79f2a-5b46-4089-aa7c-517621407f96\" (UID: \"3fc79f2a-5b46-4089-aa7c-517621407f96\") "
Jan 28 17:20:22 crc kubenswrapper[4811]: I0128 17:20:22.289240 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7sr4r\" (UniqueName: \"kubernetes.io/projected/3fc79f2a-5b46-4089-aa7c-517621407f96-kube-api-access-7sr4r\") pod \"3fc79f2a-5b46-4089-aa7c-517621407f96\" (UID: \"3fc79f2a-5b46-4089-aa7c-517621407f96\") "
Jan 28 17:20:22 crc kubenswrapper[4811]: I0128 17:20:22.289269 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fc79f2a-5b46-4089-aa7c-517621407f96-combined-ca-bundle\") pod \"3fc79f2a-5b46-4089-aa7c-517621407f96\" (UID: \"3fc79f2a-5b46-4089-aa7c-517621407f96\") "
Jan 28 17:20:22 crc kubenswrapper[4811]: I0128 17:20:22.289327 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fc79f2a-5b46-4089-aa7c-517621407f96-config-data\") pod \"3fc79f2a-5b46-4089-aa7c-517621407f96\" (UID: \"3fc79f2a-5b46-4089-aa7c-517621407f96\") "
Jan 28 17:20:22 crc kubenswrapper[4811]: I0128 17:20:22.295234 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fc79f2a-5b46-4089-aa7c-517621407f96-kube-api-access-7sr4r" (OuterVolumeSpecName: "kube-api-access-7sr4r") pod "3fc79f2a-5b46-4089-aa7c-517621407f96" (UID: "3fc79f2a-5b46-4089-aa7c-517621407f96"). InnerVolumeSpecName "kube-api-access-7sr4r". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 17:20:22 crc kubenswrapper[4811]: I0128 17:20:22.299232 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3fc79f2a-5b46-4089-aa7c-517621407f96-scripts" (OuterVolumeSpecName: "scripts") pod "3fc79f2a-5b46-4089-aa7c-517621407f96" (UID: "3fc79f2a-5b46-4089-aa7c-517621407f96"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:20:22 crc kubenswrapper[4811]: I0128 17:20:22.315756 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3fc79f2a-5b46-4089-aa7c-517621407f96-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3fc79f2a-5b46-4089-aa7c-517621407f96" (UID: "3fc79f2a-5b46-4089-aa7c-517621407f96"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:20:22 crc kubenswrapper[4811]: I0128 17:20:22.320393 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3fc79f2a-5b46-4089-aa7c-517621407f96-config-data" (OuterVolumeSpecName: "config-data") pod "3fc79f2a-5b46-4089-aa7c-517621407f96" (UID: "3fc79f2a-5b46-4089-aa7c-517621407f96"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:20:22 crc kubenswrapper[4811]: I0128 17:20:22.394853 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fc79f2a-5b46-4089-aa7c-517621407f96-scripts\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:22 crc kubenswrapper[4811]: I0128 17:20:22.394875 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7sr4r\" (UniqueName: \"kubernetes.io/projected/3fc79f2a-5b46-4089-aa7c-517621407f96-kube-api-access-7sr4r\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:22 crc kubenswrapper[4811]: I0128 17:20:22.394906 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fc79f2a-5b46-4089-aa7c-517621407f96-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:22 crc kubenswrapper[4811]: I0128 17:20:22.394917 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fc79f2a-5b46-4089-aa7c-517621407f96-config-data\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:22 crc kubenswrapper[4811]: I0128 17:20:22.774054 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Jan 28 17:20:22 crc kubenswrapper[4811]: W0128 17:20:22.778531 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod633327c1_9418_4e00_b72f_3c742b26243c.slice/crio-9c71019d16229c944b6eb5b9b90cd5b6ce0a91b1e8f24c2044264db44019cf56 WatchSource:0}: Error finding container 9c71019d16229c944b6eb5b9b90cd5b6ce0a91b1e8f24c2044264db44019cf56: Status 404 returned error can't find the container with id 9c71019d16229c944b6eb5b9b90cd5b6ce0a91b1e8f24c2044264db44019cf56
Jan 28 17:20:22 crc kubenswrapper[4811]: I0128 17:20:22.871269 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-5vhsh" event={"ID":"3fc79f2a-5b46-4089-aa7c-517621407f96","Type":"ContainerDied","Data":"f795e090022ed809e6a3cd9b0f0f45ca8c8f595591a558e5cf4582ff0145e43c"}
Jan 28 17:20:22 crc kubenswrapper[4811]: I0128 17:20:22.871317 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f795e090022ed809e6a3cd9b0f0f45ca8c8f595591a558e5cf4582ff0145e43c"
Jan 28 17:20:22 crc kubenswrapper[4811]: I0128 17:20:22.871291 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-5vhsh"
Jan 28 17:20:22 crc kubenswrapper[4811]: I0128 17:20:22.878555 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"633327c1-9418-4e00-b72f-3c742b26243c","Type":"ContainerStarted","Data":"9c71019d16229c944b6eb5b9b90cd5b6ce0a91b1e8f24c2044264db44019cf56"}
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.038618 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.038885 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f" containerName="nova-api-log" containerID="cri-o://e2fb6fab55f0936e8fd3718eefd0191f700bc74af6b2df2cf0f9c4bf11440e1c" gracePeriod=30
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.039021 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f" containerName="nova-api-api" containerID="cri-o://db7b9d774a2a95d509ab07492ec83659dd1c91271b51138d92bfb9771dc5e44f" gracePeriod=30
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.048383 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.048663 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83" containerName="nova-scheduler-scheduler" containerID="cri-o://0cccf4485a64ac7256d0e4def2e5bfe4625362794c4c3f1a7ec3bc44e604c2c9" gracePeriod=30
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.075014 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.075285 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d5471f39-1950-4c21-a18d-b53f4659eb30" containerName="nova-metadata-log" containerID="cri-o://d4299378e2e3067cad879504d085872a6b360e143ecac7a372f73a578ec4cbe8" gracePeriod=30
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.075339 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d5471f39-1950-4c21-a18d-b53f4659eb30" containerName="nova-metadata-metadata" containerID="cri-o://4787f4d3ce0691e844366dc599dd4b3ac3c1abb1d95d5a937601ff51b6068f37" gracePeriod=30
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.620098 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.625595 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.631323 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5471f39-1950-4c21-a18d-b53f4659eb30-combined-ca-bundle\") pod \"d5471f39-1950-4c21-a18d-b53f4659eb30\" (UID: \"d5471f39-1950-4c21-a18d-b53f4659eb30\") "
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.631377 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5471f39-1950-4c21-a18d-b53f4659eb30-config-data\") pod \"d5471f39-1950-4c21-a18d-b53f4659eb30\" (UID: \"d5471f39-1950-4c21-a18d-b53f4659eb30\") "
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.631455 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5471f39-1950-4c21-a18d-b53f4659eb30-logs\") pod \"d5471f39-1950-4c21-a18d-b53f4659eb30\" (UID: \"d5471f39-1950-4c21-a18d-b53f4659eb30\") "
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.631480 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6dw7h\" (UniqueName: \"kubernetes.io/projected/d5471f39-1950-4c21-a18d-b53f4659eb30-kube-api-access-6dw7h\") pod \"d5471f39-1950-4c21-a18d-b53f4659eb30\" (UID: \"d5471f39-1950-4c21-a18d-b53f4659eb30\") "
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.631571 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f-combined-ca-bundle\") pod \"300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f\" (UID: \"300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f\") "
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.631611 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f-logs\") pod \"300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f\" (UID: \"300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f\") "
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.631636 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4s5h7\" (UniqueName: \"kubernetes.io/projected/300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f-kube-api-access-4s5h7\") pod \"300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f\" (UID: \"300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f\") "
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.631713 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f-config-data\") pod \"300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f\" (UID: \"300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f\") "
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.631913 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d5471f39-1950-4c21-a18d-b53f4659eb30-logs" (OuterVolumeSpecName: "logs") pod "d5471f39-1950-4c21-a18d-b53f4659eb30" (UID: "d5471f39-1950-4c21-a18d-b53f4659eb30"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.632316 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f-logs" (OuterVolumeSpecName: "logs") pod "300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f" (UID: "300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.632379 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5471f39-1950-4c21-a18d-b53f4659eb30-logs\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.638776 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f-kube-api-access-4s5h7" (OuterVolumeSpecName: "kube-api-access-4s5h7") pod "300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f" (UID: "300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f"). InnerVolumeSpecName "kube-api-access-4s5h7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.646704 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5471f39-1950-4c21-a18d-b53f4659eb30-kube-api-access-6dw7h" (OuterVolumeSpecName: "kube-api-access-6dw7h") pod "d5471f39-1950-4c21-a18d-b53f4659eb30" (UID: "d5471f39-1950-4c21-a18d-b53f4659eb30"). InnerVolumeSpecName "kube-api-access-6dw7h". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.666717 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d5471f39-1950-4c21-a18d-b53f4659eb30-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d5471f39-1950-4c21-a18d-b53f4659eb30" (UID: "d5471f39-1950-4c21-a18d-b53f4659eb30"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.703113 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d5471f39-1950-4c21-a18d-b53f4659eb30-config-data" (OuterVolumeSpecName: "config-data") pod "d5471f39-1950-4c21-a18d-b53f4659eb30" (UID: "d5471f39-1950-4c21-a18d-b53f4659eb30"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.703492 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f" (UID: "300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.717970 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f-config-data" (OuterVolumeSpecName: "config-data") pod "300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f" (UID: "300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.734225 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5471f39-1950-4c21-a18d-b53f4659eb30-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.734272 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5471f39-1950-4c21-a18d-b53f4659eb30-config-data\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.734282 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6dw7h\" (UniqueName: \"kubernetes.io/projected/d5471f39-1950-4c21-a18d-b53f4659eb30-kube-api-access-6dw7h\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.734293 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.734302 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f-logs\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.734311 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4s5h7\" (UniqueName: \"kubernetes.io/projected/300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f-kube-api-access-4s5h7\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.734319 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f-config-data\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.890065 4811 generic.go:334] "Generic (PLEG): container finished" podID="d5471f39-1950-4c21-a18d-b53f4659eb30" containerID="4787f4d3ce0691e844366dc599dd4b3ac3c1abb1d95d5a937601ff51b6068f37" exitCode=0
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.890104 4811 generic.go:334] "Generic (PLEG): container finished" podID="d5471f39-1950-4c21-a18d-b53f4659eb30" containerID="d4299378e2e3067cad879504d085872a6b360e143ecac7a372f73a578ec4cbe8" exitCode=143
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.890212 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.891100 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d5471f39-1950-4c21-a18d-b53f4659eb30","Type":"ContainerDied","Data":"4787f4d3ce0691e844366dc599dd4b3ac3c1abb1d95d5a937601ff51b6068f37"}
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.891202 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d5471f39-1950-4c21-a18d-b53f4659eb30","Type":"ContainerDied","Data":"d4299378e2e3067cad879504d085872a6b360e143ecac7a372f73a578ec4cbe8"}
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.891218 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d5471f39-1950-4c21-a18d-b53f4659eb30","Type":"ContainerDied","Data":"2ad4b153894722ac4c3327025008da0f5ff13a47187d3d682be3a98fd06124e7"}
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.891240 4811 scope.go:117] "RemoveContainer" containerID="4787f4d3ce0691e844366dc599dd4b3ac3c1abb1d95d5a937601ff51b6068f37"
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.893806 4811 generic.go:334] "Generic (PLEG): container finished" podID="300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f" containerID="db7b9d774a2a95d509ab07492ec83659dd1c91271b51138d92bfb9771dc5e44f" exitCode=0
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.893849 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f","Type":"ContainerDied","Data":"db7b9d774a2a95d509ab07492ec83659dd1c91271b51138d92bfb9771dc5e44f"}
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.893865 4811 generic.go:334] "Generic (PLEG): container finished" podID="300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f" containerID="e2fb6fab55f0936e8fd3718eefd0191f700bc74af6b2df2cf0f9c4bf11440e1c" exitCode=143
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.893889 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f","Type":"ContainerDied","Data":"e2fb6fab55f0936e8fd3718eefd0191f700bc74af6b2df2cf0f9c4bf11440e1c"}
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.893895 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.893907 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f","Type":"ContainerDied","Data":"4662b600d21170ffc6bd0bd91dc82d876da5d35ba8942bb913f821d51155c251"}
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.896590 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"633327c1-9418-4e00-b72f-3c742b26243c","Type":"ContainerStarted","Data":"321bd8c868e7bfa317d34c8878c89a986acbcd1af0b832e5fdaef2fb6a03317e"}
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.896882 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0"
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.953608 4811 scope.go:117] "RemoveContainer" containerID="d4299378e2e3067cad879504d085872a6b360e143ecac7a372f73a578ec4cbe8"
Jan 28 17:20:23 crc kubenswrapper[4811]: I0128 17:20:23.965010 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.964989826 podStartE2EDuration="2.964989826s" podCreationTimestamp="2026-01-28 17:20:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:20:23.953984346 +0000 UTC m=+5716.708347929" watchObservedRunningTime="2026-01-28 17:20:23.964989826 +0000 UTC m=+5716.719353409"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.000512 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.025625 4811 scope.go:117] "RemoveContainer" containerID="4787f4d3ce0691e844366dc599dd4b3ac3c1abb1d95d5a937601ff51b6068f37"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.025939 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Jan 28 17:20:24 crc kubenswrapper[4811]: E0128 17:20:24.029762 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4787f4d3ce0691e844366dc599dd4b3ac3c1abb1d95d5a937601ff51b6068f37\": container with ID starting with 4787f4d3ce0691e844366dc599dd4b3ac3c1abb1d95d5a937601ff51b6068f37 not found: ID does not exist" containerID="4787f4d3ce0691e844366dc599dd4b3ac3c1abb1d95d5a937601ff51b6068f37"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.029803 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4787f4d3ce0691e844366dc599dd4b3ac3c1abb1d95d5a937601ff51b6068f37"} err="failed to get container status \"4787f4d3ce0691e844366dc599dd4b3ac3c1abb1d95d5a937601ff51b6068f37\": rpc error: code = NotFound desc = could not find container \"4787f4d3ce0691e844366dc599dd4b3ac3c1abb1d95d5a937601ff51b6068f37\": container with ID starting with 4787f4d3ce0691e844366dc599dd4b3ac3c1abb1d95d5a937601ff51b6068f37 not found: ID does not exist"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.029831 4811 scope.go:117] "RemoveContainer" containerID="d4299378e2e3067cad879504d085872a6b360e143ecac7a372f73a578ec4cbe8"
Jan 28 17:20:24 crc kubenswrapper[4811]: E0128 17:20:24.033574 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4299378e2e3067cad879504d085872a6b360e143ecac7a372f73a578ec4cbe8\": container with ID starting with d4299378e2e3067cad879504d085872a6b360e143ecac7a372f73a578ec4cbe8 not found: ID does not exist" containerID="d4299378e2e3067cad879504d085872a6b360e143ecac7a372f73a578ec4cbe8"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.033613 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4299378e2e3067cad879504d085872a6b360e143ecac7a372f73a578ec4cbe8"} err="failed to get container status \"d4299378e2e3067cad879504d085872a6b360e143ecac7a372f73a578ec4cbe8\": rpc error: code = NotFound desc = could not find container \"d4299378e2e3067cad879504d085872a6b360e143ecac7a372f73a578ec4cbe8\": container with ID starting with d4299378e2e3067cad879504d085872a6b360e143ecac7a372f73a578ec4cbe8 not found: ID does not exist"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.033639 4811 scope.go:117] "RemoveContainer" containerID="4787f4d3ce0691e844366dc599dd4b3ac3c1abb1d95d5a937601ff51b6068f37"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.044552 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4787f4d3ce0691e844366dc599dd4b3ac3c1abb1d95d5a937601ff51b6068f37"} err="failed to get container status \"4787f4d3ce0691e844366dc599dd4b3ac3c1abb1d95d5a937601ff51b6068f37\": rpc error: code = NotFound desc = could not find container \"4787f4d3ce0691e844366dc599dd4b3ac3c1abb1d95d5a937601ff51b6068f37\": container with ID starting with 4787f4d3ce0691e844366dc599dd4b3ac3c1abb1d95d5a937601ff51b6068f37 not found: ID does not exist"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.044596 4811 scope.go:117] "RemoveContainer" containerID="d4299378e2e3067cad879504d085872a6b360e143ecac7a372f73a578ec4cbe8"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.045575 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4299378e2e3067cad879504d085872a6b360e143ecac7a372f73a578ec4cbe8"} err="failed to get container status \"d4299378e2e3067cad879504d085872a6b360e143ecac7a372f73a578ec4cbe8\": rpc error: code = NotFound desc = could not find container \"d4299378e2e3067cad879504d085872a6b360e143ecac7a372f73a578ec4cbe8\": container with ID starting with d4299378e2e3067cad879504d085872a6b360e143ecac7a372f73a578ec4cbe8 not found: ID does not exist"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.045604 4811 scope.go:117] "RemoveContainer" containerID="db7b9d774a2a95d509ab07492ec83659dd1c91271b51138d92bfb9771dc5e44f"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.045722 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.053743 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Jan 28 17:20:24 crc kubenswrapper[4811]: E0128 17:20:24.054155 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5471f39-1950-4c21-a18d-b53f4659eb30" containerName="nova-metadata-metadata"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.054169 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5471f39-1950-4c21-a18d-b53f4659eb30" containerName="nova-metadata-metadata"
Jan 28 17:20:24 crc kubenswrapper[4811]: E0128 17:20:24.054179 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fc79f2a-5b46-4089-aa7c-517621407f96" containerName="nova-manage"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.054186 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fc79f2a-5b46-4089-aa7c-517621407f96" containerName="nova-manage"
Jan 28 17:20:24 crc kubenswrapper[4811]: E0128 17:20:24.054209 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f" containerName="nova-api-log"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.054216 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f" containerName="nova-api-log"
Jan 28 17:20:24 crc kubenswrapper[4811]: E0128 17:20:24.054227 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5471f39-1950-4c21-a18d-b53f4659eb30" containerName="nova-metadata-log"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.054232 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5471f39-1950-4c21-a18d-b53f4659eb30" containerName="nova-metadata-log"
Jan 28 17:20:24 crc kubenswrapper[4811]: E0128 17:20:24.054240 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f" containerName="nova-api-api"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.054245 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f" containerName="nova-api-api"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.054396 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5471f39-1950-4c21-a18d-b53f4659eb30" containerName="nova-metadata-metadata"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.054411 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f" containerName="nova-api-api"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.054425 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f" containerName="nova-api-log"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.054496 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fc79f2a-5b46-4089-aa7c-517621407f96" containerName="nova-manage"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.054508 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5471f39-1950-4c21-a18d-b53f4659eb30" containerName="nova-metadata-log"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.055564 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.063680 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.070179 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.070628 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.080346 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.084651 4811 scope.go:117] "RemoveContainer" containerID="e2fb6fab55f0936e8fd3718eefd0191f700bc74af6b2df2cf0f9c4bf11440e1c"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.094592 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.094696 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.109293 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.165721 4811 scope.go:117] "RemoveContainer" containerID="db7b9d774a2a95d509ab07492ec83659dd1c91271b51138d92bfb9771dc5e44f"
Jan 28 17:20:24 crc kubenswrapper[4811]: E0128 17:20:24.166657 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db7b9d774a2a95d509ab07492ec83659dd1c91271b51138d92bfb9771dc5e44f\": container with ID starting with db7b9d774a2a95d509ab07492ec83659dd1c91271b51138d92bfb9771dc5e44f not found: ID does not exist" containerID="db7b9d774a2a95d509ab07492ec83659dd1c91271b51138d92bfb9771dc5e44f"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.166699 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db7b9d774a2a95d509ab07492ec83659dd1c91271b51138d92bfb9771dc5e44f"} err="failed to get container status \"db7b9d774a2a95d509ab07492ec83659dd1c91271b51138d92bfb9771dc5e44f\": rpc error: code = NotFound desc = could not find container \"db7b9d774a2a95d509ab07492ec83659dd1c91271b51138d92bfb9771dc5e44f\": container with ID starting with db7b9d774a2a95d509ab07492ec83659dd1c91271b51138d92bfb9771dc5e44f not found: ID does not exist"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.166726 4811 scope.go:117] "RemoveContainer" containerID="e2fb6fab55f0936e8fd3718eefd0191f700bc74af6b2df2cf0f9c4bf11440e1c"
Jan 28 17:20:24 crc kubenswrapper[4811]: E0128 17:20:24.167049 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e2fb6fab55f0936e8fd3718eefd0191f700bc74af6b2df2cf0f9c4bf11440e1c\": container with ID starting with e2fb6fab55f0936e8fd3718eefd0191f700bc74af6b2df2cf0f9c4bf11440e1c not found: ID does not exist" containerID="e2fb6fab55f0936e8fd3718eefd0191f700bc74af6b2df2cf0f9c4bf11440e1c"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.167079 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2fb6fab55f0936e8fd3718eefd0191f700bc74af6b2df2cf0f9c4bf11440e1c"} err="failed to get container status \"e2fb6fab55f0936e8fd3718eefd0191f700bc74af6b2df2cf0f9c4bf11440e1c\": rpc error: code = NotFound desc = could not find container \"e2fb6fab55f0936e8fd3718eefd0191f700bc74af6b2df2cf0f9c4bf11440e1c\": container with ID starting with e2fb6fab55f0936e8fd3718eefd0191f700bc74af6b2df2cf0f9c4bf11440e1c not found: ID does not exist"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.167104 4811 scope.go:117] "RemoveContainer" containerID="db7b9d774a2a95d509ab07492ec83659dd1c91271b51138d92bfb9771dc5e44f"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.167353 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db7b9d774a2a95d509ab07492ec83659dd1c91271b51138d92bfb9771dc5e44f"} err="failed to get container status \"db7b9d774a2a95d509ab07492ec83659dd1c91271b51138d92bfb9771dc5e44f\": rpc error: code = NotFound desc = could not find container \"db7b9d774a2a95d509ab07492ec83659dd1c91271b51138d92bfb9771dc5e44f\": container with ID starting with db7b9d774a2a95d509ab07492ec83659dd1c91271b51138d92bfb9771dc5e44f not found: ID does not exist"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.167573 4811 scope.go:117] "RemoveContainer" containerID="e2fb6fab55f0936e8fd3718eefd0191f700bc74af6b2df2cf0f9c4bf11440e1c"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.168651 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2fb6fab55f0936e8fd3718eefd0191f700bc74af6b2df2cf0f9c4bf11440e1c"} err="failed to get container status \"e2fb6fab55f0936e8fd3718eefd0191f700bc74af6b2df2cf0f9c4bf11440e1c\": rpc error: code = NotFound desc = could not find container \"e2fb6fab55f0936e8fd3718eefd0191f700bc74af6b2df2cf0f9c4bf11440e1c\": container with ID starting with e2fb6fab55f0936e8fd3718eefd0191f700bc74af6b2df2cf0f9c4bf11440e1c not found: ID does not exist"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.228623 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6b4c886967-f7ls6"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.241681 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfc67\" (UniqueName: \"kubernetes.io/projected/a96355bd-116e-47cb-8b84-21b4e1f75b9b-kube-api-access-rfc67\") pod \"nova-api-0\" (UID: \"a96355bd-116e-47cb-8b84-21b4e1f75b9b\") " pod="openstack/nova-api-0"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.243118 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9a6e27c-3fc7-4dac-895b-755c65a397bb-logs\") pod \"nova-metadata-0\" (UID: \"b9a6e27c-3fc7-4dac-895b-755c65a397bb\") " pod="openstack/nova-metadata-0"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.243193 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9a6e27c-3fc7-4dac-895b-755c65a397bb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b9a6e27c-3fc7-4dac-895b-755c65a397bb\") " pod="openstack/nova-metadata-0"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.243244 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a96355bd-116e-47cb-8b84-21b4e1f75b9b-logs\") pod \"nova-api-0\" (UID: \"a96355bd-116e-47cb-8b84-21b4e1f75b9b\") " pod="openstack/nova-api-0"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.243286 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9a6e27c-3fc7-4dac-895b-755c65a397bb-config-data\") pod \"nova-metadata-0\" (UID: \"b9a6e27c-3fc7-4dac-895b-755c65a397bb\") " pod="openstack/nova-metadata-0"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.243300 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbnhc\" (UniqueName: \"kubernetes.io/projected/b9a6e27c-3fc7-4dac-895b-755c65a397bb-kube-api-access-lbnhc\") pod \"nova-metadata-0\" (UID: \"b9a6e27c-3fc7-4dac-895b-755c65a397bb\") " pod="openstack/nova-metadata-0"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.243342 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a96355bd-116e-47cb-8b84-21b4e1f75b9b-config-data\") pod \"nova-api-0\" (UID: \"a96355bd-116e-47cb-8b84-21b4e1f75b9b\") " pod="openstack/nova-api-0"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.243526 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a96355bd-116e-47cb-8b84-21b4e1f75b9b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a96355bd-116e-47cb-8b84-21b4e1f75b9b\") " pod="openstack/nova-api-0"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.244105 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.259221 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.296350 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6dbc457d5-xbhz5"]
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.296889 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6dbc457d5-xbhz5" podUID="225db1e0-fddb-4f42-95fe-2a44bdbec853" containerName="dnsmasq-dns" containerID="cri-o://dadb37bfcbd13c8eef53a93b7d3f1d952582b86dca11c3ba39b9c9460fb73108" gracePeriod=10
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.355907 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9a6e27c-3fc7-4dac-895b-755c65a397bb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b9a6e27c-3fc7-4dac-895b-755c65a397bb\") " pod="openstack/nova-metadata-0"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.355962 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a96355bd-116e-47cb-8b84-21b4e1f75b9b-logs\") pod \"nova-api-0\" (UID: \"a96355bd-116e-47cb-8b84-21b4e1f75b9b\") " pod="openstack/nova-api-0"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.357063 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a96355bd-116e-47cb-8b84-21b4e1f75b9b-logs\") pod \"nova-api-0\" (UID: \"a96355bd-116e-47cb-8b84-21b4e1f75b9b\") " pod="openstack/nova-api-0"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.357407 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9a6e27c-3fc7-4dac-895b-755c65a397bb-config-data\") pod \"nova-metadata-0\" (UID: \"b9a6e27c-3fc7-4dac-895b-755c65a397bb\") " pod="openstack/nova-metadata-0"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.357970 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f" path="/var/lib/kubelet/pods/300878e5-f3e4-4fc9-bcfc-2cb4ac95a72f/volumes"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.358861 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d5471f39-1950-4c21-a18d-b53f4659eb30" path="/var/lib/kubelet/pods/d5471f39-1950-4c21-a18d-b53f4659eb30/volumes"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.360478 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbnhc\" (UniqueName: \"kubernetes.io/projected/b9a6e27c-3fc7-4dac-895b-755c65a397bb-kube-api-access-lbnhc\") pod \"nova-metadata-0\" (UID: \"b9a6e27c-3fc7-4dac-895b-755c65a397bb\") " pod="openstack/nova-metadata-0"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.360774 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a96355bd-116e-47cb-8b84-21b4e1f75b9b-config-data\") pod \"nova-api-0\" (UID: \"a96355bd-116e-47cb-8b84-21b4e1f75b9b\") " pod="openstack/nova-api-0"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.360932 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a96355bd-116e-47cb-8b84-21b4e1f75b9b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a96355bd-116e-47cb-8b84-21b4e1f75b9b\") " pod="openstack/nova-api-0"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.361049 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rfc67\" (UniqueName: \"kubernetes.io/projected/a96355bd-116e-47cb-8b84-21b4e1f75b9b-kube-api-access-rfc67\") pod \"nova-api-0\" (UID: \"a96355bd-116e-47cb-8b84-21b4e1f75b9b\") " pod="openstack/nova-api-0"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.361151 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9a6e27c-3fc7-4dac-895b-755c65a397bb-logs\") pod \"nova-metadata-0\" (UID: \"b9a6e27c-3fc7-4dac-895b-755c65a397bb\") " pod="openstack/nova-metadata-0"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.362622 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9a6e27c-3fc7-4dac-895b-755c65a397bb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b9a6e27c-3fc7-4dac-895b-755c65a397bb\") " pod="openstack/nova-metadata-0"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.364366 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9a6e27c-3fc7-4dac-895b-755c65a397bb-config-data\") pod \"nova-metadata-0\" (UID: \"b9a6e27c-3fc7-4dac-895b-755c65a397bb\") " pod="openstack/nova-metadata-0"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.364832 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9a6e27c-3fc7-4dac-895b-755c65a397bb-logs\") pod \"nova-metadata-0\" (UID: \"b9a6e27c-3fc7-4dac-895b-755c65a397bb\") " pod="openstack/nova-metadata-0"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.370529 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a96355bd-116e-47cb-8b84-21b4e1f75b9b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a96355bd-116e-47cb-8b84-21b4e1f75b9b\") " pod="openstack/nova-api-0"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.370620 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a96355bd-116e-47cb-8b84-21b4e1f75b9b-config-data\") pod \"nova-api-0\" (UID: \"a96355bd-116e-47cb-8b84-21b4e1f75b9b\") " pod="openstack/nova-api-0"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.385079 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbnhc\" (UniqueName: \"kubernetes.io/projected/b9a6e27c-3fc7-4dac-895b-755c65a397bb-kube-api-access-lbnhc\") pod \"nova-metadata-0\" (UID: \"b9a6e27c-3fc7-4dac-895b-755c65a397bb\") " pod="openstack/nova-metadata-0"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.386121 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rfc67\" (UniqueName: \"kubernetes.io/projected/a96355bd-116e-47cb-8b84-21b4e1f75b9b-kube-api-access-rfc67\") pod \"nova-api-0\" (UID: \"a96355bd-116e-47cb-8b84-21b4e1f75b9b\") " pod="openstack/nova-api-0"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.465947 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.684879 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.912156 4811 generic.go:334] "Generic (PLEG): container finished" podID="225db1e0-fddb-4f42-95fe-2a44bdbec853" containerID="dadb37bfcbd13c8eef53a93b7d3f1d952582b86dca11c3ba39b9c9460fb73108" exitCode=0
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.912209 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dbc457d5-xbhz5" event={"ID":"225db1e0-fddb-4f42-95fe-2a44bdbec853","Type":"ContainerDied","Data":"dadb37bfcbd13c8eef53a93b7d3f1d952582b86dca11c3ba39b9c9460fb73108"}
Jan 28 17:20:24 crc kubenswrapper[4811]: I0128 17:20:24.924287 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0"
Jan 28 17:20:25 crc kubenswrapper[4811]: I0128 17:20:25.014235 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Jan 28 17:20:25 crc kubenswrapper[4811]: W0128 17:20:25.023707 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda96355bd_116e_47cb_8b84_21b4e1f75b9b.slice/crio-3ee1c9c0733fcf1f8802191ab6d3366abb046a91c931307cfdab8ebef9eb6aaf WatchSource:0}: Error finding container 3ee1c9c0733fcf1f8802191ab6d3366abb046a91c931307cfdab8ebef9eb6aaf: Status 404 returned error can't find the container with id 3ee1c9c0733fcf1f8802191ab6d3366abb046a91c931307cfdab8ebef9eb6aaf
Jan 28 17:20:25 crc kubenswrapper[4811]: I0128 17:20:25.169436 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Jan 28 17:20:25 crc kubenswrapper[4811]: W0128 17:20:25.204613 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb9a6e27c_3fc7_4dac_895b_755c65a397bb.slice/crio-72210e4667ffd000c2725e73fa566bd54396af3ff5cd86cd140b67a5d73c457d WatchSource:0}: Error finding container 72210e4667ffd000c2725e73fa566bd54396af3ff5cd86cd140b67a5d73c457d: Status 404 returned error can't find the container with id 72210e4667ffd000c2725e73fa566bd54396af3ff5cd86cd140b67a5d73c457d
Jan 28 17:20:25 crc kubenswrapper[4811]: I0128 17:20:25.466314 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6dbc457d5-xbhz5"
Jan 28 17:20:25 crc kubenswrapper[4811]: I0128 17:20:25.585349 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/225db1e0-fddb-4f42-95fe-2a44bdbec853-ovsdbserver-sb\") pod \"225db1e0-fddb-4f42-95fe-2a44bdbec853\" (UID: \"225db1e0-fddb-4f42-95fe-2a44bdbec853\") "
Jan 28 17:20:25 crc kubenswrapper[4811]: I0128 17:20:25.585474 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/225db1e0-fddb-4f42-95fe-2a44bdbec853-config\") pod \"225db1e0-fddb-4f42-95fe-2a44bdbec853\" (UID: \"225db1e0-fddb-4f42-95fe-2a44bdbec853\") "
Jan 28 17:20:25 crc kubenswrapper[4811]: I0128 17:20:25.585508 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/225db1e0-fddb-4f42-95fe-2a44bdbec853-ovsdbserver-nb\") pod \"225db1e0-fddb-4f42-95fe-2a44bdbec853\" (UID: \"225db1e0-fddb-4f42-95fe-2a44bdbec853\") "
Jan 28 17:20:25 crc kubenswrapper[4811]: I0128 17:20:25.585565 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5w4v9\" (UniqueName: \"kubernetes.io/projected/225db1e0-fddb-4f42-95fe-2a44bdbec853-kube-api-access-5w4v9\") pod \"225db1e0-fddb-4f42-95fe-2a44bdbec853\" (UID: \"225db1e0-fddb-4f42-95fe-2a44bdbec853\") "
Jan 28 17:20:25 crc kubenswrapper[4811]: I0128 17:20:25.585661 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/225db1e0-fddb-4f42-95fe-2a44bdbec853-dns-svc\") pod \"225db1e0-fddb-4f42-95fe-2a44bdbec853\" (UID: \"225db1e0-fddb-4f42-95fe-2a44bdbec853\") "
Jan 28 17:20:25 crc kubenswrapper[4811]: I0128 17:20:25.592639 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/225db1e0-fddb-4f42-95fe-2a44bdbec853-kube-api-access-5w4v9" (OuterVolumeSpecName: "kube-api-access-5w4v9") pod "225db1e0-fddb-4f42-95fe-2a44bdbec853" (UID: "225db1e0-fddb-4f42-95fe-2a44bdbec853"). InnerVolumeSpecName "kube-api-access-5w4v9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 17:20:25 crc kubenswrapper[4811]: I0128 17:20:25.626205 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/225db1e0-fddb-4f42-95fe-2a44bdbec853-config" (OuterVolumeSpecName: "config") pod "225db1e0-fddb-4f42-95fe-2a44bdbec853" (UID: "225db1e0-fddb-4f42-95fe-2a44bdbec853"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 17:20:25 crc kubenswrapper[4811]: I0128 17:20:25.630692 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/225db1e0-fddb-4f42-95fe-2a44bdbec853-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "225db1e0-fddb-4f42-95fe-2a44bdbec853" (UID: "225db1e0-fddb-4f42-95fe-2a44bdbec853"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 17:20:25 crc kubenswrapper[4811]: I0128 17:20:25.632048 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/225db1e0-fddb-4f42-95fe-2a44bdbec853-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "225db1e0-fddb-4f42-95fe-2a44bdbec853" (UID: "225db1e0-fddb-4f42-95fe-2a44bdbec853"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 17:20:25 crc kubenswrapper[4811]: I0128 17:20:25.634678 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/225db1e0-fddb-4f42-95fe-2a44bdbec853-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "225db1e0-fddb-4f42-95fe-2a44bdbec853" (UID: "225db1e0-fddb-4f42-95fe-2a44bdbec853"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 17:20:25 crc kubenswrapper[4811]: I0128 17:20:25.687836 4811 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/225db1e0-fddb-4f42-95fe-2a44bdbec853-dns-svc\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:25 crc kubenswrapper[4811]: I0128 17:20:25.687870 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/225db1e0-fddb-4f42-95fe-2a44bdbec853-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:25 crc kubenswrapper[4811]: I0128 17:20:25.687880 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/225db1e0-fddb-4f42-95fe-2a44bdbec853-config\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:25 crc kubenswrapper[4811]: I0128 17:20:25.687890 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/225db1e0-fddb-4f42-95fe-2a44bdbec853-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:25 crc kubenswrapper[4811]: I0128 17:20:25.687901 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5w4v9\" (UniqueName: \"kubernetes.io/projected/225db1e0-fddb-4f42-95fe-2a44bdbec853-kube-api-access-5w4v9\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:25 crc kubenswrapper[4811]: I0128 17:20:25.925627 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a96355bd-116e-47cb-8b84-21b4e1f75b9b","Type":"ContainerStarted","Data":"5f2847dec248d2ee1319f4976e1d668fc62d04c2f2403798a52ade5675656bca"}
Jan 28 17:20:25 crc kubenswrapper[4811]: I0128 17:20:25.925687 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a96355bd-116e-47cb-8b84-21b4e1f75b9b","Type":"ContainerStarted","Data":"83a910aa54298e4909fde71217e5bb85167f144c342b81f09957ff092f79bcc5"}
Jan 28 17:20:25 crc kubenswrapper[4811]: I0128 17:20:25.925700 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a96355bd-116e-47cb-8b84-21b4e1f75b9b","Type":"ContainerStarted","Data":"3ee1c9c0733fcf1f8802191ab6d3366abb046a91c931307cfdab8ebef9eb6aaf"}
Jan 28 17:20:25 crc kubenswrapper[4811]: I0128 17:20:25.928645 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b9a6e27c-3fc7-4dac-895b-755c65a397bb","Type":"ContainerStarted","Data":"5b260578f202ef59536a7dfe75c9e6f7ee14cb5b906d8967b3833ba313678575"}
Jan 28 17:20:25 crc kubenswrapper[4811]: I0128 17:20:25.928728 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b9a6e27c-3fc7-4dac-895b-755c65a397bb","Type":"ContainerStarted","Data":"5b9cae204aa81ac706818898c06fd472c01d0ffe56e15e38942f84c01c017dbf"}
Jan 28 17:20:25 crc kubenswrapper[4811]: I0128 17:20:25.928776 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b9a6e27c-3fc7-4dac-895b-755c65a397bb","Type":"ContainerStarted","Data":"72210e4667ffd000c2725e73fa566bd54396af3ff5cd86cd140b67a5d73c457d"}
Jan 28 17:20:25 crc kubenswrapper[4811]: I0128 17:20:25.932886 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6dbc457d5-xbhz5"
Jan 28 17:20:25 crc kubenswrapper[4811]: I0128 17:20:25.934577 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dbc457d5-xbhz5" event={"ID":"225db1e0-fddb-4f42-95fe-2a44bdbec853","Type":"ContainerDied","Data":"b0b3e818a7b8aabdc9339e7e171b369db51d5db81d5746b9fe87408b1ca22215"}
Jan 28 17:20:25 crc kubenswrapper[4811]: I0128 17:20:25.934734 4811 scope.go:117] "RemoveContainer" containerID="dadb37bfcbd13c8eef53a93b7d3f1d952582b86dca11c3ba39b9c9460fb73108"
Jan 28 17:20:25 crc kubenswrapper[4811]: I0128 17:20:25.951060 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.951040425 podStartE2EDuration="1.951040425s" podCreationTimestamp="2026-01-28 17:20:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:20:25.942671597 +0000 UTC m=+5718.697035200" watchObservedRunningTime="2026-01-28 17:20:25.951040425 +0000 UTC m=+5718.705404008"
Jan 28 17:20:25 crc kubenswrapper[4811]: I0128 17:20:25.959748 4811 scope.go:117] "RemoveContainer" containerID="8e969f58d02be96aa11a7d54549708823f2ea7384b663c92d0f65e22646658be"
Jan 28 17:20:25 crc kubenswrapper[4811]: I0128 17:20:25.965370 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.965352626 podStartE2EDuration="2.965352626s" podCreationTimestamp="2026-01-28 17:20:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:20:25.960213746 +0000 UTC m=+5718.714577329" watchObservedRunningTime="2026-01-28 17:20:25.965352626 +0000 UTC m=+5718.719716209"
Jan 28 17:20:25 crc kubenswrapper[4811]: I0128 17:20:25.987278 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6dbc457d5-xbhz5"]
Jan 28 17:20:25 crc kubenswrapper[4811]: I0128 17:20:25.996197 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6dbc457d5-xbhz5"]
Jan 28 17:20:26 crc kubenswrapper[4811]: I0128 17:20:26.351221 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="225db1e0-fddb-4f42-95fe-2a44bdbec853" path="/var/lib/kubelet/pods/225db1e0-fddb-4f42-95fe-2a44bdbec853/volumes"
Jan 28 17:20:27 crc kubenswrapper[4811]: I0128 17:20:27.315792 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0"
Jan 28 17:20:27 crc kubenswrapper[4811]: I0128 17:20:27.863178 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 28 17:20:27 crc kubenswrapper[4811]: I0128 17:20:27.967050 4811 generic.go:334] "Generic (PLEG): container finished" podID="794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83" containerID="0cccf4485a64ac7256d0e4def2e5bfe4625362794c4c3f1a7ec3bc44e604c2c9" exitCode=0 Jan 28 17:20:27 crc kubenswrapper[4811]: I0128 17:20:27.967096 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83","Type":"ContainerDied","Data":"0cccf4485a64ac7256d0e4def2e5bfe4625362794c4c3f1a7ec3bc44e604c2c9"} Jan 28 17:20:27 crc kubenswrapper[4811]: I0128 17:20:27.967122 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83","Type":"ContainerDied","Data":"a0a050d4ab51ef691c9b067ab06d00fbfc90dfffc1ba2cb7ddde230784016607"} Jan 28 17:20:27 crc kubenswrapper[4811]: I0128 17:20:27.967121 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 28 17:20:27 crc kubenswrapper[4811]: I0128 17:20:27.967139 4811 scope.go:117] "RemoveContainer" containerID="0cccf4485a64ac7256d0e4def2e5bfe4625362794c4c3f1a7ec3bc44e604c2c9" Jan 28 17:20:27 crc kubenswrapper[4811]: I0128 17:20:27.992706 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-8f868"] Jan 28 17:20:27 crc kubenswrapper[4811]: E0128 17:20:27.993160 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="225db1e0-fddb-4f42-95fe-2a44bdbec853" containerName="dnsmasq-dns" Jan 28 17:20:27 crc kubenswrapper[4811]: I0128 17:20:27.993181 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="225db1e0-fddb-4f42-95fe-2a44bdbec853" containerName="dnsmasq-dns" Jan 28 17:20:27 crc kubenswrapper[4811]: E0128 17:20:27.993222 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="225db1e0-fddb-4f42-95fe-2a44bdbec853" containerName="init" Jan 28 17:20:27 crc kubenswrapper[4811]: I0128 17:20:27.993231 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="225db1e0-fddb-4f42-95fe-2a44bdbec853" containerName="init" Jan 28 17:20:27 crc kubenswrapper[4811]: E0128 17:20:27.993277 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83" containerName="nova-scheduler-scheduler" Jan 28 17:20:27 crc kubenswrapper[4811]: I0128 17:20:27.993287 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83" containerName="nova-scheduler-scheduler" Jan 28 17:20:27 crc kubenswrapper[4811]: I0128 17:20:27.993542 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83" containerName="nova-scheduler-scheduler" Jan 28 17:20:27 crc kubenswrapper[4811]: I0128 17:20:27.993568 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="225db1e0-fddb-4f42-95fe-2a44bdbec853" containerName="dnsmasq-dns" Jan 28 17:20:27 crc kubenswrapper[4811]: I0128 17:20:27.994322 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-8f868" Jan 28 17:20:27 crc kubenswrapper[4811]: I0128 17:20:27.996945 4811 scope.go:117] "RemoveContainer" containerID="0cccf4485a64ac7256d0e4def2e5bfe4625362794c4c3f1a7ec3bc44e604c2c9" Jan 28 17:20:27 crc kubenswrapper[4811]: E0128 17:20:27.997655 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0cccf4485a64ac7256d0e4def2e5bfe4625362794c4c3f1a7ec3bc44e604c2c9\": container with ID starting with 0cccf4485a64ac7256d0e4def2e5bfe4625362794c4c3f1a7ec3bc44e604c2c9 not found: ID does not exist" containerID="0cccf4485a64ac7256d0e4def2e5bfe4625362794c4c3f1a7ec3bc44e604c2c9" Jan 28 17:20:27 crc kubenswrapper[4811]: I0128 17:20:27.997686 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cccf4485a64ac7256d0e4def2e5bfe4625362794c4c3f1a7ec3bc44e604c2c9"} err="failed to get container status \"0cccf4485a64ac7256d0e4def2e5bfe4625362794c4c3f1a7ec3bc44e604c2c9\": rpc error: code = NotFound desc = could not find container \"0cccf4485a64ac7256d0e4def2e5bfe4625362794c4c3f1a7ec3bc44e604c2c9\": container with ID starting with 0cccf4485a64ac7256d0e4def2e5bfe4625362794c4c3f1a7ec3bc44e604c2c9 not found: ID does not exist" Jan 28 17:20:27 crc kubenswrapper[4811]: I0128 17:20:27.998614 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Jan 28 17:20:27 crc kubenswrapper[4811]: I0128 17:20:27.999401 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.007526 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-8f868"] Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.031324 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83-config-data\") pod \"794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83\" (UID: \"794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83\") " Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.031396 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9nzwc\" (UniqueName: \"kubernetes.io/projected/794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83-kube-api-access-9nzwc\") pod \"794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83\" (UID: \"794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83\") " Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.031491 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83-combined-ca-bundle\") pod \"794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83\" (UID: \"794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83\") " Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.031690 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f89fe70-e439-44f2-9c8f-f665bb2b646d-config-data\") pod \"nova-cell1-cell-mapping-8f868\" (UID: \"5f89fe70-e439-44f2-9c8f-f665bb2b646d\") " pod="openstack/nova-cell1-cell-mapping-8f868" Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.031787 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f89fe70-e439-44f2-9c8f-f665bb2b646d-scripts\") pod 
\"nova-cell1-cell-mapping-8f868\" (UID: \"5f89fe70-e439-44f2-9c8f-f665bb2b646d\") " pod="openstack/nova-cell1-cell-mapping-8f868" Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.031807 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f89fe70-e439-44f2-9c8f-f665bb2b646d-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-8f868\" (UID: \"5f89fe70-e439-44f2-9c8f-f665bb2b646d\") " pod="openstack/nova-cell1-cell-mapping-8f868" Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.031839 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zsvpr\" (UniqueName: \"kubernetes.io/projected/5f89fe70-e439-44f2-9c8f-f665bb2b646d-kube-api-access-zsvpr\") pod \"nova-cell1-cell-mapping-8f868\" (UID: \"5f89fe70-e439-44f2-9c8f-f665bb2b646d\") " pod="openstack/nova-cell1-cell-mapping-8f868" Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.042467 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83-kube-api-access-9nzwc" (OuterVolumeSpecName: "kube-api-access-9nzwc") pod "794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83" (UID: "794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83"). InnerVolumeSpecName "kube-api-access-9nzwc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.056031 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83" (UID: "794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.056300 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83-config-data" (OuterVolumeSpecName: "config-data") pod "794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83" (UID: "794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.133302 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f89fe70-e439-44f2-9c8f-f665bb2b646d-scripts\") pod \"nova-cell1-cell-mapping-8f868\" (UID: \"5f89fe70-e439-44f2-9c8f-f665bb2b646d\") " pod="openstack/nova-cell1-cell-mapping-8f868" Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.133365 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f89fe70-e439-44f2-9c8f-f665bb2b646d-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-8f868\" (UID: \"5f89fe70-e439-44f2-9c8f-f665bb2b646d\") " pod="openstack/nova-cell1-cell-mapping-8f868" Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.133412 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zsvpr\" (UniqueName: \"kubernetes.io/projected/5f89fe70-e439-44f2-9c8f-f665bb2b646d-kube-api-access-zsvpr\") pod \"nova-cell1-cell-mapping-8f868\" (UID: \"5f89fe70-e439-44f2-9c8f-f665bb2b646d\") " pod="openstack/nova-cell1-cell-mapping-8f868" Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.133519 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f89fe70-e439-44f2-9c8f-f665bb2b646d-config-data\") pod \"nova-cell1-cell-mapping-8f868\" (UID: \"5f89fe70-e439-44f2-9c8f-f665bb2b646d\") " pod="openstack/nova-cell1-cell-mapping-8f868" Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.133611 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.133626 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9nzwc\" (UniqueName: \"kubernetes.io/projected/794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83-kube-api-access-9nzwc\") on node \"crc\" DevicePath \"\"" Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.133638 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.138097 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f89fe70-e439-44f2-9c8f-f665bb2b646d-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-8f868\" (UID: \"5f89fe70-e439-44f2-9c8f-f665bb2b646d\") " pod="openstack/nova-cell1-cell-mapping-8f868" Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.139513 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f89fe70-e439-44f2-9c8f-f665bb2b646d-scripts\") pod \"nova-cell1-cell-mapping-8f868\" (UID: \"5f89fe70-e439-44f2-9c8f-f665bb2b646d\") " pod="openstack/nova-cell1-cell-mapping-8f868" Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.141199 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f89fe70-e439-44f2-9c8f-f665bb2b646d-config-data\") pod \"nova-cell1-cell-mapping-8f868\" (UID: \"5f89fe70-e439-44f2-9c8f-f665bb2b646d\") " pod="openstack/nova-cell1-cell-mapping-8f868" 
Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.150180 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zsvpr\" (UniqueName: \"kubernetes.io/projected/5f89fe70-e439-44f2-9c8f-f665bb2b646d-kube-api-access-zsvpr\") pod \"nova-cell1-cell-mapping-8f868\" (UID: \"5f89fe70-e439-44f2-9c8f-f665bb2b646d\") " pod="openstack/nova-cell1-cell-mapping-8f868"
Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.311040 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-8f868"
Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.314663 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.328403 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.378145 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83" path="/var/lib/kubelet/pods/794dd4c7-1518-4c16-bc5b-0bfdcd4ebf83/volumes"
Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.379932 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.386477 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.389876 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.390419 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.452547 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1633ecf6-e771-4579-9792-5ecc9c915f5d-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1633ecf6-e771-4579-9792-5ecc9c915f5d\") " pod="openstack/nova-scheduler-0"
Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.452601 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1633ecf6-e771-4579-9792-5ecc9c915f5d-config-data\") pod \"nova-scheduler-0\" (UID: \"1633ecf6-e771-4579-9792-5ecc9c915f5d\") " pod="openstack/nova-scheduler-0"
Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.452651 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2b5m\" (UniqueName: \"kubernetes.io/projected/1633ecf6-e771-4579-9792-5ecc9c915f5d-kube-api-access-g2b5m\") pod \"nova-scheduler-0\" (UID: \"1633ecf6-e771-4579-9792-5ecc9c915f5d\") " pod="openstack/nova-scheduler-0"
Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.553752 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1633ecf6-e771-4579-9792-5ecc9c915f5d-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1633ecf6-e771-4579-9792-5ecc9c915f5d\") " pod="openstack/nova-scheduler-0"
Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.553810 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1633ecf6-e771-4579-9792-5ecc9c915f5d-config-data\") pod \"nova-scheduler-0\" (UID: \"1633ecf6-e771-4579-9792-5ecc9c915f5d\") " pod="openstack/nova-scheduler-0"
Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.553872 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2b5m\" (UniqueName: \"kubernetes.io/projected/1633ecf6-e771-4579-9792-5ecc9c915f5d-kube-api-access-g2b5m\") pod \"nova-scheduler-0\" (UID: \"1633ecf6-e771-4579-9792-5ecc9c915f5d\") " pod="openstack/nova-scheduler-0"
Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.558297 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1633ecf6-e771-4579-9792-5ecc9c915f5d-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1633ecf6-e771-4579-9792-5ecc9c915f5d\") " pod="openstack/nova-scheduler-0"
Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.562971 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1633ecf6-e771-4579-9792-5ecc9c915f5d-config-data\") pod \"nova-scheduler-0\" (UID: \"1633ecf6-e771-4579-9792-5ecc9c915f5d\") " pod="openstack/nova-scheduler-0"
Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.574588 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2b5m\" (UniqueName: \"kubernetes.io/projected/1633ecf6-e771-4579-9792-5ecc9c915f5d-kube-api-access-g2b5m\") pod \"nova-scheduler-0\" (UID: \"1633ecf6-e771-4579-9792-5ecc9c915f5d\") " pod="openstack/nova-scheduler-0"
Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.715068 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.770012 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-8f868"]
Jan 28 17:20:28 crc kubenswrapper[4811]: W0128 17:20:28.771776 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f89fe70_e439_44f2_9c8f_f665bb2b646d.slice/crio-9b794f3e38fef0720890e36ba60e3348de1e49dc87d0552c8dd7b0efa7d90127 WatchSource:0}: Error finding container 9b794f3e38fef0720890e36ba60e3348de1e49dc87d0552c8dd7b0efa7d90127: Status 404 returned error can't find the container with id 9b794f3e38fef0720890e36ba60e3348de1e49dc87d0552c8dd7b0efa7d90127
Jan 28 17:20:28 crc kubenswrapper[4811]: I0128 17:20:28.979423 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-8f868" event={"ID":"5f89fe70-e439-44f2-9c8f-f665bb2b646d","Type":"ContainerStarted","Data":"9b794f3e38fef0720890e36ba60e3348de1e49dc87d0552c8dd7b0efa7d90127"}
Jan 28 17:20:29 crc kubenswrapper[4811]: I0128 17:20:29.155795 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 28 17:20:29 crc kubenswrapper[4811]: I0128 17:20:29.685575 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Jan 28 17:20:29 crc kubenswrapper[4811]: I0128 17:20:29.685620 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Jan 28 17:20:29 crc kubenswrapper[4811]: I0128 17:20:29.988420 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1633ecf6-e771-4579-9792-5ecc9c915f5d","Type":"ContainerStarted","Data":"aed159568159eebd20b86d17e1976967e6b820bf91f3e025bbd3225e15dec6a6"}
Jan 28 17:20:29 crc kubenswrapper[4811]: I0128 17:20:29.988720 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1633ecf6-e771-4579-9792-5ecc9c915f5d","Type":"ContainerStarted","Data":"5f9dc02d8c9a3dd5945c88fd93ae98ce86b7d92beeb54b267d188bce51de4446"}
Jan 28 17:20:29 crc kubenswrapper[4811]: I0128 17:20:29.989784 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-8f868" event={"ID":"5f89fe70-e439-44f2-9c8f-f665bb2b646d","Type":"ContainerStarted","Data":"b9c690e72a48f4814450f69b4e8231d998072f21fb9eabf4df7c871f0031c1f4"}
Jan 28 17:20:30 crc kubenswrapper[4811]: I0128 17:20:30.009601 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.00957797 podStartE2EDuration="2.00957797s" podCreationTimestamp="2026-01-28 17:20:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:20:30.003166695 +0000 UTC m=+5722.757530288" watchObservedRunningTime="2026-01-28 17:20:30.00957797 +0000 UTC m=+5722.763941553"
Jan 28 17:20:30 crc kubenswrapper[4811]: I0128 17:20:30.024185 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-8f868" podStartSLOduration=3.024160447 podStartE2EDuration="3.024160447s" podCreationTimestamp="2026-01-28 17:20:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:20:30.016150869 +0000 UTC m=+5722.770514462" watchObservedRunningTime="2026-01-28 17:20:30.024160447 +0000 UTC m=+5722.778524030"
Jan 28 17:20:30 crc kubenswrapper[4811]: I0128 17:20:30.324533 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6dbc457d5-xbhz5" podUID="225db1e0-fddb-4f42-95fe-2a44bdbec853" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.1.49:5353: i/o timeout"
Jan 28 17:20:30 crc kubenswrapper[4811]: I0128 17:20:30.916386 4811 scope.go:117] "RemoveContainer" containerID="87e6965928cc872303d4d01c5837025a5bb31e2d0f9d3789d3680acafc5a787b"
Jan 28 17:20:30 crc kubenswrapper[4811]: I0128 17:20:30.941346 4811 scope.go:117] "RemoveContainer" containerID="9b1d4b77627a6fa549eb43af84e4e1be84c2789cbae50ef793fa72637506caa4"
Jan 28 17:20:30 crc kubenswrapper[4811]: I0128 17:20:30.962614 4811 scope.go:117] "RemoveContainer" containerID="db466615f30a2918bcfce8570921db9db4997054485fde2eefde214523fb962f"
Jan 28 17:20:32 crc kubenswrapper[4811]: I0128 17:20:32.339135 4811 scope.go:117] "RemoveContainer" containerID="3abd2ca52dcea4f0bad2eccd9e9279be836bcb0c1530a2433f35225ded0da091"
Jan 28 17:20:32 crc kubenswrapper[4811]: E0128 17:20:32.339728 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:20:33 crc kubenswrapper[4811]: I0128 17:20:33.715973 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Jan 28 17:20:34 crc kubenswrapper[4811]: I0128 17:20:34.026016 4811 generic.go:334] "Generic (PLEG): container finished" podID="5f89fe70-e439-44f2-9c8f-f665bb2b646d" containerID="b9c690e72a48f4814450f69b4e8231d998072f21fb9eabf4df7c871f0031c1f4" exitCode=0
Jan 28 17:20:34 crc kubenswrapper[4811]: I0128 17:20:34.026064 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-8f868" event={"ID":"5f89fe70-e439-44f2-9c8f-f665bb2b646d","Type":"ContainerDied","Data":"b9c690e72a48f4814450f69b4e8231d998072f21fb9eabf4df7c871f0031c1f4"}
Jan 28 17:20:34 crc kubenswrapper[4811]: I0128 17:20:34.466981 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Jan 28 17:20:34 crc kubenswrapper[4811]: I0128 17:20:34.467060 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Jan 28 17:20:34 crc kubenswrapper[4811]: I0128 17:20:34.685483 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Jan 28 17:20:34 crc kubenswrapper[4811]: I0128 17:20:34.685560 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Jan 28 17:20:35 crc kubenswrapper[4811]: I0128 17:20:35.405159 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-8f868"
Jan 28 17:20:35 crc kubenswrapper[4811]: I0128 17:20:35.549099 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="a96355bd-116e-47cb-8b84-21b4e1f75b9b" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.70:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Jan 28 17:20:35 crc kubenswrapper[4811]: I0128 17:20:35.549551 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="a96355bd-116e-47cb-8b84-21b4e1f75b9b" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.70:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Jan 28 17:20:35 crc kubenswrapper[4811]: I0128 17:20:35.588089 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zsvpr\" (UniqueName: \"kubernetes.io/projected/5f89fe70-e439-44f2-9c8f-f665bb2b646d-kube-api-access-zsvpr\") pod \"5f89fe70-e439-44f2-9c8f-f665bb2b646d\" (UID: \"5f89fe70-e439-44f2-9c8f-f665bb2b646d\") "
Jan 28 17:20:35 crc kubenswrapper[4811]: I0128 17:20:35.588579 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f89fe70-e439-44f2-9c8f-f665bb2b646d-scripts\") pod \"5f89fe70-e439-44f2-9c8f-f665bb2b646d\" (UID: \"5f89fe70-e439-44f2-9c8f-f665bb2b646d\") "
Jan 28 17:20:35 crc kubenswrapper[4811]: I0128 17:20:35.588647 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f89fe70-e439-44f2-9c8f-f665bb2b646d-config-data\") pod \"5f89fe70-e439-44f2-9c8f-f665bb2b646d\" (UID: \"5f89fe70-e439-44f2-9c8f-f665bb2b646d\") "
Jan 28 17:20:35 crc kubenswrapper[4811]: I0128 17:20:35.588735 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f89fe70-e439-44f2-9c8f-f665bb2b646d-combined-ca-bundle\") pod \"5f89fe70-e439-44f2-9c8f-f665bb2b646d\" (UID: \"5f89fe70-e439-44f2-9c8f-f665bb2b646d\") "
Jan 28 17:20:35 crc kubenswrapper[4811]: I0128 17:20:35.604803 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f89fe70-e439-44f2-9c8f-f665bb2b646d-kube-api-access-zsvpr" (OuterVolumeSpecName: "kube-api-access-zsvpr") pod "5f89fe70-e439-44f2-9c8f-f665bb2b646d" (UID: "5f89fe70-e439-44f2-9c8f-f665bb2b646d"). InnerVolumeSpecName "kube-api-access-zsvpr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 17:20:35 crc kubenswrapper[4811]: I0128 17:20:35.608577 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f89fe70-e439-44f2-9c8f-f665bb2b646d-scripts" (OuterVolumeSpecName: "scripts") pod "5f89fe70-e439-44f2-9c8f-f665bb2b646d" (UID: "5f89fe70-e439-44f2-9c8f-f665bb2b646d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:20:35 crc kubenswrapper[4811]: I0128 17:20:35.614063 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f89fe70-e439-44f2-9c8f-f665bb2b646d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5f89fe70-e439-44f2-9c8f-f665bb2b646d" (UID: "5f89fe70-e439-44f2-9c8f-f665bb2b646d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:20:35 crc kubenswrapper[4811]: I0128 17:20:35.615408 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f89fe70-e439-44f2-9c8f-f665bb2b646d-config-data" (OuterVolumeSpecName: "config-data") pod "5f89fe70-e439-44f2-9c8f-f665bb2b646d" (UID: "5f89fe70-e439-44f2-9c8f-f665bb2b646d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:20:35 crc kubenswrapper[4811]: I0128 17:20:35.694606 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f89fe70-e439-44f2-9c8f-f665bb2b646d-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:35 crc kubenswrapper[4811]: I0128 17:20:35.694645 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zsvpr\" (UniqueName: \"kubernetes.io/projected/5f89fe70-e439-44f2-9c8f-f665bb2b646d-kube-api-access-zsvpr\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:35 crc kubenswrapper[4811]: I0128 17:20:35.694654 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f89fe70-e439-44f2-9c8f-f665bb2b646d-scripts\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:35 crc kubenswrapper[4811]: I0128 17:20:35.694662 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f89fe70-e439-44f2-9c8f-f665bb2b646d-config-data\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:35 crc kubenswrapper[4811]: I0128 17:20:35.778642 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="b9a6e27c-3fc7-4dac-895b-755c65a397bb" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.69:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Jan 28 17:20:35 crc kubenswrapper[4811]: I0128 17:20:35.778916 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="b9a6e27c-3fc7-4dac-895b-755c65a397bb" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.69:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Jan 28 17:20:36 crc kubenswrapper[4811]: I0128 17:20:36.046361 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-8f868" event={"ID":"5f89fe70-e439-44f2-9c8f-f665bb2b646d","Type":"ContainerDied","Data":"9b794f3e38fef0720890e36ba60e3348de1e49dc87d0552c8dd7b0efa7d90127"}
Jan 28 17:20:36 crc kubenswrapper[4811]: I0128 17:20:36.046408 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9b794f3e38fef0720890e36ba60e3348de1e49dc87d0552c8dd7b0efa7d90127"
Jan 28 17:20:36 crc kubenswrapper[4811]: I0128 17:20:36.046559 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-8f868"
Jan 28 17:20:36 crc kubenswrapper[4811]: I0128 17:20:36.742912 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Jan 28 17:20:36 crc kubenswrapper[4811]: I0128 17:20:36.743133 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="a96355bd-116e-47cb-8b84-21b4e1f75b9b" containerName="nova-api-log" containerID="cri-o://83a910aa54298e4909fde71217e5bb85167f144c342b81f09957ff092f79bcc5" gracePeriod=30
Jan 28 17:20:36 crc kubenswrapper[4811]: I0128 17:20:36.743598 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="a96355bd-116e-47cb-8b84-21b4e1f75b9b" containerName="nova-api-api" containerID="cri-o://5f2847dec248d2ee1319f4976e1d668fc62d04c2f2403798a52ade5675656bca" gracePeriod=30
Jan 28 17:20:36 crc kubenswrapper[4811]: I0128 17:20:36.760095 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 28 17:20:36 crc kubenswrapper[4811]: I0128 17:20:36.760352 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="1633ecf6-e771-4579-9792-5ecc9c915f5d" containerName="nova-scheduler-scheduler" containerID="cri-o://aed159568159eebd20b86d17e1976967e6b820bf91f3e025bbd3225e15dec6a6" gracePeriod=30
Jan 28 17:20:36 crc kubenswrapper[4811]: I0128 17:20:36.831353 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Jan 28 17:20:36 crc kubenswrapper[4811]: I0128 17:20:36.831596 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b9a6e27c-3fc7-4dac-895b-755c65a397bb" containerName="nova-metadata-log" containerID="cri-o://5b9cae204aa81ac706818898c06fd472c01d0ffe56e15e38942f84c01c017dbf" gracePeriod=30
Jan 28 17:20:36 crc kubenswrapper[4811]: I0128 17:20:36.831991 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b9a6e27c-3fc7-4dac-895b-755c65a397bb" containerName="nova-metadata-metadata" containerID="cri-o://5b260578f202ef59536a7dfe75c9e6f7ee14cb5b906d8967b3833ba313678575" gracePeriod=30
Jan 28 17:20:37 crc kubenswrapper[4811]: I0128 17:20:37.056250 4811 generic.go:334] "Generic (PLEG): container finished" podID="a96355bd-116e-47cb-8b84-21b4e1f75b9b" containerID="83a910aa54298e4909fde71217e5bb85167f144c342b81f09957ff092f79bcc5" exitCode=143
Jan 28 17:20:37 crc kubenswrapper[4811]: I0128 17:20:37.056343 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a96355bd-116e-47cb-8b84-21b4e1f75b9b","Type":"ContainerDied","Data":"83a910aa54298e4909fde71217e5bb85167f144c342b81f09957ff092f79bcc5"}
Jan 28 17:20:38 crc kubenswrapper[4811]: I0128 17:20:38.065573 4811 generic.go:334] "Generic (PLEG): container finished" podID="b9a6e27c-3fc7-4dac-895b-755c65a397bb" containerID="5b9cae204aa81ac706818898c06fd472c01d0ffe56e15e38942f84c01c017dbf" exitCode=143
Jan 28 17:20:38 crc kubenswrapper[4811]: I0128 17:20:38.065618 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b9a6e27c-3fc7-4dac-895b-755c65a397bb","Type":"ContainerDied","Data":"5b9cae204aa81ac706818898c06fd472c01d0ffe56e15e38942f84c01c017dbf"}
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.022113 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.102899 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1633ecf6-e771-4579-9792-5ecc9c915f5d-combined-ca-bundle\") pod \"1633ecf6-e771-4579-9792-5ecc9c915f5d\" (UID: \"1633ecf6-e771-4579-9792-5ecc9c915f5d\") "
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.103044 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g2b5m\" (UniqueName: \"kubernetes.io/projected/1633ecf6-e771-4579-9792-5ecc9c915f5d-kube-api-access-g2b5m\") pod \"1633ecf6-e771-4579-9792-5ecc9c915f5d\" (UID: \"1633ecf6-e771-4579-9792-5ecc9c915f5d\") "
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.103090 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1633ecf6-e771-4579-9792-5ecc9c915f5d-config-data\") pod \"1633ecf6-e771-4579-9792-5ecc9c915f5d\" (UID: \"1633ecf6-e771-4579-9792-5ecc9c915f5d\") "
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.112233 4811 generic.go:334] "Generic (PLEG): container finished" podID="1633ecf6-e771-4579-9792-5ecc9c915f5d" containerID="aed159568159eebd20b86d17e1976967e6b820bf91f3e025bbd3225e15dec6a6" exitCode=0
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.112290 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1633ecf6-e771-4579-9792-5ecc9c915f5d","Type":"ContainerDied","Data":"aed159568159eebd20b86d17e1976967e6b820bf91f3e025bbd3225e15dec6a6"}
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.112327 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.112546 4811 scope.go:117] "RemoveContainer" containerID="aed159568159eebd20b86d17e1976967e6b820bf91f3e025bbd3225e15dec6a6"
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.112525 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1633ecf6-e771-4579-9792-5ecc9c915f5d","Type":"ContainerDied","Data":"5f9dc02d8c9a3dd5945c88fd93ae98ce86b7d92beeb54b267d188bce51de4446"}
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.114713 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1633ecf6-e771-4579-9792-5ecc9c915f5d-kube-api-access-g2b5m" (OuterVolumeSpecName: "kube-api-access-g2b5m") pod "1633ecf6-e771-4579-9792-5ecc9c915f5d" (UID: "1633ecf6-e771-4579-9792-5ecc9c915f5d"). InnerVolumeSpecName "kube-api-access-g2b5m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.136523 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1633ecf6-e771-4579-9792-5ecc9c915f5d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1633ecf6-e771-4579-9792-5ecc9c915f5d" (UID: "1633ecf6-e771-4579-9792-5ecc9c915f5d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.136938 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1633ecf6-e771-4579-9792-5ecc9c915f5d-config-data" (OuterVolumeSpecName: "config-data") pod "1633ecf6-e771-4579-9792-5ecc9c915f5d" (UID: "1633ecf6-e771-4579-9792-5ecc9c915f5d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.204719 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1633ecf6-e771-4579-9792-5ecc9c915f5d-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.204759 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g2b5m\" (UniqueName: \"kubernetes.io/projected/1633ecf6-e771-4579-9792-5ecc9c915f5d-kube-api-access-g2b5m\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.204773 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1633ecf6-e771-4579-9792-5ecc9c915f5d-config-data\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.255610 4811 scope.go:117] "RemoveContainer" containerID="aed159568159eebd20b86d17e1976967e6b820bf91f3e025bbd3225e15dec6a6"
Jan 28 17:20:41 crc kubenswrapper[4811]: E0128 17:20:41.256003 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aed159568159eebd20b86d17e1976967e6b820bf91f3e025bbd3225e15dec6a6\": container with ID starting with aed159568159eebd20b86d17e1976967e6b820bf91f3e025bbd3225e15dec6a6 not found: ID does not exist" containerID="aed159568159eebd20b86d17e1976967e6b820bf91f3e025bbd3225e15dec6a6"
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.256038 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aed159568159eebd20b86d17e1976967e6b820bf91f3e025bbd3225e15dec6a6"} err="failed to get container status \"aed159568159eebd20b86d17e1976967e6b820bf91f3e025bbd3225e15dec6a6\": rpc error: code = NotFound desc = could not find container \"aed159568159eebd20b86d17e1976967e6b820bf91f3e025bbd3225e15dec6a6\": container with ID starting with aed159568159eebd20b86d17e1976967e6b820bf91f3e025bbd3225e15dec6a6 not found: ID does not exist"
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.480640 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.489234 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.509373 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Jan 28 17:20:41 crc kubenswrapper[4811]: E0128 17:20:41.509753 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1633ecf6-e771-4579-9792-5ecc9c915f5d" containerName="nova-scheduler-scheduler"
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.509766 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="1633ecf6-e771-4579-9792-5ecc9c915f5d" containerName="nova-scheduler-scheduler"
Jan 28 17:20:41 crc kubenswrapper[4811]: E0128 17:20:41.509793 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f89fe70-e439-44f2-9c8f-f665bb2b646d" containerName="nova-manage"
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.509799 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f89fe70-e439-44f2-9c8f-f665bb2b646d" containerName="nova-manage"
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.509965 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f89fe70-e439-44f2-9c8f-f665bb2b646d" containerName="nova-manage"
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.509984 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="1633ecf6-e771-4579-9792-5ecc9c915f5d" containerName="nova-scheduler-scheduler"
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.510562 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.515587 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.519962 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.607486 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.610899 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a69222f8-b291-45e4-9be5-b7860cadb5db-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a69222f8-b291-45e4-9be5-b7860cadb5db\") " pod="openstack/nova-scheduler-0"
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.610985 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a69222f8-b291-45e4-9be5-b7860cadb5db-config-data\") pod \"nova-scheduler-0\" (UID: \"a69222f8-b291-45e4-9be5-b7860cadb5db\") " pod="openstack/nova-scheduler-0"
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.611032 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wctfg\" (UniqueName: \"kubernetes.io/projected/a69222f8-b291-45e4-9be5-b7860cadb5db-kube-api-access-wctfg\") pod \"nova-scheduler-0\" (UID: \"a69222f8-b291-45e4-9be5-b7860cadb5db\") " pod="openstack/nova-scheduler-0"
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.712140 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rfc67\" (UniqueName: \"kubernetes.io/projected/a96355bd-116e-47cb-8b84-21b4e1f75b9b-kube-api-access-rfc67\") pod \"a96355bd-116e-47cb-8b84-21b4e1f75b9b\" (UID: \"a96355bd-116e-47cb-8b84-21b4e1f75b9b\") "
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.712218 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a96355bd-116e-47cb-8b84-21b4e1f75b9b-combined-ca-bundle\") pod \"a96355bd-116e-47cb-8b84-21b4e1f75b9b\" (UID: \"a96355bd-116e-47cb-8b84-21b4e1f75b9b\") "
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.712358 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a96355bd-116e-47cb-8b84-21b4e1f75b9b-logs\") pod \"a96355bd-116e-47cb-8b84-21b4e1f75b9b\" (UID: \"a96355bd-116e-47cb-8b84-21b4e1f75b9b\") "
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.712404 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a96355bd-116e-47cb-8b84-21b4e1f75b9b-config-data\") pod \"a96355bd-116e-47cb-8b84-21b4e1f75b9b\" (UID: \"a96355bd-116e-47cb-8b84-21b4e1f75b9b\") "
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.712840 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a69222f8-b291-45e4-9be5-b7860cadb5db-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a69222f8-b291-45e4-9be5-b7860cadb5db\") " pod="openstack/nova-scheduler-0"
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.712909 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a69222f8-b291-45e4-9be5-b7860cadb5db-config-data\") pod \"nova-scheduler-0\" (UID: \"a69222f8-b291-45e4-9be5-b7860cadb5db\") " pod="openstack/nova-scheduler-0"
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.712941 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wctfg\" (UniqueName: \"kubernetes.io/projected/a69222f8-b291-45e4-9be5-b7860cadb5db-kube-api-access-wctfg\") pod \"nova-scheduler-0\" (UID: \"a69222f8-b291-45e4-9be5-b7860cadb5db\") " pod="openstack/nova-scheduler-0"
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.714512 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a96355bd-116e-47cb-8b84-21b4e1f75b9b-logs" (OuterVolumeSpecName: "logs") pod "a96355bd-116e-47cb-8b84-21b4e1f75b9b" (UID: "a96355bd-116e-47cb-8b84-21b4e1f75b9b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.718873 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a69222f8-b291-45e4-9be5-b7860cadb5db-config-data\") pod \"nova-scheduler-0\" (UID: \"a69222f8-b291-45e4-9be5-b7860cadb5db\") " pod="openstack/nova-scheduler-0"
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.718917 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a69222f8-b291-45e4-9be5-b7860cadb5db-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a69222f8-b291-45e4-9be5-b7860cadb5db\") " pod="openstack/nova-scheduler-0"
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.720576 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a96355bd-116e-47cb-8b84-21b4e1f75b9b-kube-api-access-rfc67" (OuterVolumeSpecName: "kube-api-access-rfc67") pod "a96355bd-116e-47cb-8b84-21b4e1f75b9b" (UID: "a96355bd-116e-47cb-8b84-21b4e1f75b9b"). InnerVolumeSpecName "kube-api-access-rfc67". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.742367 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wctfg\" (UniqueName: \"kubernetes.io/projected/a69222f8-b291-45e4-9be5-b7860cadb5db-kube-api-access-wctfg\") pod \"nova-scheduler-0\" (UID: \"a69222f8-b291-45e4-9be5-b7860cadb5db\") " pod="openstack/nova-scheduler-0"
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.744572 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a96355bd-116e-47cb-8b84-21b4e1f75b9b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a96355bd-116e-47cb-8b84-21b4e1f75b9b" (UID: "a96355bd-116e-47cb-8b84-21b4e1f75b9b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.755077 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a96355bd-116e-47cb-8b84-21b4e1f75b9b-config-data" (OuterVolumeSpecName: "config-data") pod "a96355bd-116e-47cb-8b84-21b4e1f75b9b" (UID: "a96355bd-116e-47cb-8b84-21b4e1f75b9b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.814628 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a96355bd-116e-47cb-8b84-21b4e1f75b9b-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.814672 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a96355bd-116e-47cb-8b84-21b4e1f75b9b-logs\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.814690 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a96355bd-116e-47cb-8b84-21b4e1f75b9b-config-data\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.814707 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rfc67\" (UniqueName: \"kubernetes.io/projected/a96355bd-116e-47cb-8b84-21b4e1f75b9b-kube-api-access-rfc67\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.853895 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 28 17:20:41 crc kubenswrapper[4811]: I0128 17:20:41.888549 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.017827 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lbnhc\" (UniqueName: \"kubernetes.io/projected/b9a6e27c-3fc7-4dac-895b-755c65a397bb-kube-api-access-lbnhc\") pod \"b9a6e27c-3fc7-4dac-895b-755c65a397bb\" (UID: \"b9a6e27c-3fc7-4dac-895b-755c65a397bb\") "
Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.018041 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9a6e27c-3fc7-4dac-895b-755c65a397bb-combined-ca-bundle\") pod \"b9a6e27c-3fc7-4dac-895b-755c65a397bb\" (UID: \"b9a6e27c-3fc7-4dac-895b-755c65a397bb\") "
Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.018087 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9a6e27c-3fc7-4dac-895b-755c65a397bb-config-data\") pod \"b9a6e27c-3fc7-4dac-895b-755c65a397bb\" (UID: \"b9a6e27c-3fc7-4dac-895b-755c65a397bb\") "
Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.018219 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9a6e27c-3fc7-4dac-895b-755c65a397bb-logs\") pod \"b9a6e27c-3fc7-4dac-895b-755c65a397bb\" (UID: \"b9a6e27c-3fc7-4dac-895b-755c65a397bb\") "
Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.025962 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9a6e27c-3fc7-4dac-895b-755c65a397bb-logs" (OuterVolumeSpecName: "logs") pod "b9a6e27c-3fc7-4dac-895b-755c65a397bb" (UID: "b9a6e27c-3fc7-4dac-895b-755c65a397bb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.027645 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9a6e27c-3fc7-4dac-895b-755c65a397bb-kube-api-access-lbnhc" (OuterVolumeSpecName: "kube-api-access-lbnhc") pod "b9a6e27c-3fc7-4dac-895b-755c65a397bb" (UID: "b9a6e27c-3fc7-4dac-895b-755c65a397bb"). InnerVolumeSpecName "kube-api-access-lbnhc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.045325 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9a6e27c-3fc7-4dac-895b-755c65a397bb-config-data" (OuterVolumeSpecName: "config-data") pod "b9a6e27c-3fc7-4dac-895b-755c65a397bb" (UID: "b9a6e27c-3fc7-4dac-895b-755c65a397bb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.047444 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9a6e27c-3fc7-4dac-895b-755c65a397bb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b9a6e27c-3fc7-4dac-895b-755c65a397bb" (UID: "b9a6e27c-3fc7-4dac-895b-755c65a397bb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.120025 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9a6e27c-3fc7-4dac-895b-755c65a397bb-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.120068 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9a6e27c-3fc7-4dac-895b-755c65a397bb-config-data\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.120085 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9a6e27c-3fc7-4dac-895b-755c65a397bb-logs\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.120098 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lbnhc\" (UniqueName: \"kubernetes.io/projected/b9a6e27c-3fc7-4dac-895b-755c65a397bb-kube-api-access-lbnhc\") on node \"crc\" DevicePath \"\""
Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.124701 4811 generic.go:334] "Generic (PLEG): container finished" podID="a96355bd-116e-47cb-8b84-21b4e1f75b9b" containerID="5f2847dec248d2ee1319f4976e1d668fc62d04c2f2403798a52ade5675656bca" exitCode=0
Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.124747 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a96355bd-116e-47cb-8b84-21b4e1f75b9b","Type":"ContainerDied","Data":"5f2847dec248d2ee1319f4976e1d668fc62d04c2f2403798a52ade5675656bca"}
Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.124794 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a96355bd-116e-47cb-8b84-21b4e1f75b9b","Type":"ContainerDied","Data":"3ee1c9c0733fcf1f8802191ab6d3366abb046a91c931307cfdab8ebef9eb6aaf"}
Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.124818 4811 scope.go:117] "RemoveContainer" containerID="5f2847dec248d2ee1319f4976e1d668fc62d04c2f2403798a52ade5675656bca"
Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.125179 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.131247 4811 generic.go:334] "Generic (PLEG): container finished" podID="b9a6e27c-3fc7-4dac-895b-755c65a397bb" containerID="5b260578f202ef59536a7dfe75c9e6f7ee14cb5b906d8967b3833ba313678575" exitCode=0
Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.131300 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.131302 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b9a6e27c-3fc7-4dac-895b-755c65a397bb","Type":"ContainerDied","Data":"5b260578f202ef59536a7dfe75c9e6f7ee14cb5b906d8967b3833ba313678575"}
Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.131464 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b9a6e27c-3fc7-4dac-895b-755c65a397bb","Type":"ContainerDied","Data":"72210e4667ffd000c2725e73fa566bd54396af3ff5cd86cd140b67a5d73c457d"}
Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.154085 4811 scope.go:117] "RemoveContainer" containerID="83a910aa54298e4909fde71217e5bb85167f144c342b81f09957ff092f79bcc5"
Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.185530 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.206517 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.207917 4811 scope.go:117] "RemoveContainer" containerID="5f2847dec248d2ee1319f4976e1d668fc62d04c2f2403798a52ade5675656bca"
Jan 28 17:20:42 crc kubenswrapper[4811]: E0128 17:20:42.208913 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f2847dec248d2ee1319f4976e1d668fc62d04c2f2403798a52ade5675656bca\": container with ID starting with 5f2847dec248d2ee1319f4976e1d668fc62d04c2f2403798a52ade5675656bca not found: ID does not exist" containerID="5f2847dec248d2ee1319f4976e1d668fc62d04c2f2403798a52ade5675656bca"
Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.209017 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f2847dec248d2ee1319f4976e1d668fc62d04c2f2403798a52ade5675656bca"} err="failed to get container status \"5f2847dec248d2ee1319f4976e1d668fc62d04c2f2403798a52ade5675656bca\": rpc error: code = NotFound desc = could not find container \"5f2847dec248d2ee1319f4976e1d668fc62d04c2f2403798a52ade5675656bca\": container with ID starting with 5f2847dec248d2ee1319f4976e1d668fc62d04c2f2403798a52ade5675656bca not found: ID does not exist"
Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.209109 4811 scope.go:117] "RemoveContainer" containerID="83a910aa54298e4909fde71217e5bb85167f144c342b81f09957ff092f79bcc5"
Jan 28 17:20:42 crc kubenswrapper[4811]: E0128 17:20:42.209704 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"83a910aa54298e4909fde71217e5bb85167f144c342b81f09957ff092f79bcc5\": container with ID starting with 83a910aa54298e4909fde71217e5bb85167f144c342b81f09957ff092f79bcc5 not found: ID does not exist" containerID="83a910aa54298e4909fde71217e5bb85167f144c342b81f09957ff092f79bcc5"
Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.213729 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83a910aa54298e4909fde71217e5bb85167f144c342b81f09957ff092f79bcc5"} err="failed to get container status \"83a910aa54298e4909fde71217e5bb85167f144c342b81f09957ff092f79bcc5\": rpc error: code = NotFound desc = could not find container \"83a910aa54298e4909fde71217e5bb85167f144c342b81f09957ff092f79bcc5\": container with ID starting with
83a910aa54298e4909fde71217e5bb85167f144c342b81f09957ff092f79bcc5 not found: ID does not exist" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.213815 4811 scope.go:117] "RemoveContainer" containerID="5b260578f202ef59536a7dfe75c9e6f7ee14cb5b906d8967b3833ba313678575" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.228440 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 28 17:20:42 crc kubenswrapper[4811]: E0128 17:20:42.229043 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a96355bd-116e-47cb-8b84-21b4e1f75b9b" containerName="nova-api-log" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.229061 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="a96355bd-116e-47cb-8b84-21b4e1f75b9b" containerName="nova-api-log" Jan 28 17:20:42 crc kubenswrapper[4811]: E0128 17:20:42.229078 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9a6e27c-3fc7-4dac-895b-755c65a397bb" containerName="nova-metadata-log" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.229086 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9a6e27c-3fc7-4dac-895b-755c65a397bb" containerName="nova-metadata-log" Jan 28 17:20:42 crc kubenswrapper[4811]: E0128 17:20:42.229100 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9a6e27c-3fc7-4dac-895b-755c65a397bb" containerName="nova-metadata-metadata" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.229106 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9a6e27c-3fc7-4dac-895b-755c65a397bb" containerName="nova-metadata-metadata" Jan 28 17:20:42 crc kubenswrapper[4811]: E0128 17:20:42.229132 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a96355bd-116e-47cb-8b84-21b4e1f75b9b" containerName="nova-api-api" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.229139 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="a96355bd-116e-47cb-8b84-21b4e1f75b9b" containerName="nova-api-api" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.229482 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9a6e27c-3fc7-4dac-895b-755c65a397bb" containerName="nova-metadata-metadata" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.229496 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="a96355bd-116e-47cb-8b84-21b4e1f75b9b" containerName="nova-api-api" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.229519 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="a96355bd-116e-47cb-8b84-21b4e1f75b9b" containerName="nova-api-log" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.229533 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9a6e27c-3fc7-4dac-895b-755c65a397bb" containerName="nova-metadata-log" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.231254 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.235967 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.254401 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.260680 4811 scope.go:117] "RemoveContainer" containerID="5b9cae204aa81ac706818898c06fd472c01d0ffe56e15e38942f84c01c017dbf" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.279256 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.284860 4811 scope.go:117] "RemoveContainer" containerID="5b260578f202ef59536a7dfe75c9e6f7ee14cb5b906d8967b3833ba313678575" Jan 28 17:20:42 crc kubenswrapper[4811]: E0128 17:20:42.285505 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b260578f202ef59536a7dfe75c9e6f7ee14cb5b906d8967b3833ba313678575\": container with ID starting with 5b260578f202ef59536a7dfe75c9e6f7ee14cb5b906d8967b3833ba313678575 not found: ID does not exist" containerID="5b260578f202ef59536a7dfe75c9e6f7ee14cb5b906d8967b3833ba313678575" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.285550 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b260578f202ef59536a7dfe75c9e6f7ee14cb5b906d8967b3833ba313678575"} err="failed to get container status \"5b260578f202ef59536a7dfe75c9e6f7ee14cb5b906d8967b3833ba313678575\": rpc error: code = NotFound desc = could not find container \"5b260578f202ef59536a7dfe75c9e6f7ee14cb5b906d8967b3833ba313678575\": container with ID starting with 5b260578f202ef59536a7dfe75c9e6f7ee14cb5b906d8967b3833ba313678575 not found: ID does not exist" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.285575 4811 scope.go:117] "RemoveContainer" containerID="5b9cae204aa81ac706818898c06fd472c01d0ffe56e15e38942f84c01c017dbf" Jan 28 17:20:42 crc kubenswrapper[4811]: E0128 17:20:42.285826 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b9cae204aa81ac706818898c06fd472c01d0ffe56e15e38942f84c01c017dbf\": container with ID starting with 5b9cae204aa81ac706818898c06fd472c01d0ffe56e15e38942f84c01c017dbf not found: ID does not exist" containerID="5b9cae204aa81ac706818898c06fd472c01d0ffe56e15e38942f84c01c017dbf" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.285842 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b9cae204aa81ac706818898c06fd472c01d0ffe56e15e38942f84c01c017dbf"} err="failed to get container status \"5b9cae204aa81ac706818898c06fd472c01d0ffe56e15e38942f84c01c017dbf\": rpc error: code = NotFound desc = could not find container \"5b9cae204aa81ac706818898c06fd472c01d0ffe56e15e38942f84c01c017dbf\": container with ID starting with 5b9cae204aa81ac706818898c06fd472c01d0ffe56e15e38942f84c01c017dbf not found: ID does not exist" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.290927 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.301423 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.303090 4811 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.307073 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.325502 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.329994 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c927b\" (UniqueName: \"kubernetes.io/projected/3c5dba6c-abfe-4686-895f-e051268d21db-kube-api-access-c927b\") pod \"nova-api-0\" (UID: \"3c5dba6c-abfe-4686-895f-e051268d21db\") " pod="openstack/nova-api-0" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.330043 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c5dba6c-abfe-4686-895f-e051268d21db-logs\") pod \"nova-api-0\" (UID: \"3c5dba6c-abfe-4686-895f-e051268d21db\") " pod="openstack/nova-api-0" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.330097 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c5dba6c-abfe-4686-895f-e051268d21db-config-data\") pod \"nova-api-0\" (UID: \"3c5dba6c-abfe-4686-895f-e051268d21db\") " pod="openstack/nova-api-0" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.330390 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c5dba6c-abfe-4686-895f-e051268d21db-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3c5dba6c-abfe-4686-895f-e051268d21db\") " pod="openstack/nova-api-0" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.354261 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1633ecf6-e771-4579-9792-5ecc9c915f5d" path="/var/lib/kubelet/pods/1633ecf6-e771-4579-9792-5ecc9c915f5d/volumes" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.354985 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a96355bd-116e-47cb-8b84-21b4e1f75b9b" path="/var/lib/kubelet/pods/a96355bd-116e-47cb-8b84-21b4e1f75b9b/volumes" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.355699 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9a6e27c-3fc7-4dac-895b-755c65a397bb" path="/var/lib/kubelet/pods/b9a6e27c-3fc7-4dac-895b-755c65a397bb/volumes" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.356971 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.432490 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41723cdb-d2fc-4716-9df8-2210e796b3aa-config-data\") pod \"nova-metadata-0\" (UID: \"41723cdb-d2fc-4716-9df8-2210e796b3aa\") " pod="openstack/nova-metadata-0" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.432537 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c5dba6c-abfe-4686-895f-e051268d21db-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3c5dba6c-abfe-4686-895f-e051268d21db\") " pod="openstack/nova-api-0" Jan 28 17:20:42 crc 
kubenswrapper[4811]: I0128 17:20:42.432586 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c927b\" (UniqueName: \"kubernetes.io/projected/3c5dba6c-abfe-4686-895f-e051268d21db-kube-api-access-c927b\") pod \"nova-api-0\" (UID: \"3c5dba6c-abfe-4686-895f-e051268d21db\") " pod="openstack/nova-api-0" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.432616 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c5dba6c-abfe-4686-895f-e051268d21db-logs\") pod \"nova-api-0\" (UID: \"3c5dba6c-abfe-4686-895f-e051268d21db\") " pod="openstack/nova-api-0" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.432644 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41723cdb-d2fc-4716-9df8-2210e796b3aa-logs\") pod \"nova-metadata-0\" (UID: \"41723cdb-d2fc-4716-9df8-2210e796b3aa\") " pod="openstack/nova-metadata-0" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.432668 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6sgcp\" (UniqueName: \"kubernetes.io/projected/41723cdb-d2fc-4716-9df8-2210e796b3aa-kube-api-access-6sgcp\") pod \"nova-metadata-0\" (UID: \"41723cdb-d2fc-4716-9df8-2210e796b3aa\") " pod="openstack/nova-metadata-0" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.432719 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c5dba6c-abfe-4686-895f-e051268d21db-config-data\") pod \"nova-api-0\" (UID: \"3c5dba6c-abfe-4686-895f-e051268d21db\") " pod="openstack/nova-api-0" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.432762 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41723cdb-d2fc-4716-9df8-2210e796b3aa-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"41723cdb-d2fc-4716-9df8-2210e796b3aa\") " pod="openstack/nova-metadata-0" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.433719 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c5dba6c-abfe-4686-895f-e051268d21db-logs\") pod \"nova-api-0\" (UID: \"3c5dba6c-abfe-4686-895f-e051268d21db\") " pod="openstack/nova-api-0" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.436820 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c5dba6c-abfe-4686-895f-e051268d21db-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3c5dba6c-abfe-4686-895f-e051268d21db\") " pod="openstack/nova-api-0" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.437810 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c5dba6c-abfe-4686-895f-e051268d21db-config-data\") pod \"nova-api-0\" (UID: \"3c5dba6c-abfe-4686-895f-e051268d21db\") " pod="openstack/nova-api-0" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.448824 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c927b\" (UniqueName: \"kubernetes.io/projected/3c5dba6c-abfe-4686-895f-e051268d21db-kube-api-access-c927b\") pod \"nova-api-0\" (UID: \"3c5dba6c-abfe-4686-895f-e051268d21db\") " pod="openstack/nova-api-0" Jan 28 17:20:42 
crc kubenswrapper[4811]: I0128 17:20:42.533841 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41723cdb-d2fc-4716-9df8-2210e796b3aa-logs\") pod \"nova-metadata-0\" (UID: \"41723cdb-d2fc-4716-9df8-2210e796b3aa\") " pod="openstack/nova-metadata-0" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.533878 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6sgcp\" (UniqueName: \"kubernetes.io/projected/41723cdb-d2fc-4716-9df8-2210e796b3aa-kube-api-access-6sgcp\") pod \"nova-metadata-0\" (UID: \"41723cdb-d2fc-4716-9df8-2210e796b3aa\") " pod="openstack/nova-metadata-0" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.533944 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41723cdb-d2fc-4716-9df8-2210e796b3aa-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"41723cdb-d2fc-4716-9df8-2210e796b3aa\") " pod="openstack/nova-metadata-0" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.534009 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41723cdb-d2fc-4716-9df8-2210e796b3aa-config-data\") pod \"nova-metadata-0\" (UID: \"41723cdb-d2fc-4716-9df8-2210e796b3aa\") " pod="openstack/nova-metadata-0" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.534251 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41723cdb-d2fc-4716-9df8-2210e796b3aa-logs\") pod \"nova-metadata-0\" (UID: \"41723cdb-d2fc-4716-9df8-2210e796b3aa\") " pod="openstack/nova-metadata-0" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.537091 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41723cdb-d2fc-4716-9df8-2210e796b3aa-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"41723cdb-d2fc-4716-9df8-2210e796b3aa\") " pod="openstack/nova-metadata-0" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.537797 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41723cdb-d2fc-4716-9df8-2210e796b3aa-config-data\") pod \"nova-metadata-0\" (UID: \"41723cdb-d2fc-4716-9df8-2210e796b3aa\") " pod="openstack/nova-metadata-0" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.552190 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6sgcp\" (UniqueName: \"kubernetes.io/projected/41723cdb-d2fc-4716-9df8-2210e796b3aa-kube-api-access-6sgcp\") pod \"nova-metadata-0\" (UID: \"41723cdb-d2fc-4716-9df8-2210e796b3aa\") " pod="openstack/nova-metadata-0" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.552588 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 28 17:20:42 crc kubenswrapper[4811]: I0128 17:20:42.626891 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 28 17:20:43 crc kubenswrapper[4811]: I0128 17:20:43.005961 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 28 17:20:43 crc kubenswrapper[4811]: I0128 17:20:43.125577 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 28 17:20:43 crc kubenswrapper[4811]: I0128 17:20:43.143719 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"41723cdb-d2fc-4716-9df8-2210e796b3aa","Type":"ContainerStarted","Data":"ba6c4ccb0fda13e0da137a20855ff3df2b3e7213e2b620cc07aa65d7f432ad41"} Jan 28 17:20:43 crc kubenswrapper[4811]: I0128 17:20:43.146155 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3c5dba6c-abfe-4686-895f-e051268d21db","Type":"ContainerStarted","Data":"08585d3861746d90706c8d577ba84848623cff84c7c6f3278f3ee288185572b0"} Jan 28 17:20:43 crc kubenswrapper[4811]: I0128 17:20:43.149958 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a69222f8-b291-45e4-9be5-b7860cadb5db","Type":"ContainerStarted","Data":"8bf851e64771cb1f848e36cdc9f8ef4ff73c948459eb7dd538f6d3bde876f14d"} Jan 28 17:20:43 crc kubenswrapper[4811]: I0128 17:20:43.149988 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a69222f8-b291-45e4-9be5-b7860cadb5db","Type":"ContainerStarted","Data":"0e1ee62884a166d1544e02cda354db371fc10502a2df77e8c644569a67fb9797"} Jan 28 17:20:43 crc kubenswrapper[4811]: I0128 17:20:43.171953 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.171934099 podStartE2EDuration="2.171934099s" podCreationTimestamp="2026-01-28 17:20:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:20:43.165849072 +0000 UTC m=+5735.920212665" watchObservedRunningTime="2026-01-28 17:20:43.171934099 +0000 UTC m=+5735.926297682" Jan 28 17:20:44 crc kubenswrapper[4811]: I0128 17:20:44.164418 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"41723cdb-d2fc-4716-9df8-2210e796b3aa","Type":"ContainerStarted","Data":"7f5fa9f058e9294267306c7a5cc31d03d67e84936435375d194e2ca7fe5bb870"} Jan 28 17:20:44 crc kubenswrapper[4811]: I0128 17:20:44.164777 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"41723cdb-d2fc-4716-9df8-2210e796b3aa","Type":"ContainerStarted","Data":"9ab9e5bffe658dac1fa8b9977f65977c2b05ef3b52f36519b307f272ab1dc9b7"} Jan 28 17:20:44 crc kubenswrapper[4811]: I0128 17:20:44.166837 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3c5dba6c-abfe-4686-895f-e051268d21db","Type":"ContainerStarted","Data":"faf527c4d505cb091f08bf0e1f0bb44e3fe1cc078499bb4dd9817b9ab4d442f8"} Jan 28 17:20:44 crc kubenswrapper[4811]: I0128 17:20:44.166927 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3c5dba6c-abfe-4686-895f-e051268d21db","Type":"ContainerStarted","Data":"cee7a9728b8d6d7b06e26029bbc1e36dfa9b9afd001fc2cf2f67008a69ef4a09"} Jan 28 17:20:44 crc kubenswrapper[4811]: I0128 17:20:44.195079 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.195023972 podStartE2EDuration="2.195023972s" 
podCreationTimestamp="2026-01-28 17:20:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:20:44.185289717 +0000 UTC m=+5736.939653310" watchObservedRunningTime="2026-01-28 17:20:44.195023972 +0000 UTC m=+5736.949387555" Jan 28 17:20:44 crc kubenswrapper[4811]: I0128 17:20:44.203886 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.203860013 podStartE2EDuration="2.203860013s" podCreationTimestamp="2026-01-28 17:20:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:20:44.199764812 +0000 UTC m=+5736.954128395" watchObservedRunningTime="2026-01-28 17:20:44.203860013 +0000 UTC m=+5736.958223616" Jan 28 17:20:45 crc kubenswrapper[4811]: I0128 17:20:45.343212 4811 scope.go:117] "RemoveContainer" containerID="3abd2ca52dcea4f0bad2eccd9e9279be836bcb0c1530a2433f35225ded0da091" Jan 28 17:20:45 crc kubenswrapper[4811]: E0128 17:20:45.344313 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:20:46 crc kubenswrapper[4811]: I0128 17:20:46.854635 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Jan 28 17:20:47 crc kubenswrapper[4811]: I0128 17:20:47.627377 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 28 17:20:47 crc kubenswrapper[4811]: I0128 17:20:47.627748 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 28 17:20:51 crc kubenswrapper[4811]: I0128 17:20:51.854642 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Jan 28 17:20:51 crc kubenswrapper[4811]: I0128 17:20:51.881589 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Jan 28 17:20:52 crc kubenswrapper[4811]: I0128 17:20:52.262642 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jan 28 17:20:52 crc kubenswrapper[4811]: I0128 17:20:52.553717 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 28 17:20:52 crc kubenswrapper[4811]: I0128 17:20:52.553793 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 28 17:20:52 crc kubenswrapper[4811]: I0128 17:20:52.629675 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 28 17:20:52 crc kubenswrapper[4811]: I0128 17:20:52.629757 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 28 17:20:53 crc kubenswrapper[4811]: I0128 17:20:53.595632 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3c5dba6c-abfe-4686-895f-e051268d21db" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.74:8774/\": context deadline exceeded (Client.Timeout 
exceeded while awaiting headers)" Jan 28 17:20:53 crc kubenswrapper[4811]: I0128 17:20:53.595888 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3c5dba6c-abfe-4686-895f-e051268d21db" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.74:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 28 17:20:53 crc kubenswrapper[4811]: I0128 17:20:53.710730 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="41723cdb-d2fc-4716-9df8-2210e796b3aa" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.75:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 28 17:20:53 crc kubenswrapper[4811]: I0128 17:20:53.710709 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="41723cdb-d2fc-4716-9df8-2210e796b3aa" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.75:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 28 17:20:58 crc kubenswrapper[4811]: I0128 17:20:58.346645 4811 scope.go:117] "RemoveContainer" containerID="3abd2ca52dcea4f0bad2eccd9e9279be836bcb0c1530a2433f35225ded0da091" Jan 28 17:20:58 crc kubenswrapper[4811]: E0128 17:20:58.347410 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:21:02 crc kubenswrapper[4811]: I0128 17:21:02.556719 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 28 17:21:02 crc kubenswrapper[4811]: I0128 17:21:02.557139 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 28 17:21:02 crc kubenswrapper[4811]: I0128 17:21:02.558088 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 28 17:21:02 crc kubenswrapper[4811]: I0128 17:21:02.558136 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 28 17:21:02 crc kubenswrapper[4811]: I0128 17:21:02.561285 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 28 17:21:02 crc kubenswrapper[4811]: I0128 17:21:02.561349 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 28 17:21:02 crc kubenswrapper[4811]: I0128 17:21:02.629199 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 28 17:21:02 crc kubenswrapper[4811]: I0128 17:21:02.629514 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 28 17:21:02 crc kubenswrapper[4811]: I0128 17:21:02.633903 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 28 17:21:02 crc kubenswrapper[4811]: I0128 17:21:02.645909 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 28 17:21:02 crc kubenswrapper[4811]: I0128 17:21:02.769848 4811 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84f785c5f9-x2hfm"] Jan 28 17:21:02 crc kubenswrapper[4811]: I0128 17:21:02.771591 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84f785c5f9-x2hfm" Jan 28 17:21:02 crc kubenswrapper[4811]: I0128 17:21:02.835864 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84f785c5f9-x2hfm"] Jan 28 17:21:02 crc kubenswrapper[4811]: I0128 17:21:02.915910 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b78ns\" (UniqueName: \"kubernetes.io/projected/27af52ad-5ab2-40c7-bbe3-da26be337671-kube-api-access-b78ns\") pod \"dnsmasq-dns-84f785c5f9-x2hfm\" (UID: \"27af52ad-5ab2-40c7-bbe3-da26be337671\") " pod="openstack/dnsmasq-dns-84f785c5f9-x2hfm" Jan 28 17:21:02 crc kubenswrapper[4811]: I0128 17:21:02.915988 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/27af52ad-5ab2-40c7-bbe3-da26be337671-dns-svc\") pod \"dnsmasq-dns-84f785c5f9-x2hfm\" (UID: \"27af52ad-5ab2-40c7-bbe3-da26be337671\") " pod="openstack/dnsmasq-dns-84f785c5f9-x2hfm" Jan 28 17:21:02 crc kubenswrapper[4811]: I0128 17:21:02.916169 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27af52ad-5ab2-40c7-bbe3-da26be337671-config\") pod \"dnsmasq-dns-84f785c5f9-x2hfm\" (UID: \"27af52ad-5ab2-40c7-bbe3-da26be337671\") " pod="openstack/dnsmasq-dns-84f785c5f9-x2hfm" Jan 28 17:21:02 crc kubenswrapper[4811]: I0128 17:21:02.916228 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/27af52ad-5ab2-40c7-bbe3-da26be337671-ovsdbserver-sb\") pod \"dnsmasq-dns-84f785c5f9-x2hfm\" (UID: \"27af52ad-5ab2-40c7-bbe3-da26be337671\") " pod="openstack/dnsmasq-dns-84f785c5f9-x2hfm" Jan 28 17:21:02 crc kubenswrapper[4811]: I0128 17:21:02.916369 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/27af52ad-5ab2-40c7-bbe3-da26be337671-ovsdbserver-nb\") pod \"dnsmasq-dns-84f785c5f9-x2hfm\" (UID: \"27af52ad-5ab2-40c7-bbe3-da26be337671\") " pod="openstack/dnsmasq-dns-84f785c5f9-x2hfm" Jan 28 17:21:03 crc kubenswrapper[4811]: I0128 17:21:03.018668 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b78ns\" (UniqueName: \"kubernetes.io/projected/27af52ad-5ab2-40c7-bbe3-da26be337671-kube-api-access-b78ns\") pod \"dnsmasq-dns-84f785c5f9-x2hfm\" (UID: \"27af52ad-5ab2-40c7-bbe3-da26be337671\") " pod="openstack/dnsmasq-dns-84f785c5f9-x2hfm" Jan 28 17:21:03 crc kubenswrapper[4811]: I0128 17:21:03.018716 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/27af52ad-5ab2-40c7-bbe3-da26be337671-dns-svc\") pod \"dnsmasq-dns-84f785c5f9-x2hfm\" (UID: \"27af52ad-5ab2-40c7-bbe3-da26be337671\") " pod="openstack/dnsmasq-dns-84f785c5f9-x2hfm" Jan 28 17:21:03 crc kubenswrapper[4811]: I0128 17:21:03.018763 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27af52ad-5ab2-40c7-bbe3-da26be337671-config\") pod \"dnsmasq-dns-84f785c5f9-x2hfm\" (UID: \"27af52ad-5ab2-40c7-bbe3-da26be337671\") " 
pod="openstack/dnsmasq-dns-84f785c5f9-x2hfm" Jan 28 17:21:03 crc kubenswrapper[4811]: I0128 17:21:03.018784 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/27af52ad-5ab2-40c7-bbe3-da26be337671-ovsdbserver-sb\") pod \"dnsmasq-dns-84f785c5f9-x2hfm\" (UID: \"27af52ad-5ab2-40c7-bbe3-da26be337671\") " pod="openstack/dnsmasq-dns-84f785c5f9-x2hfm" Jan 28 17:21:03 crc kubenswrapper[4811]: I0128 17:21:03.018802 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/27af52ad-5ab2-40c7-bbe3-da26be337671-ovsdbserver-nb\") pod \"dnsmasq-dns-84f785c5f9-x2hfm\" (UID: \"27af52ad-5ab2-40c7-bbe3-da26be337671\") " pod="openstack/dnsmasq-dns-84f785c5f9-x2hfm" Jan 28 17:21:03 crc kubenswrapper[4811]: I0128 17:21:03.019735 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27af52ad-5ab2-40c7-bbe3-da26be337671-config\") pod \"dnsmasq-dns-84f785c5f9-x2hfm\" (UID: \"27af52ad-5ab2-40c7-bbe3-da26be337671\") " pod="openstack/dnsmasq-dns-84f785c5f9-x2hfm" Jan 28 17:21:03 crc kubenswrapper[4811]: I0128 17:21:03.019945 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/27af52ad-5ab2-40c7-bbe3-da26be337671-ovsdbserver-nb\") pod \"dnsmasq-dns-84f785c5f9-x2hfm\" (UID: \"27af52ad-5ab2-40c7-bbe3-da26be337671\") " pod="openstack/dnsmasq-dns-84f785c5f9-x2hfm" Jan 28 17:21:03 crc kubenswrapper[4811]: I0128 17:21:03.019969 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/27af52ad-5ab2-40c7-bbe3-da26be337671-ovsdbserver-sb\") pod \"dnsmasq-dns-84f785c5f9-x2hfm\" (UID: \"27af52ad-5ab2-40c7-bbe3-da26be337671\") " pod="openstack/dnsmasq-dns-84f785c5f9-x2hfm" Jan 28 17:21:03 crc kubenswrapper[4811]: I0128 17:21:03.020088 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/27af52ad-5ab2-40c7-bbe3-da26be337671-dns-svc\") pod \"dnsmasq-dns-84f785c5f9-x2hfm\" (UID: \"27af52ad-5ab2-40c7-bbe3-da26be337671\") " pod="openstack/dnsmasq-dns-84f785c5f9-x2hfm" Jan 28 17:21:03 crc kubenswrapper[4811]: I0128 17:21:03.043253 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b78ns\" (UniqueName: \"kubernetes.io/projected/27af52ad-5ab2-40c7-bbe3-da26be337671-kube-api-access-b78ns\") pod \"dnsmasq-dns-84f785c5f9-x2hfm\" (UID: \"27af52ad-5ab2-40c7-bbe3-da26be337671\") " pod="openstack/dnsmasq-dns-84f785c5f9-x2hfm" Jan 28 17:21:03 crc kubenswrapper[4811]: I0128 17:21:03.105150 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84f785c5f9-x2hfm" Jan 28 17:21:04 crc kubenswrapper[4811]: I0128 17:21:03.587849 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84f785c5f9-x2hfm"] Jan 28 17:21:04 crc kubenswrapper[4811]: I0128 17:21:04.352272 4811 generic.go:334] "Generic (PLEG): container finished" podID="27af52ad-5ab2-40c7-bbe3-da26be337671" containerID="9eae49f8d7d4246d4a2fc5358ecf59e5326701f58608eda160c946d11256adfa" exitCode=0 Jan 28 17:21:04 crc kubenswrapper[4811]: I0128 17:21:04.358558 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84f785c5f9-x2hfm" event={"ID":"27af52ad-5ab2-40c7-bbe3-da26be337671","Type":"ContainerDied","Data":"9eae49f8d7d4246d4a2fc5358ecf59e5326701f58608eda160c946d11256adfa"} Jan 28 17:21:04 crc kubenswrapper[4811]: I0128 17:21:04.358603 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84f785c5f9-x2hfm" event={"ID":"27af52ad-5ab2-40c7-bbe3-da26be337671","Type":"ContainerStarted","Data":"8404811a6d794ba2ab93b2b6f05abab1a0e76ef259b666ce9acf381df36b7b0d"} Jan 28 17:21:05 crc kubenswrapper[4811]: I0128 17:21:05.368338 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84f785c5f9-x2hfm" event={"ID":"27af52ad-5ab2-40c7-bbe3-da26be337671","Type":"ContainerStarted","Data":"0f126e6c7406b44f4628f87c09faf6cc16954432020ced28886950f01581ffd9"} Jan 28 17:21:05 crc kubenswrapper[4811]: I0128 17:21:05.368635 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-84f785c5f9-x2hfm" Jan 28 17:21:05 crc kubenswrapper[4811]: I0128 17:21:05.384360 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-84f785c5f9-x2hfm" podStartSLOduration=3.384343195 podStartE2EDuration="3.384343195s" podCreationTimestamp="2026-01-28 17:21:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:21:05.382813783 +0000 UTC m=+5758.137177386" watchObservedRunningTime="2026-01-28 17:21:05.384343195 +0000 UTC m=+5758.138706768" Jan 28 17:21:09 crc kubenswrapper[4811]: I0128 17:21:09.339869 4811 scope.go:117] "RemoveContainer" containerID="3abd2ca52dcea4f0bad2eccd9e9279be836bcb0c1530a2433f35225ded0da091" Jan 28 17:21:09 crc kubenswrapper[4811]: E0128 17:21:09.340520 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:21:13 crc kubenswrapper[4811]: I0128 17:21:13.106316 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-84f785c5f9-x2hfm" Jan 28 17:21:13 crc kubenswrapper[4811]: I0128 17:21:13.225306 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b4c886967-f7ls6"] Jan 28 17:21:13 crc kubenswrapper[4811]: I0128 17:21:13.225576 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6b4c886967-f7ls6" podUID="10fa4a6b-1328-48af-a375-ad6486d2eff2" containerName="dnsmasq-dns" containerID="cri-o://a0f254f1d1cfdb701aa081f1de29bf741e92df4d4638fe1db765abd47b04f717" 
gracePeriod=10 Jan 28 17:21:13 crc kubenswrapper[4811]: I0128 17:21:13.440063 4811 generic.go:334] "Generic (PLEG): container finished" podID="10fa4a6b-1328-48af-a375-ad6486d2eff2" containerID="a0f254f1d1cfdb701aa081f1de29bf741e92df4d4638fe1db765abd47b04f717" exitCode=0 Jan 28 17:21:13 crc kubenswrapper[4811]: I0128 17:21:13.440111 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b4c886967-f7ls6" event={"ID":"10fa4a6b-1328-48af-a375-ad6486d2eff2","Type":"ContainerDied","Data":"a0f254f1d1cfdb701aa081f1de29bf741e92df4d4638fe1db765abd47b04f717"} Jan 28 17:21:13 crc kubenswrapper[4811]: I0128 17:21:13.752232 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b4c886967-f7ls6" Jan 28 17:21:13 crc kubenswrapper[4811]: I0128 17:21:13.907605 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10fa4a6b-1328-48af-a375-ad6486d2eff2-config\") pod \"10fa4a6b-1328-48af-a375-ad6486d2eff2\" (UID: \"10fa4a6b-1328-48af-a375-ad6486d2eff2\") " Jan 28 17:21:13 crc kubenswrapper[4811]: I0128 17:21:13.907751 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/10fa4a6b-1328-48af-a375-ad6486d2eff2-dns-svc\") pod \"10fa4a6b-1328-48af-a375-ad6486d2eff2\" (UID: \"10fa4a6b-1328-48af-a375-ad6486d2eff2\") " Jan 28 17:21:13 crc kubenswrapper[4811]: I0128 17:21:13.907818 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/10fa4a6b-1328-48af-a375-ad6486d2eff2-ovsdbserver-nb\") pod \"10fa4a6b-1328-48af-a375-ad6486d2eff2\" (UID: \"10fa4a6b-1328-48af-a375-ad6486d2eff2\") " Jan 28 17:21:13 crc kubenswrapper[4811]: I0128 17:21:13.907871 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jj27z\" (UniqueName: \"kubernetes.io/projected/10fa4a6b-1328-48af-a375-ad6486d2eff2-kube-api-access-jj27z\") pod \"10fa4a6b-1328-48af-a375-ad6486d2eff2\" (UID: \"10fa4a6b-1328-48af-a375-ad6486d2eff2\") " Jan 28 17:21:13 crc kubenswrapper[4811]: I0128 17:21:13.907887 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/10fa4a6b-1328-48af-a375-ad6486d2eff2-ovsdbserver-sb\") pod \"10fa4a6b-1328-48af-a375-ad6486d2eff2\" (UID: \"10fa4a6b-1328-48af-a375-ad6486d2eff2\") " Jan 28 17:21:13 crc kubenswrapper[4811]: I0128 17:21:13.913398 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10fa4a6b-1328-48af-a375-ad6486d2eff2-kube-api-access-jj27z" (OuterVolumeSpecName: "kube-api-access-jj27z") pod "10fa4a6b-1328-48af-a375-ad6486d2eff2" (UID: "10fa4a6b-1328-48af-a375-ad6486d2eff2"). InnerVolumeSpecName "kube-api-access-jj27z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:21:13 crc kubenswrapper[4811]: I0128 17:21:13.952904 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10fa4a6b-1328-48af-a375-ad6486d2eff2-config" (OuterVolumeSpecName: "config") pod "10fa4a6b-1328-48af-a375-ad6486d2eff2" (UID: "10fa4a6b-1328-48af-a375-ad6486d2eff2"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:21:13 crc kubenswrapper[4811]: I0128 17:21:13.956885 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10fa4a6b-1328-48af-a375-ad6486d2eff2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "10fa4a6b-1328-48af-a375-ad6486d2eff2" (UID: "10fa4a6b-1328-48af-a375-ad6486d2eff2"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:21:13 crc kubenswrapper[4811]: I0128 17:21:13.960425 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10fa4a6b-1328-48af-a375-ad6486d2eff2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "10fa4a6b-1328-48af-a375-ad6486d2eff2" (UID: "10fa4a6b-1328-48af-a375-ad6486d2eff2"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:21:13 crc kubenswrapper[4811]: I0128 17:21:13.973688 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10fa4a6b-1328-48af-a375-ad6486d2eff2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "10fa4a6b-1328-48af-a375-ad6486d2eff2" (UID: "10fa4a6b-1328-48af-a375-ad6486d2eff2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:21:14 crc kubenswrapper[4811]: I0128 17:21:14.009374 4811 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/10fa4a6b-1328-48af-a375-ad6486d2eff2-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:14 crc kubenswrapper[4811]: I0128 17:21:14.009408 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/10fa4a6b-1328-48af-a375-ad6486d2eff2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:14 crc kubenswrapper[4811]: I0128 17:21:14.009458 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jj27z\" (UniqueName: \"kubernetes.io/projected/10fa4a6b-1328-48af-a375-ad6486d2eff2-kube-api-access-jj27z\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:14 crc kubenswrapper[4811]: I0128 17:21:14.009471 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/10fa4a6b-1328-48af-a375-ad6486d2eff2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:14 crc kubenswrapper[4811]: I0128 17:21:14.009481 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10fa4a6b-1328-48af-a375-ad6486d2eff2-config\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:14 crc kubenswrapper[4811]: I0128 17:21:14.450557 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b4c886967-f7ls6" event={"ID":"10fa4a6b-1328-48af-a375-ad6486d2eff2","Type":"ContainerDied","Data":"f6e878a52839527008a6afbe866de054f02ebd4e77ecd6ea8e8f5f5a53eb4b99"} Jan 28 17:21:14 crc kubenswrapper[4811]: I0128 17:21:14.450870 4811 scope.go:117] "RemoveContainer" containerID="a0f254f1d1cfdb701aa081f1de29bf741e92df4d4638fe1db765abd47b04f717" Jan 28 17:21:14 crc kubenswrapper[4811]: I0128 17:21:14.451034 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b4c886967-f7ls6" Jan 28 17:21:14 crc kubenswrapper[4811]: I0128 17:21:14.480060 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b4c886967-f7ls6"] Jan 28 17:21:14 crc kubenswrapper[4811]: I0128 17:21:14.490156 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6b4c886967-f7ls6"] Jan 28 17:21:14 crc kubenswrapper[4811]: I0128 17:21:14.493301 4811 scope.go:117] "RemoveContainer" containerID="86d338e8030441eec65c067ec8b82a8500ea606f2f4fbfe17b6bad6421c0c01c" Jan 28 17:21:15 crc kubenswrapper[4811]: I0128 17:21:15.902940 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-cfmpw"] Jan 28 17:21:15 crc kubenswrapper[4811]: E0128 17:21:15.903821 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10fa4a6b-1328-48af-a375-ad6486d2eff2" containerName="dnsmasq-dns" Jan 28 17:21:15 crc kubenswrapper[4811]: I0128 17:21:15.903842 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="10fa4a6b-1328-48af-a375-ad6486d2eff2" containerName="dnsmasq-dns" Jan 28 17:21:15 crc kubenswrapper[4811]: E0128 17:21:15.903866 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10fa4a6b-1328-48af-a375-ad6486d2eff2" containerName="init" Jan 28 17:21:15 crc kubenswrapper[4811]: I0128 17:21:15.903874 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="10fa4a6b-1328-48af-a375-ad6486d2eff2" containerName="init" Jan 28 17:21:15 crc kubenswrapper[4811]: I0128 17:21:15.905911 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="10fa4a6b-1328-48af-a375-ad6486d2eff2" containerName="dnsmasq-dns" Jan 28 17:21:15 crc kubenswrapper[4811]: I0128 17:21:15.906738 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-cfmpw" Jan 28 17:21:15 crc kubenswrapper[4811]: I0128 17:21:15.919867 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-cfmpw"] Jan 28 17:21:16 crc kubenswrapper[4811]: I0128 17:21:16.004213 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-13c9-account-create-update-mstz8"] Jan 28 17:21:16 crc kubenswrapper[4811]: I0128 17:21:16.005629 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-13c9-account-create-update-mstz8" Jan 28 17:21:16 crc kubenswrapper[4811]: I0128 17:21:16.007711 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Jan 28 17:21:16 crc kubenswrapper[4811]: I0128 17:21:16.014372 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-13c9-account-create-update-mstz8"] Jan 28 17:21:16 crc kubenswrapper[4811]: I0128 17:21:16.047800 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndxdm\" (UniqueName: \"kubernetes.io/projected/edeedc0e-2bfa-41da-917a-3e4c0a476dce-kube-api-access-ndxdm\") pod \"cinder-db-create-cfmpw\" (UID: \"edeedc0e-2bfa-41da-917a-3e4c0a476dce\") " pod="openstack/cinder-db-create-cfmpw" Jan 28 17:21:16 crc kubenswrapper[4811]: I0128 17:21:16.047897 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/edeedc0e-2bfa-41da-917a-3e4c0a476dce-operator-scripts\") pod \"cinder-db-create-cfmpw\" (UID: \"edeedc0e-2bfa-41da-917a-3e4c0a476dce\") " pod="openstack/cinder-db-create-cfmpw" Jan 28 17:21:16 crc kubenswrapper[4811]: I0128 17:21:16.149479 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndxdm\" (UniqueName: \"kubernetes.io/projected/edeedc0e-2bfa-41da-917a-3e4c0a476dce-kube-api-access-ndxdm\") pod \"cinder-db-create-cfmpw\" (UID: \"edeedc0e-2bfa-41da-917a-3e4c0a476dce\") " pod="openstack/cinder-db-create-cfmpw" Jan 28 17:21:16 crc kubenswrapper[4811]: I0128 17:21:16.149550 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/edeedc0e-2bfa-41da-917a-3e4c0a476dce-operator-scripts\") pod \"cinder-db-create-cfmpw\" (UID: \"edeedc0e-2bfa-41da-917a-3e4c0a476dce\") " pod="openstack/cinder-db-create-cfmpw" Jan 28 17:21:16 crc kubenswrapper[4811]: I0128 17:21:16.149595 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cq85b\" (UniqueName: \"kubernetes.io/projected/3d9f16ac-879b-4d4e-885d-60725aa32ff0-kube-api-access-cq85b\") pod \"cinder-13c9-account-create-update-mstz8\" (UID: \"3d9f16ac-879b-4d4e-885d-60725aa32ff0\") " pod="openstack/cinder-13c9-account-create-update-mstz8" Jan 28 17:21:16 crc kubenswrapper[4811]: I0128 17:21:16.149680 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3d9f16ac-879b-4d4e-885d-60725aa32ff0-operator-scripts\") pod \"cinder-13c9-account-create-update-mstz8\" (UID: \"3d9f16ac-879b-4d4e-885d-60725aa32ff0\") " pod="openstack/cinder-13c9-account-create-update-mstz8" Jan 28 17:21:16 crc kubenswrapper[4811]: I0128 17:21:16.150325 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/edeedc0e-2bfa-41da-917a-3e4c0a476dce-operator-scripts\") pod \"cinder-db-create-cfmpw\" (UID: \"edeedc0e-2bfa-41da-917a-3e4c0a476dce\") " pod="openstack/cinder-db-create-cfmpw" Jan 28 17:21:16 crc kubenswrapper[4811]: I0128 17:21:16.171554 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndxdm\" (UniqueName: \"kubernetes.io/projected/edeedc0e-2bfa-41da-917a-3e4c0a476dce-kube-api-access-ndxdm\") pod \"cinder-db-create-cfmpw\" (UID: 
\"edeedc0e-2bfa-41da-917a-3e4c0a476dce\") " pod="openstack/cinder-db-create-cfmpw" Jan 28 17:21:16 crc kubenswrapper[4811]: I0128 17:21:16.234943 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-cfmpw" Jan 28 17:21:16 crc kubenswrapper[4811]: I0128 17:21:16.251363 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3d9f16ac-879b-4d4e-885d-60725aa32ff0-operator-scripts\") pod \"cinder-13c9-account-create-update-mstz8\" (UID: \"3d9f16ac-879b-4d4e-885d-60725aa32ff0\") " pod="openstack/cinder-13c9-account-create-update-mstz8" Jan 28 17:21:16 crc kubenswrapper[4811]: I0128 17:21:16.251521 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cq85b\" (UniqueName: \"kubernetes.io/projected/3d9f16ac-879b-4d4e-885d-60725aa32ff0-kube-api-access-cq85b\") pod \"cinder-13c9-account-create-update-mstz8\" (UID: \"3d9f16ac-879b-4d4e-885d-60725aa32ff0\") " pod="openstack/cinder-13c9-account-create-update-mstz8" Jan 28 17:21:16 crc kubenswrapper[4811]: I0128 17:21:16.252355 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3d9f16ac-879b-4d4e-885d-60725aa32ff0-operator-scripts\") pod \"cinder-13c9-account-create-update-mstz8\" (UID: \"3d9f16ac-879b-4d4e-885d-60725aa32ff0\") " pod="openstack/cinder-13c9-account-create-update-mstz8" Jan 28 17:21:16 crc kubenswrapper[4811]: I0128 17:21:16.270194 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cq85b\" (UniqueName: \"kubernetes.io/projected/3d9f16ac-879b-4d4e-885d-60725aa32ff0-kube-api-access-cq85b\") pod \"cinder-13c9-account-create-update-mstz8\" (UID: \"3d9f16ac-879b-4d4e-885d-60725aa32ff0\") " pod="openstack/cinder-13c9-account-create-update-mstz8" Jan 28 17:21:16 crc kubenswrapper[4811]: I0128 17:21:16.327111 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-13c9-account-create-update-mstz8" Jan 28 17:21:16 crc kubenswrapper[4811]: I0128 17:21:16.353496 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10fa4a6b-1328-48af-a375-ad6486d2eff2" path="/var/lib/kubelet/pods/10fa4a6b-1328-48af-a375-ad6486d2eff2/volumes" Jan 28 17:21:16 crc kubenswrapper[4811]: I0128 17:21:16.698506 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-cfmpw"] Jan 28 17:21:16 crc kubenswrapper[4811]: I0128 17:21:16.799024 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-13c9-account-create-update-mstz8"] Jan 28 17:21:16 crc kubenswrapper[4811]: W0128 17:21:16.801150 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3d9f16ac_879b_4d4e_885d_60725aa32ff0.slice/crio-31aaa62e09909babb4fe92ad84d4604bd3c8014f4bae46fe25188dfdf19e64b7 WatchSource:0}: Error finding container 31aaa62e09909babb4fe92ad84d4604bd3c8014f4bae46fe25188dfdf19e64b7: Status 404 returned error can't find the container with id 31aaa62e09909babb4fe92ad84d4604bd3c8014f4bae46fe25188dfdf19e64b7 Jan 28 17:21:17 crc kubenswrapper[4811]: I0128 17:21:17.483992 4811 generic.go:334] "Generic (PLEG): container finished" podID="3d9f16ac-879b-4d4e-885d-60725aa32ff0" containerID="e5be4970d675ea3a1b30f54d59a28b25c5eba929d4ca769f28d06abc1a9bbce6" exitCode=0 Jan 28 17:21:17 crc kubenswrapper[4811]: I0128 17:21:17.484145 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-13c9-account-create-update-mstz8" event={"ID":"3d9f16ac-879b-4d4e-885d-60725aa32ff0","Type":"ContainerDied","Data":"e5be4970d675ea3a1b30f54d59a28b25c5eba929d4ca769f28d06abc1a9bbce6"} Jan 28 17:21:17 crc kubenswrapper[4811]: I0128 17:21:17.484255 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-13c9-account-create-update-mstz8" event={"ID":"3d9f16ac-879b-4d4e-885d-60725aa32ff0","Type":"ContainerStarted","Data":"31aaa62e09909babb4fe92ad84d4604bd3c8014f4bae46fe25188dfdf19e64b7"} Jan 28 17:21:17 crc kubenswrapper[4811]: I0128 17:21:17.486371 4811 generic.go:334] "Generic (PLEG): container finished" podID="edeedc0e-2bfa-41da-917a-3e4c0a476dce" containerID="c82ff5392b67c12645ec2497ef567d229bf549428cce643b04be5cd087d861d7" exitCode=0 Jan 28 17:21:17 crc kubenswrapper[4811]: I0128 17:21:17.486424 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-cfmpw" event={"ID":"edeedc0e-2bfa-41da-917a-3e4c0a476dce","Type":"ContainerDied","Data":"c82ff5392b67c12645ec2497ef567d229bf549428cce643b04be5cd087d861d7"} Jan 28 17:21:17 crc kubenswrapper[4811]: I0128 17:21:17.486518 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-cfmpw" event={"ID":"edeedc0e-2bfa-41da-917a-3e4c0a476dce","Type":"ContainerStarted","Data":"0db3ed559e25eedc1eb6e3113da10e6e9ae08a29e566d9bcef6d0434c8033062"} Jan 28 17:21:18 crc kubenswrapper[4811]: I0128 17:21:18.906275 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-13c9-account-create-update-mstz8" Jan 28 17:21:18 crc kubenswrapper[4811]: I0128 17:21:18.914323 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-cfmpw" Jan 28 17:21:18 crc kubenswrapper[4811]: I0128 17:21:18.997129 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3d9f16ac-879b-4d4e-885d-60725aa32ff0-operator-scripts\") pod \"3d9f16ac-879b-4d4e-885d-60725aa32ff0\" (UID: \"3d9f16ac-879b-4d4e-885d-60725aa32ff0\") " Jan 28 17:21:18 crc kubenswrapper[4811]: I0128 17:21:18.997203 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cq85b\" (UniqueName: \"kubernetes.io/projected/3d9f16ac-879b-4d4e-885d-60725aa32ff0-kube-api-access-cq85b\") pod \"3d9f16ac-879b-4d4e-885d-60725aa32ff0\" (UID: \"3d9f16ac-879b-4d4e-885d-60725aa32ff0\") " Jan 28 17:21:18 crc kubenswrapper[4811]: I0128 17:21:18.997290 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/edeedc0e-2bfa-41da-917a-3e4c0a476dce-operator-scripts\") pod \"edeedc0e-2bfa-41da-917a-3e4c0a476dce\" (UID: \"edeedc0e-2bfa-41da-917a-3e4c0a476dce\") " Jan 28 17:21:18 crc kubenswrapper[4811]: I0128 17:21:18.997348 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ndxdm\" (UniqueName: \"kubernetes.io/projected/edeedc0e-2bfa-41da-917a-3e4c0a476dce-kube-api-access-ndxdm\") pod \"edeedc0e-2bfa-41da-917a-3e4c0a476dce\" (UID: \"edeedc0e-2bfa-41da-917a-3e4c0a476dce\") " Jan 28 17:21:18 crc kubenswrapper[4811]: I0128 17:21:18.998117 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/edeedc0e-2bfa-41da-917a-3e4c0a476dce-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "edeedc0e-2bfa-41da-917a-3e4c0a476dce" (UID: "edeedc0e-2bfa-41da-917a-3e4c0a476dce"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:21:18 crc kubenswrapper[4811]: I0128 17:21:18.998627 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d9f16ac-879b-4d4e-885d-60725aa32ff0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3d9f16ac-879b-4d4e-885d-60725aa32ff0" (UID: "3d9f16ac-879b-4d4e-885d-60725aa32ff0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:21:19 crc kubenswrapper[4811]: I0128 17:21:19.003554 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/edeedc0e-2bfa-41da-917a-3e4c0a476dce-kube-api-access-ndxdm" (OuterVolumeSpecName: "kube-api-access-ndxdm") pod "edeedc0e-2bfa-41da-917a-3e4c0a476dce" (UID: "edeedc0e-2bfa-41da-917a-3e4c0a476dce"). InnerVolumeSpecName "kube-api-access-ndxdm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:21:19 crc kubenswrapper[4811]: I0128 17:21:19.003644 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d9f16ac-879b-4d4e-885d-60725aa32ff0-kube-api-access-cq85b" (OuterVolumeSpecName: "kube-api-access-cq85b") pod "3d9f16ac-879b-4d4e-885d-60725aa32ff0" (UID: "3d9f16ac-879b-4d4e-885d-60725aa32ff0"). InnerVolumeSpecName "kube-api-access-cq85b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:21:19 crc kubenswrapper[4811]: I0128 17:21:19.100187 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ndxdm\" (UniqueName: \"kubernetes.io/projected/edeedc0e-2bfa-41da-917a-3e4c0a476dce-kube-api-access-ndxdm\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:19 crc kubenswrapper[4811]: I0128 17:21:19.100428 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3d9f16ac-879b-4d4e-885d-60725aa32ff0-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:19 crc kubenswrapper[4811]: I0128 17:21:19.100544 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cq85b\" (UniqueName: \"kubernetes.io/projected/3d9f16ac-879b-4d4e-885d-60725aa32ff0-kube-api-access-cq85b\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:19 crc kubenswrapper[4811]: I0128 17:21:19.100615 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/edeedc0e-2bfa-41da-917a-3e4c0a476dce-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:19 crc kubenswrapper[4811]: I0128 17:21:19.509728 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-cfmpw" Jan 28 17:21:19 crc kubenswrapper[4811]: I0128 17:21:19.509852 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-cfmpw" event={"ID":"edeedc0e-2bfa-41da-917a-3e4c0a476dce","Type":"ContainerDied","Data":"0db3ed559e25eedc1eb6e3113da10e6e9ae08a29e566d9bcef6d0434c8033062"} Jan 28 17:21:19 crc kubenswrapper[4811]: I0128 17:21:19.509954 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0db3ed559e25eedc1eb6e3113da10e6e9ae08a29e566d9bcef6d0434c8033062" Jan 28 17:21:19 crc kubenswrapper[4811]: I0128 17:21:19.513308 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-13c9-account-create-update-mstz8" event={"ID":"3d9f16ac-879b-4d4e-885d-60725aa32ff0","Type":"ContainerDied","Data":"31aaa62e09909babb4fe92ad84d4604bd3c8014f4bae46fe25188dfdf19e64b7"} Jan 28 17:21:19 crc kubenswrapper[4811]: I0128 17:21:19.513366 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="31aaa62e09909babb4fe92ad84d4604bd3c8014f4bae46fe25188dfdf19e64b7" Jan 28 17:21:19 crc kubenswrapper[4811]: I0128 17:21:19.513379 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-13c9-account-create-update-mstz8" Jan 28 17:21:21 crc kubenswrapper[4811]: I0128 17:21:21.243875 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-5mqx7"] Jan 28 17:21:21 crc kubenswrapper[4811]: E0128 17:21:21.244456 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edeedc0e-2bfa-41da-917a-3e4c0a476dce" containerName="mariadb-database-create" Jan 28 17:21:21 crc kubenswrapper[4811]: I0128 17:21:21.244469 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="edeedc0e-2bfa-41da-917a-3e4c0a476dce" containerName="mariadb-database-create" Jan 28 17:21:21 crc kubenswrapper[4811]: E0128 17:21:21.244481 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d9f16ac-879b-4d4e-885d-60725aa32ff0" containerName="mariadb-account-create-update" Jan 28 17:21:21 crc kubenswrapper[4811]: I0128 17:21:21.244487 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d9f16ac-879b-4d4e-885d-60725aa32ff0" containerName="mariadb-account-create-update" Jan 28 17:21:21 crc kubenswrapper[4811]: I0128 17:21:21.244654 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d9f16ac-879b-4d4e-885d-60725aa32ff0" containerName="mariadb-account-create-update" Jan 28 17:21:21 crc kubenswrapper[4811]: I0128 17:21:21.244672 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="edeedc0e-2bfa-41da-917a-3e4c0a476dce" containerName="mariadb-database-create" Jan 28 17:21:21 crc kubenswrapper[4811]: I0128 17:21:21.245215 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-5mqx7" Jan 28 17:21:21 crc kubenswrapper[4811]: I0128 17:21:21.247116 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Jan 28 17:21:21 crc kubenswrapper[4811]: I0128 17:21:21.247870 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-txgqv" Jan 28 17:21:21 crc kubenswrapper[4811]: I0128 17:21:21.263551 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-5mqx7"] Jan 28 17:21:21 crc kubenswrapper[4811]: I0128 17:21:21.267641 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Jan 28 17:21:21 crc kubenswrapper[4811]: I0128 17:21:21.336379 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce15f0de-cdee-4305-a526-f8c9e146aa60-scripts\") pod \"cinder-db-sync-5mqx7\" (UID: \"ce15f0de-cdee-4305-a526-f8c9e146aa60\") " pod="openstack/cinder-db-sync-5mqx7" Jan 28 17:21:21 crc kubenswrapper[4811]: I0128 17:21:21.336508 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ce15f0de-cdee-4305-a526-f8c9e146aa60-etc-machine-id\") pod \"cinder-db-sync-5mqx7\" (UID: \"ce15f0de-cdee-4305-a526-f8c9e146aa60\") " pod="openstack/cinder-db-sync-5mqx7" Jan 28 17:21:21 crc kubenswrapper[4811]: I0128 17:21:21.336558 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce15f0de-cdee-4305-a526-f8c9e146aa60-config-data\") pod \"cinder-db-sync-5mqx7\" (UID: \"ce15f0de-cdee-4305-a526-f8c9e146aa60\") " pod="openstack/cinder-db-sync-5mqx7" Jan 28 17:21:21 crc kubenswrapper[4811]: I0128 17:21:21.336580 4811 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce15f0de-cdee-4305-a526-f8c9e146aa60-combined-ca-bundle\") pod \"cinder-db-sync-5mqx7\" (UID: \"ce15f0de-cdee-4305-a526-f8c9e146aa60\") " pod="openstack/cinder-db-sync-5mqx7" Jan 28 17:21:21 crc kubenswrapper[4811]: I0128 17:21:21.336609 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ce15f0de-cdee-4305-a526-f8c9e146aa60-db-sync-config-data\") pod \"cinder-db-sync-5mqx7\" (UID: \"ce15f0de-cdee-4305-a526-f8c9e146aa60\") " pod="openstack/cinder-db-sync-5mqx7" Jan 28 17:21:21 crc kubenswrapper[4811]: I0128 17:21:21.336627 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmvw8\" (UniqueName: \"kubernetes.io/projected/ce15f0de-cdee-4305-a526-f8c9e146aa60-kube-api-access-dmvw8\") pod \"cinder-db-sync-5mqx7\" (UID: \"ce15f0de-cdee-4305-a526-f8c9e146aa60\") " pod="openstack/cinder-db-sync-5mqx7" Jan 28 17:21:21 crc kubenswrapper[4811]: I0128 17:21:21.438479 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ce15f0de-cdee-4305-a526-f8c9e146aa60-db-sync-config-data\") pod \"cinder-db-sync-5mqx7\" (UID: \"ce15f0de-cdee-4305-a526-f8c9e146aa60\") " pod="openstack/cinder-db-sync-5mqx7" Jan 28 17:21:21 crc kubenswrapper[4811]: I0128 17:21:21.438764 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmvw8\" (UniqueName: \"kubernetes.io/projected/ce15f0de-cdee-4305-a526-f8c9e146aa60-kube-api-access-dmvw8\") pod \"cinder-db-sync-5mqx7\" (UID: \"ce15f0de-cdee-4305-a526-f8c9e146aa60\") " pod="openstack/cinder-db-sync-5mqx7" Jan 28 17:21:21 crc kubenswrapper[4811]: I0128 17:21:21.438876 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce15f0de-cdee-4305-a526-f8c9e146aa60-scripts\") pod \"cinder-db-sync-5mqx7\" (UID: \"ce15f0de-cdee-4305-a526-f8c9e146aa60\") " pod="openstack/cinder-db-sync-5mqx7" Jan 28 17:21:21 crc kubenswrapper[4811]: I0128 17:21:21.439055 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ce15f0de-cdee-4305-a526-f8c9e146aa60-etc-machine-id\") pod \"cinder-db-sync-5mqx7\" (UID: \"ce15f0de-cdee-4305-a526-f8c9e146aa60\") " pod="openstack/cinder-db-sync-5mqx7" Jan 28 17:21:21 crc kubenswrapper[4811]: I0128 17:21:21.439158 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ce15f0de-cdee-4305-a526-f8c9e146aa60-etc-machine-id\") pod \"cinder-db-sync-5mqx7\" (UID: \"ce15f0de-cdee-4305-a526-f8c9e146aa60\") " pod="openstack/cinder-db-sync-5mqx7" Jan 28 17:21:21 crc kubenswrapper[4811]: I0128 17:21:21.439201 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce15f0de-cdee-4305-a526-f8c9e146aa60-config-data\") pod \"cinder-db-sync-5mqx7\" (UID: \"ce15f0de-cdee-4305-a526-f8c9e146aa60\") " pod="openstack/cinder-db-sync-5mqx7" Jan 28 17:21:21 crc kubenswrapper[4811]: I0128 17:21:21.439416 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ce15f0de-cdee-4305-a526-f8c9e146aa60-combined-ca-bundle\") pod \"cinder-db-sync-5mqx7\" (UID: \"ce15f0de-cdee-4305-a526-f8c9e146aa60\") " pod="openstack/cinder-db-sync-5mqx7" Jan 28 17:21:21 crc kubenswrapper[4811]: I0128 17:21:21.446068 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ce15f0de-cdee-4305-a526-f8c9e146aa60-db-sync-config-data\") pod \"cinder-db-sync-5mqx7\" (UID: \"ce15f0de-cdee-4305-a526-f8c9e146aa60\") " pod="openstack/cinder-db-sync-5mqx7" Jan 28 17:21:21 crc kubenswrapper[4811]: I0128 17:21:21.446091 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce15f0de-cdee-4305-a526-f8c9e146aa60-scripts\") pod \"cinder-db-sync-5mqx7\" (UID: \"ce15f0de-cdee-4305-a526-f8c9e146aa60\") " pod="openstack/cinder-db-sync-5mqx7" Jan 28 17:21:21 crc kubenswrapper[4811]: I0128 17:21:21.446209 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce15f0de-cdee-4305-a526-f8c9e146aa60-combined-ca-bundle\") pod \"cinder-db-sync-5mqx7\" (UID: \"ce15f0de-cdee-4305-a526-f8c9e146aa60\") " pod="openstack/cinder-db-sync-5mqx7" Jan 28 17:21:21 crc kubenswrapper[4811]: I0128 17:21:21.456382 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce15f0de-cdee-4305-a526-f8c9e146aa60-config-data\") pod \"cinder-db-sync-5mqx7\" (UID: \"ce15f0de-cdee-4305-a526-f8c9e146aa60\") " pod="openstack/cinder-db-sync-5mqx7" Jan 28 17:21:21 crc kubenswrapper[4811]: I0128 17:21:21.456806 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmvw8\" (UniqueName: \"kubernetes.io/projected/ce15f0de-cdee-4305-a526-f8c9e146aa60-kube-api-access-dmvw8\") pod \"cinder-db-sync-5mqx7\" (UID: \"ce15f0de-cdee-4305-a526-f8c9e146aa60\") " pod="openstack/cinder-db-sync-5mqx7" Jan 28 17:21:21 crc kubenswrapper[4811]: I0128 17:21:21.564076 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-5mqx7" Jan 28 17:21:22 crc kubenswrapper[4811]: W0128 17:21:22.062047 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podce15f0de_cdee_4305_a526_f8c9e146aa60.slice/crio-33d07ea632a7abd22083cb897549e768e4d622e8b0c0620d1992085738a0aab7 WatchSource:0}: Error finding container 33d07ea632a7abd22083cb897549e768e4d622e8b0c0620d1992085738a0aab7: Status 404 returned error can't find the container with id 33d07ea632a7abd22083cb897549e768e4d622e8b0c0620d1992085738a0aab7 Jan 28 17:21:22 crc kubenswrapper[4811]: I0128 17:21:22.066686 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-5mqx7"] Jan 28 17:21:22 crc kubenswrapper[4811]: I0128 17:21:22.340104 4811 scope.go:117] "RemoveContainer" containerID="3abd2ca52dcea4f0bad2eccd9e9279be836bcb0c1530a2433f35225ded0da091" Jan 28 17:21:22 crc kubenswrapper[4811]: E0128 17:21:22.340773 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:21:22 crc kubenswrapper[4811]: I0128 17:21:22.539276 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-5mqx7" event={"ID":"ce15f0de-cdee-4305-a526-f8c9e146aa60","Type":"ContainerStarted","Data":"33d07ea632a7abd22083cb897549e768e4d622e8b0c0620d1992085738a0aab7"} Jan 28 17:21:23 crc kubenswrapper[4811]: I0128 17:21:23.551094 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-5mqx7" event={"ID":"ce15f0de-cdee-4305-a526-f8c9e146aa60","Type":"ContainerStarted","Data":"42ea12e57d1b458c7b857839c782e307c67ae0ed120a8ba54f6d34339c29e6d9"} Jan 28 17:21:23 crc kubenswrapper[4811]: I0128 17:21:23.580179 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-5mqx7" podStartSLOduration=2.580159989 podStartE2EDuration="2.580159989s" podCreationTimestamp="2026-01-28 17:21:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:21:23.57434032 +0000 UTC m=+5776.328703903" watchObservedRunningTime="2026-01-28 17:21:23.580159989 +0000 UTC m=+5776.334523572" Jan 28 17:21:25 crc kubenswrapper[4811]: I0128 17:21:25.570924 4811 generic.go:334] "Generic (PLEG): container finished" podID="ce15f0de-cdee-4305-a526-f8c9e146aa60" containerID="42ea12e57d1b458c7b857839c782e307c67ae0ed120a8ba54f6d34339c29e6d9" exitCode=0 Jan 28 17:21:25 crc kubenswrapper[4811]: I0128 17:21:25.571013 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-5mqx7" event={"ID":"ce15f0de-cdee-4305-a526-f8c9e146aa60","Type":"ContainerDied","Data":"42ea12e57d1b458c7b857839c782e307c67ae0ed120a8ba54f6d34339c29e6d9"} Jan 28 17:21:26 crc kubenswrapper[4811]: I0128 17:21:26.986665 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-5mqx7" Jan 28 17:21:27 crc kubenswrapper[4811]: I0128 17:21:27.147478 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce15f0de-cdee-4305-a526-f8c9e146aa60-config-data\") pod \"ce15f0de-cdee-4305-a526-f8c9e146aa60\" (UID: \"ce15f0de-cdee-4305-a526-f8c9e146aa60\") " Jan 28 17:21:27 crc kubenswrapper[4811]: I0128 17:21:27.147538 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce15f0de-cdee-4305-a526-f8c9e146aa60-scripts\") pod \"ce15f0de-cdee-4305-a526-f8c9e146aa60\" (UID: \"ce15f0de-cdee-4305-a526-f8c9e146aa60\") " Jan 28 17:21:27 crc kubenswrapper[4811]: I0128 17:21:27.147558 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ce15f0de-cdee-4305-a526-f8c9e146aa60-db-sync-config-data\") pod \"ce15f0de-cdee-4305-a526-f8c9e146aa60\" (UID: \"ce15f0de-cdee-4305-a526-f8c9e146aa60\") " Jan 28 17:21:27 crc kubenswrapper[4811]: I0128 17:21:27.147629 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmvw8\" (UniqueName: \"kubernetes.io/projected/ce15f0de-cdee-4305-a526-f8c9e146aa60-kube-api-access-dmvw8\") pod \"ce15f0de-cdee-4305-a526-f8c9e146aa60\" (UID: \"ce15f0de-cdee-4305-a526-f8c9e146aa60\") " Jan 28 17:21:27 crc kubenswrapper[4811]: I0128 17:21:27.147715 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ce15f0de-cdee-4305-a526-f8c9e146aa60-etc-machine-id\") pod \"ce15f0de-cdee-4305-a526-f8c9e146aa60\" (UID: \"ce15f0de-cdee-4305-a526-f8c9e146aa60\") " Jan 28 17:21:27 crc kubenswrapper[4811]: I0128 17:21:27.147772 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce15f0de-cdee-4305-a526-f8c9e146aa60-combined-ca-bundle\") pod \"ce15f0de-cdee-4305-a526-f8c9e146aa60\" (UID: \"ce15f0de-cdee-4305-a526-f8c9e146aa60\") " Jan 28 17:21:27 crc kubenswrapper[4811]: I0128 17:21:27.147924 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ce15f0de-cdee-4305-a526-f8c9e146aa60-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "ce15f0de-cdee-4305-a526-f8c9e146aa60" (UID: "ce15f0de-cdee-4305-a526-f8c9e146aa60"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 17:21:27 crc kubenswrapper[4811]: I0128 17:21:27.148487 4811 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ce15f0de-cdee-4305-a526-f8c9e146aa60-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:27 crc kubenswrapper[4811]: I0128 17:21:27.154830 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce15f0de-cdee-4305-a526-f8c9e146aa60-scripts" (OuterVolumeSpecName: "scripts") pod "ce15f0de-cdee-4305-a526-f8c9e146aa60" (UID: "ce15f0de-cdee-4305-a526-f8c9e146aa60"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:21:27 crc kubenswrapper[4811]: I0128 17:21:27.155234 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce15f0de-cdee-4305-a526-f8c9e146aa60-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "ce15f0de-cdee-4305-a526-f8c9e146aa60" (UID: "ce15f0de-cdee-4305-a526-f8c9e146aa60"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:21:27 crc kubenswrapper[4811]: I0128 17:21:27.156573 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce15f0de-cdee-4305-a526-f8c9e146aa60-kube-api-access-dmvw8" (OuterVolumeSpecName: "kube-api-access-dmvw8") pod "ce15f0de-cdee-4305-a526-f8c9e146aa60" (UID: "ce15f0de-cdee-4305-a526-f8c9e146aa60"). InnerVolumeSpecName "kube-api-access-dmvw8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:21:27 crc kubenswrapper[4811]: I0128 17:21:27.180600 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce15f0de-cdee-4305-a526-f8c9e146aa60-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ce15f0de-cdee-4305-a526-f8c9e146aa60" (UID: "ce15f0de-cdee-4305-a526-f8c9e146aa60"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:21:27 crc kubenswrapper[4811]: I0128 17:21:27.199748 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce15f0de-cdee-4305-a526-f8c9e146aa60-config-data" (OuterVolumeSpecName: "config-data") pod "ce15f0de-cdee-4305-a526-f8c9e146aa60" (UID: "ce15f0de-cdee-4305-a526-f8c9e146aa60"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:21:27 crc kubenswrapper[4811]: I0128 17:21:27.250474 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce15f0de-cdee-4305-a526-f8c9e146aa60-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:27 crc kubenswrapper[4811]: I0128 17:21:27.250504 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce15f0de-cdee-4305-a526-f8c9e146aa60-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:27 crc kubenswrapper[4811]: I0128 17:21:27.250516 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce15f0de-cdee-4305-a526-f8c9e146aa60-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:27 crc kubenswrapper[4811]: I0128 17:21:27.250526 4811 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ce15f0de-cdee-4305-a526-f8c9e146aa60-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:27 crc kubenswrapper[4811]: I0128 17:21:27.250536 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmvw8\" (UniqueName: \"kubernetes.io/projected/ce15f0de-cdee-4305-a526-f8c9e146aa60-kube-api-access-dmvw8\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:27 crc kubenswrapper[4811]: I0128 17:21:27.591103 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-5mqx7" event={"ID":"ce15f0de-cdee-4305-a526-f8c9e146aa60","Type":"ContainerDied","Data":"33d07ea632a7abd22083cb897549e768e4d622e8b0c0620d1992085738a0aab7"} Jan 28 17:21:27 crc kubenswrapper[4811]: I0128 17:21:27.591338 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="33d07ea632a7abd22083cb897549e768e4d622e8b0c0620d1992085738a0aab7" Jan 28 17:21:27 crc kubenswrapper[4811]: I0128 17:21:27.591145 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-5mqx7" Jan 28 17:21:27 crc kubenswrapper[4811]: I0128 17:21:27.955542 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86bcb6d7c9-hdsrp"] Jan 28 17:21:27 crc kubenswrapper[4811]: E0128 17:21:27.956089 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce15f0de-cdee-4305-a526-f8c9e146aa60" containerName="cinder-db-sync" Jan 28 17:21:27 crc kubenswrapper[4811]: I0128 17:21:27.956116 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce15f0de-cdee-4305-a526-f8c9e146aa60" containerName="cinder-db-sync" Jan 28 17:21:27 crc kubenswrapper[4811]: I0128 17:21:27.956374 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce15f0de-cdee-4305-a526-f8c9e146aa60" containerName="cinder-db-sync" Jan 28 17:21:27 crc kubenswrapper[4811]: I0128 17:21:27.963925 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86bcb6d7c9-hdsrp" Jan 28 17:21:27 crc kubenswrapper[4811]: I0128 17:21:27.970866 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86bcb6d7c9-hdsrp"] Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.066475 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c71148c2-9145-4207-841f-f4931e513205-ovsdbserver-sb\") pod \"dnsmasq-dns-86bcb6d7c9-hdsrp\" (UID: \"c71148c2-9145-4207-841f-f4931e513205\") " pod="openstack/dnsmasq-dns-86bcb6d7c9-hdsrp" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.066600 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c71148c2-9145-4207-841f-f4931e513205-dns-svc\") pod \"dnsmasq-dns-86bcb6d7c9-hdsrp\" (UID: \"c71148c2-9145-4207-841f-f4931e513205\") " pod="openstack/dnsmasq-dns-86bcb6d7c9-hdsrp" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.066638 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c71148c2-9145-4207-841f-f4931e513205-config\") pod \"dnsmasq-dns-86bcb6d7c9-hdsrp\" (UID: \"c71148c2-9145-4207-841f-f4931e513205\") " pod="openstack/dnsmasq-dns-86bcb6d7c9-hdsrp" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.066672 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c71148c2-9145-4207-841f-f4931e513205-ovsdbserver-nb\") pod \"dnsmasq-dns-86bcb6d7c9-hdsrp\" (UID: \"c71148c2-9145-4207-841f-f4931e513205\") " pod="openstack/dnsmasq-dns-86bcb6d7c9-hdsrp" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.066716 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxxd5\" (UniqueName: \"kubernetes.io/projected/c71148c2-9145-4207-841f-f4931e513205-kube-api-access-wxxd5\") pod \"dnsmasq-dns-86bcb6d7c9-hdsrp\" (UID: \"c71148c2-9145-4207-841f-f4931e513205\") " pod="openstack/dnsmasq-dns-86bcb6d7c9-hdsrp" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.089870 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.092339 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.100508 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-txgqv" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.100568 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.100668 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.100756 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.109673 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.167925 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c71148c2-9145-4207-841f-f4931e513205-config\") pod \"dnsmasq-dns-86bcb6d7c9-hdsrp\" (UID: \"c71148c2-9145-4207-841f-f4931e513205\") " pod="openstack/dnsmasq-dns-86bcb6d7c9-hdsrp" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.168012 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c71148c2-9145-4207-841f-f4931e513205-ovsdbserver-nb\") pod \"dnsmasq-dns-86bcb6d7c9-hdsrp\" (UID: \"c71148c2-9145-4207-841f-f4931e513205\") " pod="openstack/dnsmasq-dns-86bcb6d7c9-hdsrp" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.168062 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxxd5\" (UniqueName: \"kubernetes.io/projected/c71148c2-9145-4207-841f-f4931e513205-kube-api-access-wxxd5\") pod \"dnsmasq-dns-86bcb6d7c9-hdsrp\" (UID: \"c71148c2-9145-4207-841f-f4931e513205\") " pod="openstack/dnsmasq-dns-86bcb6d7c9-hdsrp" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.168108 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/017a6c23-059b-4451-a487-660ef4033bc1-scripts\") pod \"cinder-api-0\" (UID: \"017a6c23-059b-4451-a487-660ef4033bc1\") " pod="openstack/cinder-api-0" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.168128 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/017a6c23-059b-4451-a487-660ef4033bc1-config-data\") pod \"cinder-api-0\" (UID: \"017a6c23-059b-4451-a487-660ef4033bc1\") " pod="openstack/cinder-api-0" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.168147 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/017a6c23-059b-4451-a487-660ef4033bc1-logs\") pod \"cinder-api-0\" (UID: \"017a6c23-059b-4451-a487-660ef4033bc1\") " pod="openstack/cinder-api-0" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.168212 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-glff5\" (UniqueName: \"kubernetes.io/projected/017a6c23-059b-4451-a487-660ef4033bc1-kube-api-access-glff5\") pod \"cinder-api-0\" (UID: \"017a6c23-059b-4451-a487-660ef4033bc1\") " pod="openstack/cinder-api-0" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 
17:21:28.168364 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/017a6c23-059b-4451-a487-660ef4033bc1-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"017a6c23-059b-4451-a487-660ef4033bc1\") " pod="openstack/cinder-api-0" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.168526 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c71148c2-9145-4207-841f-f4931e513205-ovsdbserver-sb\") pod \"dnsmasq-dns-86bcb6d7c9-hdsrp\" (UID: \"c71148c2-9145-4207-841f-f4931e513205\") " pod="openstack/dnsmasq-dns-86bcb6d7c9-hdsrp" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.168585 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/017a6c23-059b-4451-a487-660ef4033bc1-config-data-custom\") pod \"cinder-api-0\" (UID: \"017a6c23-059b-4451-a487-660ef4033bc1\") " pod="openstack/cinder-api-0" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.168815 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c71148c2-9145-4207-841f-f4931e513205-dns-svc\") pod \"dnsmasq-dns-86bcb6d7c9-hdsrp\" (UID: \"c71148c2-9145-4207-841f-f4931e513205\") " pod="openstack/dnsmasq-dns-86bcb6d7c9-hdsrp" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.168858 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/017a6c23-059b-4451-a487-660ef4033bc1-etc-machine-id\") pod \"cinder-api-0\" (UID: \"017a6c23-059b-4451-a487-660ef4033bc1\") " pod="openstack/cinder-api-0" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.169069 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c71148c2-9145-4207-841f-f4931e513205-config\") pod \"dnsmasq-dns-86bcb6d7c9-hdsrp\" (UID: \"c71148c2-9145-4207-841f-f4931e513205\") " pod="openstack/dnsmasq-dns-86bcb6d7c9-hdsrp" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.169539 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c71148c2-9145-4207-841f-f4931e513205-ovsdbserver-sb\") pod \"dnsmasq-dns-86bcb6d7c9-hdsrp\" (UID: \"c71148c2-9145-4207-841f-f4931e513205\") " pod="openstack/dnsmasq-dns-86bcb6d7c9-hdsrp" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.169736 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c71148c2-9145-4207-841f-f4931e513205-dns-svc\") pod \"dnsmasq-dns-86bcb6d7c9-hdsrp\" (UID: \"c71148c2-9145-4207-841f-f4931e513205\") " pod="openstack/dnsmasq-dns-86bcb6d7c9-hdsrp" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.170058 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c71148c2-9145-4207-841f-f4931e513205-ovsdbserver-nb\") pod \"dnsmasq-dns-86bcb6d7c9-hdsrp\" (UID: \"c71148c2-9145-4207-841f-f4931e513205\") " pod="openstack/dnsmasq-dns-86bcb6d7c9-hdsrp" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.189726 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxxd5\" (UniqueName: 
\"kubernetes.io/projected/c71148c2-9145-4207-841f-f4931e513205-kube-api-access-wxxd5\") pod \"dnsmasq-dns-86bcb6d7c9-hdsrp\" (UID: \"c71148c2-9145-4207-841f-f4931e513205\") " pod="openstack/dnsmasq-dns-86bcb6d7c9-hdsrp" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.270754 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/017a6c23-059b-4451-a487-660ef4033bc1-scripts\") pod \"cinder-api-0\" (UID: \"017a6c23-059b-4451-a487-660ef4033bc1\") " pod="openstack/cinder-api-0" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.271365 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/017a6c23-059b-4451-a487-660ef4033bc1-config-data\") pod \"cinder-api-0\" (UID: \"017a6c23-059b-4451-a487-660ef4033bc1\") " pod="openstack/cinder-api-0" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.271396 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/017a6c23-059b-4451-a487-660ef4033bc1-logs\") pod \"cinder-api-0\" (UID: \"017a6c23-059b-4451-a487-660ef4033bc1\") " pod="openstack/cinder-api-0" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.271685 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-glff5\" (UniqueName: \"kubernetes.io/projected/017a6c23-059b-4451-a487-660ef4033bc1-kube-api-access-glff5\") pod \"cinder-api-0\" (UID: \"017a6c23-059b-4451-a487-660ef4033bc1\") " pod="openstack/cinder-api-0" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.271716 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/017a6c23-059b-4451-a487-660ef4033bc1-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"017a6c23-059b-4451-a487-660ef4033bc1\") " pod="openstack/cinder-api-0" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.271753 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/017a6c23-059b-4451-a487-660ef4033bc1-config-data-custom\") pod \"cinder-api-0\" (UID: \"017a6c23-059b-4451-a487-660ef4033bc1\") " pod="openstack/cinder-api-0" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.271851 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/017a6c23-059b-4451-a487-660ef4033bc1-etc-machine-id\") pod \"cinder-api-0\" (UID: \"017a6c23-059b-4451-a487-660ef4033bc1\") " pod="openstack/cinder-api-0" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.272029 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/017a6c23-059b-4451-a487-660ef4033bc1-etc-machine-id\") pod \"cinder-api-0\" (UID: \"017a6c23-059b-4451-a487-660ef4033bc1\") " pod="openstack/cinder-api-0" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.272113 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/017a6c23-059b-4451-a487-660ef4033bc1-logs\") pod \"cinder-api-0\" (UID: \"017a6c23-059b-4451-a487-660ef4033bc1\") " pod="openstack/cinder-api-0" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.274497 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/017a6c23-059b-4451-a487-660ef4033bc1-config-data\") pod \"cinder-api-0\" (UID: \"017a6c23-059b-4451-a487-660ef4033bc1\") " pod="openstack/cinder-api-0" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.274622 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/017a6c23-059b-4451-a487-660ef4033bc1-scripts\") pod \"cinder-api-0\" (UID: \"017a6c23-059b-4451-a487-660ef4033bc1\") " pod="openstack/cinder-api-0" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.275397 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/017a6c23-059b-4451-a487-660ef4033bc1-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"017a6c23-059b-4451-a487-660ef4033bc1\") " pod="openstack/cinder-api-0" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.276069 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/017a6c23-059b-4451-a487-660ef4033bc1-config-data-custom\") pod \"cinder-api-0\" (UID: \"017a6c23-059b-4451-a487-660ef4033bc1\") " pod="openstack/cinder-api-0" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.289497 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-glff5\" (UniqueName: \"kubernetes.io/projected/017a6c23-059b-4451-a487-660ef4033bc1-kube-api-access-glff5\") pod \"cinder-api-0\" (UID: \"017a6c23-059b-4451-a487-660ef4033bc1\") " pod="openstack/cinder-api-0" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.291252 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86bcb6d7c9-hdsrp" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.416597 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.781111 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86bcb6d7c9-hdsrp"] Jan 28 17:21:28 crc kubenswrapper[4811]: I0128 17:21:28.944956 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 28 17:21:28 crc kubenswrapper[4811]: W0128 17:21:28.951587 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod017a6c23_059b_4451_a487_660ef4033bc1.slice/crio-d6bd91870b457201b60754f93c018a258f6eced1f6e3b7f1e2223b66b4a876ed WatchSource:0}: Error finding container d6bd91870b457201b60754f93c018a258f6eced1f6e3b7f1e2223b66b4a876ed: Status 404 returned error can't find the container with id d6bd91870b457201b60754f93c018a258f6eced1f6e3b7f1e2223b66b4a876ed Jan 28 17:21:29 crc kubenswrapper[4811]: I0128 17:21:29.611377 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"017a6c23-059b-4451-a487-660ef4033bc1","Type":"ContainerStarted","Data":"5f6c34b1699ae0be2dcba3224e15a64455f7b0a01acfbec742ad2a0639ee1edb"} Jan 28 17:21:29 crc kubenswrapper[4811]: I0128 17:21:29.611699 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"017a6c23-059b-4451-a487-660ef4033bc1","Type":"ContainerStarted","Data":"d6bd91870b457201b60754f93c018a258f6eced1f6e3b7f1e2223b66b4a876ed"} Jan 28 17:21:29 crc kubenswrapper[4811]: I0128 17:21:29.614334 4811 generic.go:334] "Generic (PLEG): container finished" podID="c71148c2-9145-4207-841f-f4931e513205" containerID="d1a6680bd18c4477ea31fa78055a07373055e9f9a05a978ec8fd598122b6bddf" exitCode=0 Jan 28 17:21:29 crc kubenswrapper[4811]: I0128 17:21:29.614375 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86bcb6d7c9-hdsrp" event={"ID":"c71148c2-9145-4207-841f-f4931e513205","Type":"ContainerDied","Data":"d1a6680bd18c4477ea31fa78055a07373055e9f9a05a978ec8fd598122b6bddf"} Jan 28 17:21:29 crc kubenswrapper[4811]: I0128 17:21:29.614401 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86bcb6d7c9-hdsrp" event={"ID":"c71148c2-9145-4207-841f-f4931e513205","Type":"ContainerStarted","Data":"14dd51fca7e923d47708c7871d95b23c5ceabcbde212dfd1c4d579a1a5be1b3e"} Jan 28 17:21:30 crc kubenswrapper[4811]: I0128 17:21:30.629537 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"017a6c23-059b-4451-a487-660ef4033bc1","Type":"ContainerStarted","Data":"bb8659c4081958bfe7fa364c23e63d9e2bff60b485f1c51cf2ed5e0aeddce143"} Jan 28 17:21:30 crc kubenswrapper[4811]: I0128 17:21:30.629961 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Jan 28 17:21:30 crc kubenswrapper[4811]: I0128 17:21:30.637831 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86bcb6d7c9-hdsrp" event={"ID":"c71148c2-9145-4207-841f-f4931e513205","Type":"ContainerStarted","Data":"60cc5aaaf3f8c97762ef3c84c3fd38fb6521e312d568175c1f86c0dede5d2b4a"} Jan 28 17:21:30 crc kubenswrapper[4811]: I0128 17:21:30.638150 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86bcb6d7c9-hdsrp" Jan 28 17:21:30 crc kubenswrapper[4811]: I0128 17:21:30.661018 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=2.660998216 
podStartE2EDuration="2.660998216s" podCreationTimestamp="2026-01-28 17:21:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:21:30.648768122 +0000 UTC m=+5783.403131705" watchObservedRunningTime="2026-01-28 17:21:30.660998216 +0000 UTC m=+5783.415361799" Jan 28 17:21:37 crc kubenswrapper[4811]: I0128 17:21:37.339594 4811 scope.go:117] "RemoveContainer" containerID="3abd2ca52dcea4f0bad2eccd9e9279be836bcb0c1530a2433f35225ded0da091" Jan 28 17:21:37 crc kubenswrapper[4811]: E0128 17:21:37.340517 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:21:38 crc kubenswrapper[4811]: I0128 17:21:38.292662 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86bcb6d7c9-hdsrp" Jan 28 17:21:38 crc kubenswrapper[4811]: I0128 17:21:38.319549 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86bcb6d7c9-hdsrp" podStartSLOduration=11.319524086 podStartE2EDuration="11.319524086s" podCreationTimestamp="2026-01-28 17:21:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:21:30.676274032 +0000 UTC m=+5783.430637635" watchObservedRunningTime="2026-01-28 17:21:38.319524086 +0000 UTC m=+5791.073887689" Jan 28 17:21:38 crc kubenswrapper[4811]: I0128 17:21:38.366767 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84f785c5f9-x2hfm"] Jan 28 17:21:38 crc kubenswrapper[4811]: I0128 17:21:38.367094 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-84f785c5f9-x2hfm" podUID="27af52ad-5ab2-40c7-bbe3-da26be337671" containerName="dnsmasq-dns" containerID="cri-o://0f126e6c7406b44f4628f87c09faf6cc16954432020ced28886950f01581ffd9" gracePeriod=10 Jan 28 17:21:38 crc kubenswrapper[4811]: I0128 17:21:38.705617 4811 generic.go:334] "Generic (PLEG): container finished" podID="27af52ad-5ab2-40c7-bbe3-da26be337671" containerID="0f126e6c7406b44f4628f87c09faf6cc16954432020ced28886950f01581ffd9" exitCode=0 Jan 28 17:21:38 crc kubenswrapper[4811]: I0128 17:21:38.705916 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84f785c5f9-x2hfm" event={"ID":"27af52ad-5ab2-40c7-bbe3-da26be337671","Type":"ContainerDied","Data":"0f126e6c7406b44f4628f87c09faf6cc16954432020ced28886950f01581ffd9"} Jan 28 17:21:39 crc kubenswrapper[4811]: I0128 17:21:39.434449 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84f785c5f9-x2hfm" Jan 28 17:21:39 crc kubenswrapper[4811]: I0128 17:21:39.572946 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/27af52ad-5ab2-40c7-bbe3-da26be337671-dns-svc\") pod \"27af52ad-5ab2-40c7-bbe3-da26be337671\" (UID: \"27af52ad-5ab2-40c7-bbe3-da26be337671\") " Jan 28 17:21:39 crc kubenswrapper[4811]: I0128 17:21:39.573026 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/27af52ad-5ab2-40c7-bbe3-da26be337671-ovsdbserver-nb\") pod \"27af52ad-5ab2-40c7-bbe3-da26be337671\" (UID: \"27af52ad-5ab2-40c7-bbe3-da26be337671\") " Jan 28 17:21:39 crc kubenswrapper[4811]: I0128 17:21:39.573086 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27af52ad-5ab2-40c7-bbe3-da26be337671-config\") pod \"27af52ad-5ab2-40c7-bbe3-da26be337671\" (UID: \"27af52ad-5ab2-40c7-bbe3-da26be337671\") " Jan 28 17:21:39 crc kubenswrapper[4811]: I0128 17:21:39.573134 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/27af52ad-5ab2-40c7-bbe3-da26be337671-ovsdbserver-sb\") pod \"27af52ad-5ab2-40c7-bbe3-da26be337671\" (UID: \"27af52ad-5ab2-40c7-bbe3-da26be337671\") " Jan 28 17:21:39 crc kubenswrapper[4811]: I0128 17:21:39.573221 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b78ns\" (UniqueName: \"kubernetes.io/projected/27af52ad-5ab2-40c7-bbe3-da26be337671-kube-api-access-b78ns\") pod \"27af52ad-5ab2-40c7-bbe3-da26be337671\" (UID: \"27af52ad-5ab2-40c7-bbe3-da26be337671\") " Jan 28 17:21:39 crc kubenswrapper[4811]: I0128 17:21:39.588805 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27af52ad-5ab2-40c7-bbe3-da26be337671-kube-api-access-b78ns" (OuterVolumeSpecName: "kube-api-access-b78ns") pod "27af52ad-5ab2-40c7-bbe3-da26be337671" (UID: "27af52ad-5ab2-40c7-bbe3-da26be337671"). InnerVolumeSpecName "kube-api-access-b78ns". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:21:39 crc kubenswrapper[4811]: I0128 17:21:39.676001 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b78ns\" (UniqueName: \"kubernetes.io/projected/27af52ad-5ab2-40c7-bbe3-da26be337671-kube-api-access-b78ns\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:39 crc kubenswrapper[4811]: I0128 17:21:39.715897 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84f785c5f9-x2hfm" event={"ID":"27af52ad-5ab2-40c7-bbe3-da26be337671","Type":"ContainerDied","Data":"8404811a6d794ba2ab93b2b6f05abab1a0e76ef259b666ce9acf381df36b7b0d"} Jan 28 17:21:39 crc kubenswrapper[4811]: I0128 17:21:39.715956 4811 scope.go:117] "RemoveContainer" containerID="0f126e6c7406b44f4628f87c09faf6cc16954432020ced28886950f01581ffd9" Jan 28 17:21:39 crc kubenswrapper[4811]: I0128 17:21:39.716038 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84f785c5f9-x2hfm" Jan 28 17:21:39 crc kubenswrapper[4811]: I0128 17:21:39.777966 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27af52ad-5ab2-40c7-bbe3-da26be337671-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "27af52ad-5ab2-40c7-bbe3-da26be337671" (UID: "27af52ad-5ab2-40c7-bbe3-da26be337671"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:21:39 crc kubenswrapper[4811]: I0128 17:21:39.779724 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27af52ad-5ab2-40c7-bbe3-da26be337671-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "27af52ad-5ab2-40c7-bbe3-da26be337671" (UID: "27af52ad-5ab2-40c7-bbe3-da26be337671"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:21:39 crc kubenswrapper[4811]: I0128 17:21:39.794018 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27af52ad-5ab2-40c7-bbe3-da26be337671-config" (OuterVolumeSpecName: "config") pod "27af52ad-5ab2-40c7-bbe3-da26be337671" (UID: "27af52ad-5ab2-40c7-bbe3-da26be337671"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:21:39 crc kubenswrapper[4811]: I0128 17:21:39.795069 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27af52ad-5ab2-40c7-bbe3-da26be337671-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "27af52ad-5ab2-40c7-bbe3-da26be337671" (UID: "27af52ad-5ab2-40c7-bbe3-da26be337671"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:21:39 crc kubenswrapper[4811]: I0128 17:21:39.878459 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27af52ad-5ab2-40c7-bbe3-da26be337671-config\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:39 crc kubenswrapper[4811]: I0128 17:21:39.878486 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/27af52ad-5ab2-40c7-bbe3-da26be337671-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:39 crc kubenswrapper[4811]: I0128 17:21:39.878496 4811 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/27af52ad-5ab2-40c7-bbe3-da26be337671-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:39 crc kubenswrapper[4811]: I0128 17:21:39.878504 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/27af52ad-5ab2-40c7-bbe3-da26be337671-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:39 crc kubenswrapper[4811]: I0128 17:21:39.890003 4811 scope.go:117] "RemoveContainer" containerID="9eae49f8d7d4246d4a2fc5358ecf59e5326701f58608eda160c946d11256adfa" Jan 28 17:21:40 crc kubenswrapper[4811]: I0128 17:21:40.051007 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84f785c5f9-x2hfm"] Jan 28 17:21:40 crc kubenswrapper[4811]: I0128 17:21:40.062297 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84f785c5f9-x2hfm"] Jan 28 17:21:40 crc kubenswrapper[4811]: I0128 17:21:40.262149 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 28 17:21:40 crc kubenswrapper[4811]: I0128 
Jan 28 17:21:40 crc kubenswrapper[4811]: I0128 17:21:40.291169 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Jan 28 17:21:40 crc kubenswrapper[4811]: I0128 17:21:40.291449 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="41723cdb-d2fc-4716-9df8-2210e796b3aa" containerName="nova-metadata-log" containerID="cri-o://9ab9e5bffe658dac1fa8b9977f65977c2b05ef3b52f36519b307f272ab1dc9b7" gracePeriod=30
Jan 28 17:21:40 crc kubenswrapper[4811]: I0128 17:21:40.291916 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="41723cdb-d2fc-4716-9df8-2210e796b3aa" containerName="nova-metadata-metadata" containerID="cri-o://7f5fa9f058e9294267306c7a5cc31d03d67e84936435375d194e2ca7fe5bb870" gracePeriod=30
Jan 28 17:21:40 crc kubenswrapper[4811]: I0128 17:21:40.305852 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Jan 28 17:21:40 crc kubenswrapper[4811]: I0128 17:21:40.306076 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3c5dba6c-abfe-4686-895f-e051268d21db" containerName="nova-api-log" containerID="cri-o://cee7a9728b8d6d7b06e26029bbc1e36dfa9b9afd001fc2cf2f67008a69ef4a09" gracePeriod=30
Jan 28 17:21:40 crc kubenswrapper[4811]: I0128 17:21:40.306494 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3c5dba6c-abfe-4686-895f-e051268d21db" containerName="nova-api-api" containerID="cri-o://faf527c4d505cb091f08bf0e1f0bb44e3fe1cc078499bb4dd9817b9ab4d442f8" gracePeriod=30
Jan 28 17:21:40 crc kubenswrapper[4811]: I0128 17:21:40.407642 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27af52ad-5ab2-40c7-bbe3-da26be337671" path="/var/lib/kubelet/pods/27af52ad-5ab2-40c7-bbe3-da26be337671/volumes"
Jan 28 17:21:40 crc kubenswrapper[4811]: I0128 17:21:40.408830 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Jan 28 17:21:40 crc kubenswrapper[4811]: I0128 17:21:40.408861 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Jan 28 17:21:40 crc kubenswrapper[4811]: I0128 17:21:40.409834 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="0423b7af-d5f2-4cc2-88c8-da8756cbc46d" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://faa821a59523ad6464873b49854f734017f689787c28474c13b09fb8e16e3785" gracePeriod=30
Jan 28 17:21:40 crc kubenswrapper[4811]: I0128 17:21:40.414799 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="aedc4187-66e7-4af9-b508-5d6282f4a937" containerName="nova-cell0-conductor-conductor" containerID="cri-o://9b65aefbe5958d180fb7ea04bfe17f85779eeaee70d605e4ca9ba2f51ffe4f87" gracePeriod=30
Jan 28 17:21:40 crc kubenswrapper[4811]: I0128 17:21:40.460580 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Jan 28 17:21:40 crc kubenswrapper[4811]: I0128 17:21:40.460950 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="633327c1-9418-4e00-b72f-3c742b26243c" containerName="nova-cell1-conductor-conductor" containerID="cri-o://321bd8c868e7bfa317d34c8878c89a986acbcd1af0b832e5fdaef2fb6a03317e" gracePeriod=30
Jan 28 17:21:40 crc kubenswrapper[4811]: I0128 17:21:40.729165 4811 generic.go:334] "Generic (PLEG): container finished" podID="41723cdb-d2fc-4716-9df8-2210e796b3aa" containerID="9ab9e5bffe658dac1fa8b9977f65977c2b05ef3b52f36519b307f272ab1dc9b7" exitCode=143
Jan 28 17:21:40 crc kubenswrapper[4811]: I0128 17:21:40.729236 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"41723cdb-d2fc-4716-9df8-2210e796b3aa","Type":"ContainerDied","Data":"9ab9e5bffe658dac1fa8b9977f65977c2b05ef3b52f36519b307f272ab1dc9b7"}
Jan 28 17:21:40 crc kubenswrapper[4811]: I0128 17:21:40.732646 4811 generic.go:334] "Generic (PLEG): container finished" podID="3c5dba6c-abfe-4686-895f-e051268d21db" containerID="cee7a9728b8d6d7b06e26029bbc1e36dfa9b9afd001fc2cf2f67008a69ef4a09" exitCode=143
Jan 28 17:21:40 crc kubenswrapper[4811]: I0128 17:21:40.732731 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3c5dba6c-abfe-4686-895f-e051268d21db","Type":"ContainerDied","Data":"cee7a9728b8d6d7b06e26029bbc1e36dfa9b9afd001fc2cf2f67008a69ef4a09"}
Jan 28 17:21:40 crc kubenswrapper[4811]: I0128 17:21:40.742827 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0"
Jan 28 17:21:41 crc kubenswrapper[4811]: E0128 17:21:41.097385 4811 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0423b7af_d5f2_4cc2_88c8_da8756cbc46d.slice/crio-faa821a59523ad6464873b49854f734017f689787c28474c13b09fb8e16e3785.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0423b7af_d5f2_4cc2_88c8_da8756cbc46d.slice/crio-conmon-faa821a59523ad6464873b49854f734017f689787c28474c13b09fb8e16e3785.scope\": RecentStats: unable to find data in memory cache]"
Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.374759 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.413654 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0423b7af-d5f2-4cc2-88c8-da8756cbc46d-combined-ca-bundle\") pod \"0423b7af-d5f2-4cc2-88c8-da8756cbc46d\" (UID: \"0423b7af-d5f2-4cc2-88c8-da8756cbc46d\") " Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.413871 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pxb8p\" (UniqueName: \"kubernetes.io/projected/0423b7af-d5f2-4cc2-88c8-da8756cbc46d-kube-api-access-pxb8p\") pod \"0423b7af-d5f2-4cc2-88c8-da8756cbc46d\" (UID: \"0423b7af-d5f2-4cc2-88c8-da8756cbc46d\") " Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.414003 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0423b7af-d5f2-4cc2-88c8-da8756cbc46d-config-data\") pod \"0423b7af-d5f2-4cc2-88c8-da8756cbc46d\" (UID: \"0423b7af-d5f2-4cc2-88c8-da8756cbc46d\") " Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.427966 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0423b7af-d5f2-4cc2-88c8-da8756cbc46d-kube-api-access-pxb8p" (OuterVolumeSpecName: "kube-api-access-pxb8p") pod "0423b7af-d5f2-4cc2-88c8-da8756cbc46d" (UID: "0423b7af-d5f2-4cc2-88c8-da8756cbc46d"). InnerVolumeSpecName "kube-api-access-pxb8p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.446933 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0423b7af-d5f2-4cc2-88c8-da8756cbc46d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0423b7af-d5f2-4cc2-88c8-da8756cbc46d" (UID: "0423b7af-d5f2-4cc2-88c8-da8756cbc46d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.464602 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0423b7af-d5f2-4cc2-88c8-da8756cbc46d-config-data" (OuterVolumeSpecName: "config-data") pod "0423b7af-d5f2-4cc2-88c8-da8756cbc46d" (UID: "0423b7af-d5f2-4cc2-88c8-da8756cbc46d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.517273 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0423b7af-d5f2-4cc2-88c8-da8756cbc46d-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.517577 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0423b7af-d5f2-4cc2-88c8-da8756cbc46d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.517590 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pxb8p\" (UniqueName: \"kubernetes.io/projected/0423b7af-d5f2-4cc2-88c8-da8756cbc46d-kube-api-access-pxb8p\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.758140 4811 generic.go:334] "Generic (PLEG): container finished" podID="633327c1-9418-4e00-b72f-3c742b26243c" containerID="321bd8c868e7bfa317d34c8878c89a986acbcd1af0b832e5fdaef2fb6a03317e" exitCode=0 Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.758253 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"633327c1-9418-4e00-b72f-3c742b26243c","Type":"ContainerDied","Data":"321bd8c868e7bfa317d34c8878c89a986acbcd1af0b832e5fdaef2fb6a03317e"} Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.761154 4811 generic.go:334] "Generic (PLEG): container finished" podID="0423b7af-d5f2-4cc2-88c8-da8756cbc46d" containerID="faa821a59523ad6464873b49854f734017f689787c28474c13b09fb8e16e3785" exitCode=0 Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.761210 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"0423b7af-d5f2-4cc2-88c8-da8756cbc46d","Type":"ContainerDied","Data":"faa821a59523ad6464873b49854f734017f689787c28474c13b09fb8e16e3785"} Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.761235 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"0423b7af-d5f2-4cc2-88c8-da8756cbc46d","Type":"ContainerDied","Data":"7c4136f04b7296cc35fd76b0ed5b2e973e50d7585ab3605250490fca4a46853b"} Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.761253 4811 scope.go:117] "RemoveContainer" containerID="faa821a59523ad6464873b49854f734017f689787c28474c13b09fb8e16e3785" Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.761402 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.778858 4811 generic.go:334] "Generic (PLEG): container finished" podID="aedc4187-66e7-4af9-b508-5d6282f4a937" containerID="9b65aefbe5958d180fb7ea04bfe17f85779eeaee70d605e4ca9ba2f51ffe4f87" exitCode=0 Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.778924 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"aedc4187-66e7-4af9-b508-5d6282f4a937","Type":"ContainerDied","Data":"9b65aefbe5958d180fb7ea04bfe17f85779eeaee70d605e4ca9ba2f51ffe4f87"} Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.789947 4811 scope.go:117] "RemoveContainer" containerID="faa821a59523ad6464873b49854f734017f689787c28474c13b09fb8e16e3785" Jan 28 17:21:41 crc kubenswrapper[4811]: E0128 17:21:41.790895 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"faa821a59523ad6464873b49854f734017f689787c28474c13b09fb8e16e3785\": container with ID starting with faa821a59523ad6464873b49854f734017f689787c28474c13b09fb8e16e3785 not found: ID does not exist" containerID="faa821a59523ad6464873b49854f734017f689787c28474c13b09fb8e16e3785" Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.802542 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"faa821a59523ad6464873b49854f734017f689787c28474c13b09fb8e16e3785"} err="failed to get container status \"faa821a59523ad6464873b49854f734017f689787c28474c13b09fb8e16e3785\": rpc error: code = NotFound desc = could not find container \"faa821a59523ad6464873b49854f734017f689787c28474c13b09fb8e16e3785\": container with ID starting with faa821a59523ad6464873b49854f734017f689787c28474c13b09fb8e16e3785 not found: ID does not exist" Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.834850 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 28 17:21:41 crc kubenswrapper[4811]: E0128 17:21:41.862263 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8bf851e64771cb1f848e36cdc9f8ef4ff73c948459eb7dd538f6d3bde876f14d" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 28 17:21:41 crc kubenswrapper[4811]: E0128 17:21:41.864664 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8bf851e64771cb1f848e36cdc9f8ef4ff73c948459eb7dd538f6d3bde876f14d" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 28 17:21:41 crc kubenswrapper[4811]: E0128 17:21:41.866819 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8bf851e64771cb1f848e36cdc9f8ef4ff73c948459eb7dd538f6d3bde876f14d" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 28 17:21:41 crc kubenswrapper[4811]: E0128 17:21:41.866883 4811 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="a69222f8-b291-45e4-9be5-b7860cadb5db" 
containerName="nova-scheduler-scheduler" Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.872562 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.910916 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.914608 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 28 17:21:41 crc kubenswrapper[4811]: E0128 17:21:41.915060 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="633327c1-9418-4e00-b72f-3c742b26243c" containerName="nova-cell1-conductor-conductor" Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.915077 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="633327c1-9418-4e00-b72f-3c742b26243c" containerName="nova-cell1-conductor-conductor" Jan 28 17:21:41 crc kubenswrapper[4811]: E0128 17:21:41.915111 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0423b7af-d5f2-4cc2-88c8-da8756cbc46d" containerName="nova-cell1-novncproxy-novncproxy" Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.915121 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="0423b7af-d5f2-4cc2-88c8-da8756cbc46d" containerName="nova-cell1-novncproxy-novncproxy" Jan 28 17:21:41 crc kubenswrapper[4811]: E0128 17:21:41.915134 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27af52ad-5ab2-40c7-bbe3-da26be337671" containerName="dnsmasq-dns" Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.915142 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="27af52ad-5ab2-40c7-bbe3-da26be337671" containerName="dnsmasq-dns" Jan 28 17:21:41 crc kubenswrapper[4811]: E0128 17:21:41.915173 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27af52ad-5ab2-40c7-bbe3-da26be337671" containerName="init" Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.915182 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="27af52ad-5ab2-40c7-bbe3-da26be337671" containerName="init" Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.915379 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="633327c1-9418-4e00-b72f-3c742b26243c" containerName="nova-cell1-conductor-conductor" Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.915403 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="0423b7af-d5f2-4cc2-88c8-da8756cbc46d" containerName="nova-cell1-novncproxy-novncproxy" Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.915504 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="27af52ad-5ab2-40c7-bbe3-da26be337671" containerName="dnsmasq-dns" Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.916212 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.918396 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Jan 28 17:21:41 crc kubenswrapper[4811]: I0128 17:21:41.934631 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.012020 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.037794 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/633327c1-9418-4e00-b72f-3c742b26243c-config-data\") pod \"633327c1-9418-4e00-b72f-3c742b26243c\" (UID: \"633327c1-9418-4e00-b72f-3c742b26243c\") " Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.037902 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/633327c1-9418-4e00-b72f-3c742b26243c-combined-ca-bundle\") pod \"633327c1-9418-4e00-b72f-3c742b26243c\" (UID: \"633327c1-9418-4e00-b72f-3c742b26243c\") " Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.038083 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cjp5m\" (UniqueName: \"kubernetes.io/projected/633327c1-9418-4e00-b72f-3c742b26243c-kube-api-access-cjp5m\") pod \"633327c1-9418-4e00-b72f-3c742b26243c\" (UID: \"633327c1-9418-4e00-b72f-3c742b26243c\") " Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.039010 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hcbpb\" (UniqueName: \"kubernetes.io/projected/915b65f1-d386-4113-b209-767e3d1d1f07-kube-api-access-hcbpb\") pod \"nova-cell1-novncproxy-0\" (UID: \"915b65f1-d386-4113-b209-767e3d1d1f07\") " pod="openstack/nova-cell1-novncproxy-0" Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.039343 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/915b65f1-d386-4113-b209-767e3d1d1f07-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"915b65f1-d386-4113-b209-767e3d1d1f07\") " pod="openstack/nova-cell1-novncproxy-0" Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.039522 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/915b65f1-d386-4113-b209-767e3d1d1f07-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"915b65f1-d386-4113-b209-767e3d1d1f07\") " pod="openstack/nova-cell1-novncproxy-0" Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.047782 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/633327c1-9418-4e00-b72f-3c742b26243c-kube-api-access-cjp5m" (OuterVolumeSpecName: "kube-api-access-cjp5m") pod "633327c1-9418-4e00-b72f-3c742b26243c" (UID: "633327c1-9418-4e00-b72f-3c742b26243c"). InnerVolumeSpecName "kube-api-access-cjp5m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.065599 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/633327c1-9418-4e00-b72f-3c742b26243c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "633327c1-9418-4e00-b72f-3c742b26243c" (UID: "633327c1-9418-4e00-b72f-3c742b26243c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.075090 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/633327c1-9418-4e00-b72f-3c742b26243c-config-data" (OuterVolumeSpecName: "config-data") pod "633327c1-9418-4e00-b72f-3c742b26243c" (UID: "633327c1-9418-4e00-b72f-3c742b26243c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.141005 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aedc4187-66e7-4af9-b508-5d6282f4a937-combined-ca-bundle\") pod \"aedc4187-66e7-4af9-b508-5d6282f4a937\" (UID: \"aedc4187-66e7-4af9-b508-5d6282f4a937\") " Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.141073 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf77b\" (UniqueName: \"kubernetes.io/projected/aedc4187-66e7-4af9-b508-5d6282f4a937-kube-api-access-bf77b\") pod \"aedc4187-66e7-4af9-b508-5d6282f4a937\" (UID: \"aedc4187-66e7-4af9-b508-5d6282f4a937\") " Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.141247 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aedc4187-66e7-4af9-b508-5d6282f4a937-config-data\") pod \"aedc4187-66e7-4af9-b508-5d6282f4a937\" (UID: \"aedc4187-66e7-4af9-b508-5d6282f4a937\") " Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.141523 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/915b65f1-d386-4113-b209-767e3d1d1f07-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"915b65f1-d386-4113-b209-767e3d1d1f07\") " pod="openstack/nova-cell1-novncproxy-0" Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.141611 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/915b65f1-d386-4113-b209-767e3d1d1f07-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"915b65f1-d386-4113-b209-767e3d1d1f07\") " pod="openstack/nova-cell1-novncproxy-0" Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.141681 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hcbpb\" (UniqueName: \"kubernetes.io/projected/915b65f1-d386-4113-b209-767e3d1d1f07-kube-api-access-hcbpb\") pod \"nova-cell1-novncproxy-0\" (UID: \"915b65f1-d386-4113-b209-767e3d1d1f07\") " pod="openstack/nova-cell1-novncproxy-0" Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.141725 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/633327c1-9418-4e00-b72f-3c742b26243c-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.141736 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/633327c1-9418-4e00-b72f-3c742b26243c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.141745 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cjp5m\" (UniqueName: \"kubernetes.io/projected/633327c1-9418-4e00-b72f-3c742b26243c-kube-api-access-cjp5m\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:42 crc kubenswrapper[4811]: 
Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.146979 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/915b65f1-d386-4113-b209-767e3d1d1f07-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"915b65f1-d386-4113-b209-767e3d1d1f07\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.155954 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aedc4187-66e7-4af9-b508-5d6282f4a937-kube-api-access-bf77b" (OuterVolumeSpecName: "kube-api-access-bf77b") pod "aedc4187-66e7-4af9-b508-5d6282f4a937" (UID: "aedc4187-66e7-4af9-b508-5d6282f4a937"). InnerVolumeSpecName "kube-api-access-bf77b". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.160796 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hcbpb\" (UniqueName: \"kubernetes.io/projected/915b65f1-d386-4113-b209-767e3d1d1f07-kube-api-access-hcbpb\") pod \"nova-cell1-novncproxy-0\" (UID: \"915b65f1-d386-4113-b209-767e3d1d1f07\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.174324 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aedc4187-66e7-4af9-b508-5d6282f4a937-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aedc4187-66e7-4af9-b508-5d6282f4a937" (UID: "aedc4187-66e7-4af9-b508-5d6282f4a937"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.183573 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aedc4187-66e7-4af9-b508-5d6282f4a937-config-data" (OuterVolumeSpecName: "config-data") pod "aedc4187-66e7-4af9-b508-5d6282f4a937" (UID: "aedc4187-66e7-4af9-b508-5d6282f4a937"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.243136 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aedc4187-66e7-4af9-b508-5d6282f4a937-config-data\") on node \"crc\" DevicePath \"\""
Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.243170 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aedc4187-66e7-4af9-b508-5d6282f4a937-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.243181 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf77b\" (UniqueName: \"kubernetes.io/projected/aedc4187-66e7-4af9-b508-5d6282f4a937-kube-api-access-bf77b\") on node \"crc\" DevicePath \"\""
Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.244804 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.356285 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0423b7af-d5f2-4cc2-88c8-da8756cbc46d" path="/var/lib/kubelet/pods/0423b7af-d5f2-4cc2-88c8-da8756cbc46d/volumes"
Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.679179 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Jan 28 17:21:42 crc kubenswrapper[4811]: W0128 17:21:42.690206 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod915b65f1_d386_4113_b209_767e3d1d1f07.slice/crio-ec891d8a856da8ce3bf31267144a1322b5a2bfaf94f5c389c686fd3d9e6662f1 WatchSource:0}: Error finding container ec891d8a856da8ce3bf31267144a1322b5a2bfaf94f5c389c686fd3d9e6662f1: Status 404 returned error can't find the container with id ec891d8a856da8ce3bf31267144a1322b5a2bfaf94f5c389c686fd3d9e6662f1
Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.793331 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"915b65f1-d386-4113-b209-767e3d1d1f07","Type":"ContainerStarted","Data":"ec891d8a856da8ce3bf31267144a1322b5a2bfaf94f5c389c686fd3d9e6662f1"}
Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.799500 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"633327c1-9418-4e00-b72f-3c742b26243c","Type":"ContainerDied","Data":"9c71019d16229c944b6eb5b9b90cd5b6ce0a91b1e8f24c2044264db44019cf56"}
Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.799554 4811 scope.go:117] "RemoveContainer" containerID="321bd8c868e7bfa317d34c8878c89a986acbcd1af0b832e5fdaef2fb6a03317e"
Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.799679 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.808001 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"aedc4187-66e7-4af9-b508-5d6282f4a937","Type":"ContainerDied","Data":"caa2f50b99802eff3eed940ec29f6b935f7f179d713280592ec4eca81f5ba1fe"}
Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.808046 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.868655 4811 scope.go:117] "RemoveContainer" containerID="9b65aefbe5958d180fb7ea04bfe17f85779eeaee70d605e4ca9ba2f51ffe4f87" Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.878336 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.893760 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.910491 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 28 17:21:42 crc kubenswrapper[4811]: E0128 17:21:42.910954 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aedc4187-66e7-4af9-b508-5d6282f4a937" containerName="nova-cell0-conductor-conductor" Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.910978 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="aedc4187-66e7-4af9-b508-5d6282f4a937" containerName="nova-cell0-conductor-conductor" Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.911204 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="aedc4187-66e7-4af9-b508-5d6282f4a937" containerName="nova-cell0-conductor-conductor" Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.911964 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.914785 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.935760 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.946140 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.956674 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.964809 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhncz\" (UniqueName: \"kubernetes.io/projected/3783b6d8-2a96-4199-98ae-03f37d52317c-kube-api-access-zhncz\") pod \"nova-cell0-conductor-0\" (UID: \"3783b6d8-2a96-4199-98ae-03f37d52317c\") " pod="openstack/nova-cell0-conductor-0" Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.965399 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3783b6d8-2a96-4199-98ae-03f37d52317c-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"3783b6d8-2a96-4199-98ae-03f37d52317c\") " pod="openstack/nova-cell0-conductor-0" Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.965640 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3783b6d8-2a96-4199-98ae-03f37d52317c-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"3783b6d8-2a96-4199-98ae-03f37d52317c\") " pod="openstack/nova-cell0-conductor-0" Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.972197 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 28 
17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.973920 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.975709 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Jan 28 17:21:42 crc kubenswrapper[4811]: I0128 17:21:42.986383 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 28 17:21:43 crc kubenswrapper[4811]: I0128 17:21:43.067956 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z42g8\" (UniqueName: \"kubernetes.io/projected/d19b204f-1218-4aa3-93c9-f09944ae4bc0-kube-api-access-z42g8\") pod \"nova-cell1-conductor-0\" (UID: \"d19b204f-1218-4aa3-93c9-f09944ae4bc0\") " pod="openstack/nova-cell1-conductor-0" Jan 28 17:21:43 crc kubenswrapper[4811]: I0128 17:21:43.068033 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d19b204f-1218-4aa3-93c9-f09944ae4bc0-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"d19b204f-1218-4aa3-93c9-f09944ae4bc0\") " pod="openstack/nova-cell1-conductor-0" Jan 28 17:21:43 crc kubenswrapper[4811]: I0128 17:21:43.068113 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3783b6d8-2a96-4199-98ae-03f37d52317c-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"3783b6d8-2a96-4199-98ae-03f37d52317c\") " pod="openstack/nova-cell0-conductor-0" Jan 28 17:21:43 crc kubenswrapper[4811]: I0128 17:21:43.068305 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhncz\" (UniqueName: \"kubernetes.io/projected/3783b6d8-2a96-4199-98ae-03f37d52317c-kube-api-access-zhncz\") pod \"nova-cell0-conductor-0\" (UID: \"3783b6d8-2a96-4199-98ae-03f37d52317c\") " pod="openstack/nova-cell0-conductor-0" Jan 28 17:21:43 crc kubenswrapper[4811]: I0128 17:21:43.068363 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d19b204f-1218-4aa3-93c9-f09944ae4bc0-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"d19b204f-1218-4aa3-93c9-f09944ae4bc0\") " pod="openstack/nova-cell1-conductor-0" Jan 28 17:21:43 crc kubenswrapper[4811]: I0128 17:21:43.068389 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3783b6d8-2a96-4199-98ae-03f37d52317c-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"3783b6d8-2a96-4199-98ae-03f37d52317c\") " pod="openstack/nova-cell0-conductor-0" Jan 28 17:21:43 crc kubenswrapper[4811]: I0128 17:21:43.072689 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3783b6d8-2a96-4199-98ae-03f37d52317c-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"3783b6d8-2a96-4199-98ae-03f37d52317c\") " pod="openstack/nova-cell0-conductor-0" Jan 28 17:21:43 crc kubenswrapper[4811]: I0128 17:21:43.087378 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhncz\" (UniqueName: \"kubernetes.io/projected/3783b6d8-2a96-4199-98ae-03f37d52317c-kube-api-access-zhncz\") pod \"nova-cell0-conductor-0\" (UID: \"3783b6d8-2a96-4199-98ae-03f37d52317c\") " 
pod="openstack/nova-cell0-conductor-0" Jan 28 17:21:43 crc kubenswrapper[4811]: I0128 17:21:43.087379 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3783b6d8-2a96-4199-98ae-03f37d52317c-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"3783b6d8-2a96-4199-98ae-03f37d52317c\") " pod="openstack/nova-cell0-conductor-0" Jan 28 17:21:43 crc kubenswrapper[4811]: I0128 17:21:43.170220 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d19b204f-1218-4aa3-93c9-f09944ae4bc0-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"d19b204f-1218-4aa3-93c9-f09944ae4bc0\") " pod="openstack/nova-cell1-conductor-0" Jan 28 17:21:43 crc kubenswrapper[4811]: I0128 17:21:43.170362 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z42g8\" (UniqueName: \"kubernetes.io/projected/d19b204f-1218-4aa3-93c9-f09944ae4bc0-kube-api-access-z42g8\") pod \"nova-cell1-conductor-0\" (UID: \"d19b204f-1218-4aa3-93c9-f09944ae4bc0\") " pod="openstack/nova-cell1-conductor-0" Jan 28 17:21:43 crc kubenswrapper[4811]: I0128 17:21:43.170388 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d19b204f-1218-4aa3-93c9-f09944ae4bc0-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"d19b204f-1218-4aa3-93c9-f09944ae4bc0\") " pod="openstack/nova-cell1-conductor-0" Jan 28 17:21:43 crc kubenswrapper[4811]: I0128 17:21:43.173553 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d19b204f-1218-4aa3-93c9-f09944ae4bc0-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"d19b204f-1218-4aa3-93c9-f09944ae4bc0\") " pod="openstack/nova-cell1-conductor-0" Jan 28 17:21:43 crc kubenswrapper[4811]: I0128 17:21:43.174574 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d19b204f-1218-4aa3-93c9-f09944ae4bc0-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"d19b204f-1218-4aa3-93c9-f09944ae4bc0\") " pod="openstack/nova-cell1-conductor-0" Jan 28 17:21:43 crc kubenswrapper[4811]: I0128 17:21:43.186235 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z42g8\" (UniqueName: \"kubernetes.io/projected/d19b204f-1218-4aa3-93c9-f09944ae4bc0-kube-api-access-z42g8\") pod \"nova-cell1-conductor-0\" (UID: \"d19b204f-1218-4aa3-93c9-f09944ae4bc0\") " pod="openstack/nova-cell1-conductor-0" Jan 28 17:21:43 crc kubenswrapper[4811]: I0128 17:21:43.231898 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 28 17:21:43 crc kubenswrapper[4811]: I0128 17:21:43.296391 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 28 17:21:43 crc kubenswrapper[4811]: I0128 17:21:43.477892 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="41723cdb-d2fc-4716-9df8-2210e796b3aa" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.75:8775/\": read tcp 10.217.0.2:48648->10.217.1.75:8775: read: connection reset by peer" Jan 28 17:21:43 crc kubenswrapper[4811]: I0128 17:21:43.478179 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="41723cdb-d2fc-4716-9df8-2210e796b3aa" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.75:8775/\": read tcp 10.217.0.2:48658->10.217.1.75:8775: read: connection reset by peer" Jan 28 17:21:43 crc kubenswrapper[4811]: I0128 17:21:43.520358 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-api-0" podUID="3c5dba6c-abfe-4686-895f-e051268d21db" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.74:8774/\": read tcp 10.217.0.2:46956->10.217.1.74:8774: read: connection reset by peer" Jan 28 17:21:43 crc kubenswrapper[4811]: I0128 17:21:43.520726 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-api-0" podUID="3c5dba6c-abfe-4686-895f-e051268d21db" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.74:8774/\": read tcp 10.217.0.2:46940->10.217.1.74:8774: read: connection reset by peer" Jan 28 17:21:43 crc kubenswrapper[4811]: I0128 17:21:43.724084 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 28 17:21:43 crc kubenswrapper[4811]: I0128 17:21:43.819963 4811 generic.go:334] "Generic (PLEG): container finished" podID="3c5dba6c-abfe-4686-895f-e051268d21db" containerID="faf527c4d505cb091f08bf0e1f0bb44e3fe1cc078499bb4dd9817b9ab4d442f8" exitCode=0 Jan 28 17:21:43 crc kubenswrapper[4811]: I0128 17:21:43.820252 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3c5dba6c-abfe-4686-895f-e051268d21db","Type":"ContainerDied","Data":"faf527c4d505cb091f08bf0e1f0bb44e3fe1cc078499bb4dd9817b9ab4d442f8"} Jan 28 17:21:43 crc kubenswrapper[4811]: I0128 17:21:43.821406 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"915b65f1-d386-4113-b209-767e3d1d1f07","Type":"ContainerStarted","Data":"fade9a36f518f7613f592b187f08c3d79e0d49d4eee1b4eaedcc24e449eab28f"} Jan 28 17:21:43 crc kubenswrapper[4811]: I0128 17:21:43.824414 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"3783b6d8-2a96-4199-98ae-03f37d52317c","Type":"ContainerStarted","Data":"5c5da60e7a4601c022af8ce3f81db98b9dc14129a9455cb41104c95751139624"} Jan 28 17:21:43 crc kubenswrapper[4811]: I0128 17:21:43.830575 4811 generic.go:334] "Generic (PLEG): container finished" podID="41723cdb-d2fc-4716-9df8-2210e796b3aa" containerID="7f5fa9f058e9294267306c7a5cc31d03d67e84936435375d194e2ca7fe5bb870" exitCode=0 Jan 28 17:21:43 crc kubenswrapper[4811]: I0128 17:21:43.830618 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"41723cdb-d2fc-4716-9df8-2210e796b3aa","Type":"ContainerDied","Data":"7f5fa9f058e9294267306c7a5cc31d03d67e84936435375d194e2ca7fe5bb870"} Jan 28 17:21:43 crc kubenswrapper[4811]: I0128 17:21:43.859911 4811 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.859889168 podStartE2EDuration="2.859889168s" podCreationTimestamp="2026-01-28 17:21:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:21:43.841351643 +0000 UTC m=+5796.595715226" watchObservedRunningTime="2026-01-28 17:21:43.859889168 +0000 UTC m=+5796.614252751" Jan 28 17:21:43 crc kubenswrapper[4811]: I0128 17:21:43.889350 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.356285 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="633327c1-9418-4e00-b72f-3c742b26243c" path="/var/lib/kubelet/pods/633327c1-9418-4e00-b72f-3c742b26243c/volumes" Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.357196 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aedc4187-66e7-4af9-b508-5d6282f4a937" path="/var/lib/kubelet/pods/aedc4187-66e7-4af9-b508-5d6282f4a937/volumes" Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.450271 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.504273 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6sgcp\" (UniqueName: \"kubernetes.io/projected/41723cdb-d2fc-4716-9df8-2210e796b3aa-kube-api-access-6sgcp\") pod \"41723cdb-d2fc-4716-9df8-2210e796b3aa\" (UID: \"41723cdb-d2fc-4716-9df8-2210e796b3aa\") " Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.504566 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41723cdb-d2fc-4716-9df8-2210e796b3aa-config-data\") pod \"41723cdb-d2fc-4716-9df8-2210e796b3aa\" (UID: \"41723cdb-d2fc-4716-9df8-2210e796b3aa\") " Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.504690 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41723cdb-d2fc-4716-9df8-2210e796b3aa-logs\") pod \"41723cdb-d2fc-4716-9df8-2210e796b3aa\" (UID: \"41723cdb-d2fc-4716-9df8-2210e796b3aa\") " Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.504873 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41723cdb-d2fc-4716-9df8-2210e796b3aa-combined-ca-bundle\") pod \"41723cdb-d2fc-4716-9df8-2210e796b3aa\" (UID: \"41723cdb-d2fc-4716-9df8-2210e796b3aa\") " Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.511804 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41723cdb-d2fc-4716-9df8-2210e796b3aa-logs" (OuterVolumeSpecName: "logs") pod "41723cdb-d2fc-4716-9df8-2210e796b3aa" (UID: "41723cdb-d2fc-4716-9df8-2210e796b3aa"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.512055 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41723cdb-d2fc-4716-9df8-2210e796b3aa-kube-api-access-6sgcp" (OuterVolumeSpecName: "kube-api-access-6sgcp") pod "41723cdb-d2fc-4716-9df8-2210e796b3aa" (UID: "41723cdb-d2fc-4716-9df8-2210e796b3aa"). InnerVolumeSpecName "kube-api-access-6sgcp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.554948 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41723cdb-d2fc-4716-9df8-2210e796b3aa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "41723cdb-d2fc-4716-9df8-2210e796b3aa" (UID: "41723cdb-d2fc-4716-9df8-2210e796b3aa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.564534 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41723cdb-d2fc-4716-9df8-2210e796b3aa-config-data" (OuterVolumeSpecName: "config-data") pod "41723cdb-d2fc-4716-9df8-2210e796b3aa" (UID: "41723cdb-d2fc-4716-9df8-2210e796b3aa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.606710 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41723cdb-d2fc-4716-9df8-2210e796b3aa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.606735 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6sgcp\" (UniqueName: \"kubernetes.io/projected/41723cdb-d2fc-4716-9df8-2210e796b3aa-kube-api-access-6sgcp\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.606746 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41723cdb-d2fc-4716-9df8-2210e796b3aa-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.606756 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41723cdb-d2fc-4716-9df8-2210e796b3aa-logs\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.694220 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.809262 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c5dba6c-abfe-4686-895f-e051268d21db-logs\") pod \"3c5dba6c-abfe-4686-895f-e051268d21db\" (UID: \"3c5dba6c-abfe-4686-895f-e051268d21db\") " Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.809792 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c927b\" (UniqueName: \"kubernetes.io/projected/3c5dba6c-abfe-4686-895f-e051268d21db-kube-api-access-c927b\") pod \"3c5dba6c-abfe-4686-895f-e051268d21db\" (UID: \"3c5dba6c-abfe-4686-895f-e051268d21db\") " Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.809860 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c5dba6c-abfe-4686-895f-e051268d21db-combined-ca-bundle\") pod \"3c5dba6c-abfe-4686-895f-e051268d21db\" (UID: \"3c5dba6c-abfe-4686-895f-e051268d21db\") " Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.809939 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c5dba6c-abfe-4686-895f-e051268d21db-config-data\") pod \"3c5dba6c-abfe-4686-895f-e051268d21db\" (UID: \"3c5dba6c-abfe-4686-895f-e051268d21db\") " Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.809999 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c5dba6c-abfe-4686-895f-e051268d21db-logs" (OuterVolumeSpecName: "logs") pod "3c5dba6c-abfe-4686-895f-e051268d21db" (UID: "3c5dba6c-abfe-4686-895f-e051268d21db"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.810310 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c5dba6c-abfe-4686-895f-e051268d21db-logs\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.818286 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c5dba6c-abfe-4686-895f-e051268d21db-kube-api-access-c927b" (OuterVolumeSpecName: "kube-api-access-c927b") pod "3c5dba6c-abfe-4686-895f-e051268d21db" (UID: "3c5dba6c-abfe-4686-895f-e051268d21db"). InnerVolumeSpecName "kube-api-access-c927b". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.853146 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"41723cdb-d2fc-4716-9df8-2210e796b3aa","Type":"ContainerDied","Data":"ba6c4ccb0fda13e0da137a20855ff3df2b3e7213e2b620cc07aa65d7f432ad41"} Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.853203 4811 scope.go:117] "RemoveContainer" containerID="7f5fa9f058e9294267306c7a5cc31d03d67e84936435375d194e2ca7fe5bb870" Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.853349 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.871696 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.873604 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3c5dba6c-abfe-4686-895f-e051268d21db","Type":"ContainerDied","Data":"08585d3861746d90706c8d577ba84848623cff84c7c6f3278f3ee288185572b0"} Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.875761 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c5dba6c-abfe-4686-895f-e051268d21db-config-data" (OuterVolumeSpecName: "config-data") pod "3c5dba6c-abfe-4686-895f-e051268d21db" (UID: "3c5dba6c-abfe-4686-895f-e051268d21db"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.878963 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"3783b6d8-2a96-4199-98ae-03f37d52317c","Type":"ContainerStarted","Data":"2d4b5b10d2626bbf055cbaa147daa911685d289c63569ed2916effb3319aefbf"} Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.880392 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.895176 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"d19b204f-1218-4aa3-93c9-f09944ae4bc0","Type":"ContainerStarted","Data":"66f163a45abbabc6c8c264ea86fd2675faf11ab54796389dc0f6f0fb3cfb5c4a"} Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.895293 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"d19b204f-1218-4aa3-93c9-f09944ae4bc0","Type":"ContainerStarted","Data":"b230736fc32efd1466f6fe7e97cbcc668606ddd5440375308d123be1a76ac781"} Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.895356 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.911975 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c927b\" (UniqueName: \"kubernetes.io/projected/3c5dba6c-abfe-4686-895f-e051268d21db-kube-api-access-c927b\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.912024 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c5dba6c-abfe-4686-895f-e051268d21db-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.913818 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.913802551 podStartE2EDuration="2.913802551s" podCreationTimestamp="2026-01-28 17:21:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:21:44.907692324 +0000 UTC m=+5797.662055907" watchObservedRunningTime="2026-01-28 17:21:44.913802551 +0000 UTC m=+5797.668166134" Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.914028 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c5dba6c-abfe-4686-895f-e051268d21db-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3c5dba6c-abfe-4686-895f-e051268d21db" (UID: "3c5dba6c-abfe-4686-895f-e051268d21db"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.927715 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.927698719 podStartE2EDuration="2.927698719s" podCreationTimestamp="2026-01-28 17:21:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:21:44.921613303 +0000 UTC m=+5797.675976906" watchObservedRunningTime="2026-01-28 17:21:44.927698719 +0000 UTC m=+5797.682062292" Jan 28 17:21:44 crc kubenswrapper[4811]: I0128 17:21:44.969577 4811 scope.go:117] "RemoveContainer" containerID="9ab9e5bffe658dac1fa8b9977f65977c2b05ef3b52f36519b307f272ab1dc9b7" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.005560 4811 scope.go:117] "RemoveContainer" containerID="faf527c4d505cb091f08bf0e1f0bb44e3fe1cc078499bb4dd9817b9ab4d442f8" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.013227 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c5dba6c-abfe-4686-895f-e051268d21db-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.014197 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.038023 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.048632 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 28 17:21:45 crc kubenswrapper[4811]: E0128 17:21:45.049168 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41723cdb-d2fc-4716-9df8-2210e796b3aa" containerName="nova-metadata-log" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.049191 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="41723cdb-d2fc-4716-9df8-2210e796b3aa" containerName="nova-metadata-log" Jan 28 17:21:45 crc kubenswrapper[4811]: E0128 17:21:45.049210 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41723cdb-d2fc-4716-9df8-2210e796b3aa" containerName="nova-metadata-metadata" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.049220 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="41723cdb-d2fc-4716-9df8-2210e796b3aa" containerName="nova-metadata-metadata" Jan 28 17:21:45 crc kubenswrapper[4811]: E0128 17:21:45.049241 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c5dba6c-abfe-4686-895f-e051268d21db" containerName="nova-api-api" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.049251 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c5dba6c-abfe-4686-895f-e051268d21db" containerName="nova-api-api" Jan 28 17:21:45 crc kubenswrapper[4811]: E0128 17:21:45.049273 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c5dba6c-abfe-4686-895f-e051268d21db" containerName="nova-api-log" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.049281 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c5dba6c-abfe-4686-895f-e051268d21db" containerName="nova-api-log" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.049670 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="41723cdb-d2fc-4716-9df8-2210e796b3aa" containerName="nova-metadata-log" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 
17:21:45.049702 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c5dba6c-abfe-4686-895f-e051268d21db" containerName="nova-api-log" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.049720 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c5dba6c-abfe-4686-895f-e051268d21db" containerName="nova-api-api" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.049738 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="41723cdb-d2fc-4716-9df8-2210e796b3aa" containerName="nova-metadata-metadata" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.051037 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.052919 4811 scope.go:117] "RemoveContainer" containerID="cee7a9728b8d6d7b06e26029bbc1e36dfa9b9afd001fc2cf2f67008a69ef4a09" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.054955 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.060310 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.115356 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/991f7aba-d957-49d8-97ec-8a7044d24867-logs\") pod \"nova-metadata-0\" (UID: \"991f7aba-d957-49d8-97ec-8a7044d24867\") " pod="openstack/nova-metadata-0" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.115403 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/991f7aba-d957-49d8-97ec-8a7044d24867-config-data\") pod \"nova-metadata-0\" (UID: \"991f7aba-d957-49d8-97ec-8a7044d24867\") " pod="openstack/nova-metadata-0" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.115560 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/991f7aba-d957-49d8-97ec-8a7044d24867-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"991f7aba-d957-49d8-97ec-8a7044d24867\") " pod="openstack/nova-metadata-0" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.115585 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2jks\" (UniqueName: \"kubernetes.io/projected/991f7aba-d957-49d8-97ec-8a7044d24867-kube-api-access-k2jks\") pod \"nova-metadata-0\" (UID: \"991f7aba-d957-49d8-97ec-8a7044d24867\") " pod="openstack/nova-metadata-0" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.222257 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/991f7aba-d957-49d8-97ec-8a7044d24867-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"991f7aba-d957-49d8-97ec-8a7044d24867\") " pod="openstack/nova-metadata-0" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.222324 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2jks\" (UniqueName: \"kubernetes.io/projected/991f7aba-d957-49d8-97ec-8a7044d24867-kube-api-access-k2jks\") pod \"nova-metadata-0\" (UID: \"991f7aba-d957-49d8-97ec-8a7044d24867\") " pod="openstack/nova-metadata-0" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 
17:21:45.222467 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/991f7aba-d957-49d8-97ec-8a7044d24867-logs\") pod \"nova-metadata-0\" (UID: \"991f7aba-d957-49d8-97ec-8a7044d24867\") " pod="openstack/nova-metadata-0" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.222508 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/991f7aba-d957-49d8-97ec-8a7044d24867-config-data\") pod \"nova-metadata-0\" (UID: \"991f7aba-d957-49d8-97ec-8a7044d24867\") " pod="openstack/nova-metadata-0" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.227021 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/991f7aba-d957-49d8-97ec-8a7044d24867-logs\") pod \"nova-metadata-0\" (UID: \"991f7aba-d957-49d8-97ec-8a7044d24867\") " pod="openstack/nova-metadata-0" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.232566 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.238158 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/991f7aba-d957-49d8-97ec-8a7044d24867-config-data\") pod \"nova-metadata-0\" (UID: \"991f7aba-d957-49d8-97ec-8a7044d24867\") " pod="openstack/nova-metadata-0" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.239462 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/991f7aba-d957-49d8-97ec-8a7044d24867-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"991f7aba-d957-49d8-97ec-8a7044d24867\") " pod="openstack/nova-metadata-0" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.247237 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2jks\" (UniqueName: \"kubernetes.io/projected/991f7aba-d957-49d8-97ec-8a7044d24867-kube-api-access-k2jks\") pod \"nova-metadata-0\" (UID: \"991f7aba-d957-49d8-97ec-8a7044d24867\") " pod="openstack/nova-metadata-0" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.251878 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.262907 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.266195 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.272383 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.295564 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.324374 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6fd0120-4e8a-4982-b126-29b86e3c8e57-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b6fd0120-4e8a-4982-b126-29b86e3c8e57\") " pod="openstack/nova-api-0" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.324630 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cz9t2\" (UniqueName: \"kubernetes.io/projected/b6fd0120-4e8a-4982-b126-29b86e3c8e57-kube-api-access-cz9t2\") pod \"nova-api-0\" (UID: \"b6fd0120-4e8a-4982-b126-29b86e3c8e57\") " pod="openstack/nova-api-0" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.324773 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6fd0120-4e8a-4982-b126-29b86e3c8e57-config-data\") pod \"nova-api-0\" (UID: \"b6fd0120-4e8a-4982-b126-29b86e3c8e57\") " pod="openstack/nova-api-0" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.324951 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6fd0120-4e8a-4982-b126-29b86e3c8e57-logs\") pod \"nova-api-0\" (UID: \"b6fd0120-4e8a-4982-b126-29b86e3c8e57\") " pod="openstack/nova-api-0" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.378857 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.426112 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cz9t2\" (UniqueName: \"kubernetes.io/projected/b6fd0120-4e8a-4982-b126-29b86e3c8e57-kube-api-access-cz9t2\") pod \"nova-api-0\" (UID: \"b6fd0120-4e8a-4982-b126-29b86e3c8e57\") " pod="openstack/nova-api-0" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.426482 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6fd0120-4e8a-4982-b126-29b86e3c8e57-config-data\") pod \"nova-api-0\" (UID: \"b6fd0120-4e8a-4982-b126-29b86e3c8e57\") " pod="openstack/nova-api-0" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.427261 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6fd0120-4e8a-4982-b126-29b86e3c8e57-logs\") pod \"nova-api-0\" (UID: \"b6fd0120-4e8a-4982-b126-29b86e3c8e57\") " pod="openstack/nova-api-0" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.427347 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6fd0120-4e8a-4982-b126-29b86e3c8e57-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b6fd0120-4e8a-4982-b126-29b86e3c8e57\") " pod="openstack/nova-api-0" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.430208 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6fd0120-4e8a-4982-b126-29b86e3c8e57-logs\") pod \"nova-api-0\" (UID: \"b6fd0120-4e8a-4982-b126-29b86e3c8e57\") " pod="openstack/nova-api-0" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.431971 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6fd0120-4e8a-4982-b126-29b86e3c8e57-config-data\") pod \"nova-api-0\" (UID: \"b6fd0120-4e8a-4982-b126-29b86e3c8e57\") " pod="openstack/nova-api-0" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.438015 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6fd0120-4e8a-4982-b126-29b86e3c8e57-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b6fd0120-4e8a-4982-b126-29b86e3c8e57\") " pod="openstack/nova-api-0" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.445828 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cz9t2\" (UniqueName: \"kubernetes.io/projected/b6fd0120-4e8a-4982-b126-29b86e3c8e57-kube-api-access-cz9t2\") pod \"nova-api-0\" (UID: \"b6fd0120-4e8a-4982-b126-29b86e3c8e57\") " pod="openstack/nova-api-0" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.601010 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.856350 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 28 17:21:45 crc kubenswrapper[4811]: W0128 17:21:45.861449 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod991f7aba_d957_49d8_97ec_8a7044d24867.slice/crio-4895d28fc05599105f93465da42df124c752a70482f98f14380da6aa405d5ee5 WatchSource:0}: Error finding container 4895d28fc05599105f93465da42df124c752a70482f98f14380da6aa405d5ee5: Status 404 returned error can't find the container with id 4895d28fc05599105f93465da42df124c752a70482f98f14380da6aa405d5ee5 Jan 28 17:21:45 crc kubenswrapper[4811]: I0128 17:21:45.908970 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"991f7aba-d957-49d8-97ec-8a7044d24867","Type":"ContainerStarted","Data":"4895d28fc05599105f93465da42df124c752a70482f98f14380da6aa405d5ee5"} Jan 28 17:21:46 crc kubenswrapper[4811]: W0128 17:21:46.082343 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb6fd0120_4e8a_4982_b126_29b86e3c8e57.slice/crio-db85045ec46fa3ed8c4b0288d323bc21a784bce10b6b6f8774491e477b9f1711 WatchSource:0}: Error finding container db85045ec46fa3ed8c4b0288d323bc21a784bce10b6b6f8774491e477b9f1711: Status 404 returned error can't find the container with id db85045ec46fa3ed8c4b0288d323bc21a784bce10b6b6f8774491e477b9f1711 Jan 28 17:21:46 crc kubenswrapper[4811]: I0128 17:21:46.085299 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 28 17:21:46 crc kubenswrapper[4811]: I0128 17:21:46.350756 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c5dba6c-abfe-4686-895f-e051268d21db" path="/var/lib/kubelet/pods/3c5dba6c-abfe-4686-895f-e051268d21db/volumes" Jan 28 17:21:46 crc kubenswrapper[4811]: I0128 17:21:46.351456 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41723cdb-d2fc-4716-9df8-2210e796b3aa" path="/var/lib/kubelet/pods/41723cdb-d2fc-4716-9df8-2210e796b3aa/volumes" Jan 28 17:21:46 crc kubenswrapper[4811]: E0128 17:21:46.856764 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8bf851e64771cb1f848e36cdc9f8ef4ff73c948459eb7dd538f6d3bde876f14d" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 28 17:21:46 crc kubenswrapper[4811]: E0128 17:21:46.860063 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8bf851e64771cb1f848e36cdc9f8ef4ff73c948459eb7dd538f6d3bde876f14d" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 28 17:21:46 crc kubenswrapper[4811]: E0128 17:21:46.861374 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8bf851e64771cb1f848e36cdc9f8ef4ff73c948459eb7dd538f6d3bde876f14d" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 28 17:21:46 crc kubenswrapper[4811]: E0128 17:21:46.861769 4811 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command 
error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="a69222f8-b291-45e4-9be5-b7860cadb5db" containerName="nova-scheduler-scheduler" Jan 28 17:21:46 crc kubenswrapper[4811]: I0128 17:21:46.921283 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b6fd0120-4e8a-4982-b126-29b86e3c8e57","Type":"ContainerStarted","Data":"3e8a236327bd2d4b92c95d82d7c3d48100da3bb61879a5eb425ceadbc128d781"} Jan 28 17:21:46 crc kubenswrapper[4811]: I0128 17:21:46.921325 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b6fd0120-4e8a-4982-b126-29b86e3c8e57","Type":"ContainerStarted","Data":"5f79ad751a978a51849c4e3eebd3d33b9a6b9a61b42dcdc52aceb93fc402e47b"} Jan 28 17:21:46 crc kubenswrapper[4811]: I0128 17:21:46.921335 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b6fd0120-4e8a-4982-b126-29b86e3c8e57","Type":"ContainerStarted","Data":"db85045ec46fa3ed8c4b0288d323bc21a784bce10b6b6f8774491e477b9f1711"} Jan 28 17:21:46 crc kubenswrapper[4811]: I0128 17:21:46.924989 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"991f7aba-d957-49d8-97ec-8a7044d24867","Type":"ContainerStarted","Data":"b382d525493b04aeae4380545aa1d02d0452db6de98541672e63fc1dce4b009e"} Jan 28 17:21:46 crc kubenswrapper[4811]: I0128 17:21:46.925058 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"991f7aba-d957-49d8-97ec-8a7044d24867","Type":"ContainerStarted","Data":"5ebec4254260cf34f9638bc0fa9d4b6dccc78f96947628bbb6a609aa1012c692"} Jan 28 17:21:46 crc kubenswrapper[4811]: I0128 17:21:46.951029 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.951001523 podStartE2EDuration="1.951001523s" podCreationTimestamp="2026-01-28 17:21:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:21:46.946726267 +0000 UTC m=+5799.701089850" watchObservedRunningTime="2026-01-28 17:21:46.951001523 +0000 UTC m=+5799.705365136" Jan 28 17:21:46 crc kubenswrapper[4811]: I0128 17:21:46.979963 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.979939562 podStartE2EDuration="2.979939562s" podCreationTimestamp="2026-01-28 17:21:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:21:46.97327153 +0000 UTC m=+5799.727635113" watchObservedRunningTime="2026-01-28 17:21:46.979939562 +0000 UTC m=+5799.734303145" Jan 28 17:21:47 crc kubenswrapper[4811]: I0128 17:21:47.246113 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Jan 28 17:21:49 crc kubenswrapper[4811]: I0128 17:21:49.340750 4811 scope.go:117] "RemoveContainer" containerID="3abd2ca52dcea4f0bad2eccd9e9279be836bcb0c1530a2433f35225ded0da091" Jan 28 17:21:49 crc kubenswrapper[4811]: E0128 17:21:49.341553 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:21:50 crc kubenswrapper[4811]: I0128 17:21:50.379587 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 28 17:21:50 crc kubenswrapper[4811]: I0128 17:21:50.379906 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 28 17:21:50 crc kubenswrapper[4811]: I0128 17:21:50.785172 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 28 17:21:50 crc kubenswrapper[4811]: I0128 17:21:50.841203 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wctfg\" (UniqueName: \"kubernetes.io/projected/a69222f8-b291-45e4-9be5-b7860cadb5db-kube-api-access-wctfg\") pod \"a69222f8-b291-45e4-9be5-b7860cadb5db\" (UID: \"a69222f8-b291-45e4-9be5-b7860cadb5db\") " Jan 28 17:21:50 crc kubenswrapper[4811]: I0128 17:21:50.841290 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a69222f8-b291-45e4-9be5-b7860cadb5db-combined-ca-bundle\") pod \"a69222f8-b291-45e4-9be5-b7860cadb5db\" (UID: \"a69222f8-b291-45e4-9be5-b7860cadb5db\") " Jan 28 17:21:50 crc kubenswrapper[4811]: I0128 17:21:50.841347 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a69222f8-b291-45e4-9be5-b7860cadb5db-config-data\") pod \"a69222f8-b291-45e4-9be5-b7860cadb5db\" (UID: \"a69222f8-b291-45e4-9be5-b7860cadb5db\") " Jan 28 17:21:50 crc kubenswrapper[4811]: I0128 17:21:50.847607 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a69222f8-b291-45e4-9be5-b7860cadb5db-kube-api-access-wctfg" (OuterVolumeSpecName: "kube-api-access-wctfg") pod "a69222f8-b291-45e4-9be5-b7860cadb5db" (UID: "a69222f8-b291-45e4-9be5-b7860cadb5db"). InnerVolumeSpecName "kube-api-access-wctfg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:21:50 crc kubenswrapper[4811]: I0128 17:21:50.866681 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a69222f8-b291-45e4-9be5-b7860cadb5db-config-data" (OuterVolumeSpecName: "config-data") pod "a69222f8-b291-45e4-9be5-b7860cadb5db" (UID: "a69222f8-b291-45e4-9be5-b7860cadb5db"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:21:50 crc kubenswrapper[4811]: I0128 17:21:50.874066 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a69222f8-b291-45e4-9be5-b7860cadb5db-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a69222f8-b291-45e4-9be5-b7860cadb5db" (UID: "a69222f8-b291-45e4-9be5-b7860cadb5db"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:21:50 crc kubenswrapper[4811]: I0128 17:21:50.943853 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wctfg\" (UniqueName: \"kubernetes.io/projected/a69222f8-b291-45e4-9be5-b7860cadb5db-kube-api-access-wctfg\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:50 crc kubenswrapper[4811]: I0128 17:21:50.943904 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a69222f8-b291-45e4-9be5-b7860cadb5db-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:50 crc kubenswrapper[4811]: I0128 17:21:50.943914 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a69222f8-b291-45e4-9be5-b7860cadb5db-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 17:21:50 crc kubenswrapper[4811]: I0128 17:21:50.958511 4811 generic.go:334] "Generic (PLEG): container finished" podID="a69222f8-b291-45e4-9be5-b7860cadb5db" containerID="8bf851e64771cb1f848e36cdc9f8ef4ff73c948459eb7dd538f6d3bde876f14d" exitCode=0 Jan 28 17:21:50 crc kubenswrapper[4811]: I0128 17:21:50.958564 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a69222f8-b291-45e4-9be5-b7860cadb5db","Type":"ContainerDied","Data":"8bf851e64771cb1f848e36cdc9f8ef4ff73c948459eb7dd538f6d3bde876f14d"} Jan 28 17:21:50 crc kubenswrapper[4811]: I0128 17:21:50.958628 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a69222f8-b291-45e4-9be5-b7860cadb5db","Type":"ContainerDied","Data":"0e1ee62884a166d1544e02cda354db371fc10502a2df77e8c644569a67fb9797"} Jan 28 17:21:50 crc kubenswrapper[4811]: I0128 17:21:50.958667 4811 scope.go:117] "RemoveContainer" containerID="8bf851e64771cb1f848e36cdc9f8ef4ff73c948459eb7dd538f6d3bde876f14d" Jan 28 17:21:50 crc kubenswrapper[4811]: I0128 17:21:50.958592 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 28 17:21:50 crc kubenswrapper[4811]: I0128 17:21:50.997750 4811 scope.go:117] "RemoveContainer" containerID="8bf851e64771cb1f848e36cdc9f8ef4ff73c948459eb7dd538f6d3bde876f14d" Jan 28 17:21:51 crc kubenswrapper[4811]: E0128 17:21:51.004837 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8bf851e64771cb1f848e36cdc9f8ef4ff73c948459eb7dd538f6d3bde876f14d\": container with ID starting with 8bf851e64771cb1f848e36cdc9f8ef4ff73c948459eb7dd538f6d3bde876f14d not found: ID does not exist" containerID="8bf851e64771cb1f848e36cdc9f8ef4ff73c948459eb7dd538f6d3bde876f14d" Jan 28 17:21:51 crc kubenswrapper[4811]: I0128 17:21:51.004899 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8bf851e64771cb1f848e36cdc9f8ef4ff73c948459eb7dd538f6d3bde876f14d"} err="failed to get container status \"8bf851e64771cb1f848e36cdc9f8ef4ff73c948459eb7dd538f6d3bde876f14d\": rpc error: code = NotFound desc = could not find container \"8bf851e64771cb1f848e36cdc9f8ef4ff73c948459eb7dd538f6d3bde876f14d\": container with ID starting with 8bf851e64771cb1f848e36cdc9f8ef4ff73c948459eb7dd538f6d3bde876f14d not found: ID does not exist" Jan 28 17:21:51 crc kubenswrapper[4811]: I0128 17:21:51.006216 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 28 17:21:51 crc kubenswrapper[4811]: I0128 17:21:51.022788 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Jan 28 17:21:51 crc kubenswrapper[4811]: I0128 17:21:51.046328 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Jan 28 17:21:51 crc kubenswrapper[4811]: E0128 17:21:51.046887 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a69222f8-b291-45e4-9be5-b7860cadb5db" containerName="nova-scheduler-scheduler" Jan 28 17:21:51 crc kubenswrapper[4811]: I0128 17:21:51.046914 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="a69222f8-b291-45e4-9be5-b7860cadb5db" containerName="nova-scheduler-scheduler" Jan 28 17:21:51 crc kubenswrapper[4811]: I0128 17:21:51.047148 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="a69222f8-b291-45e4-9be5-b7860cadb5db" containerName="nova-scheduler-scheduler" Jan 28 17:21:51 crc kubenswrapper[4811]: I0128 17:21:51.047990 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 28 17:21:51 crc kubenswrapper[4811]: I0128 17:21:51.050776 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Jan 28 17:21:51 crc kubenswrapper[4811]: I0128 17:21:51.058817 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 28 17:21:51 crc kubenswrapper[4811]: I0128 17:21:51.148416 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmrv8\" (UniqueName: \"kubernetes.io/projected/17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a-kube-api-access-rmrv8\") pod \"nova-scheduler-0\" (UID: \"17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a\") " pod="openstack/nova-scheduler-0" Jan 28 17:21:51 crc kubenswrapper[4811]: I0128 17:21:51.148577 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a-config-data\") pod \"nova-scheduler-0\" (UID: \"17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a\") " pod="openstack/nova-scheduler-0" Jan 28 17:21:51 crc kubenswrapper[4811]: I0128 17:21:51.148606 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a\") " pod="openstack/nova-scheduler-0" Jan 28 17:21:51 crc kubenswrapper[4811]: I0128 17:21:51.249907 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmrv8\" (UniqueName: \"kubernetes.io/projected/17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a-kube-api-access-rmrv8\") pod \"nova-scheduler-0\" (UID: \"17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a\") " pod="openstack/nova-scheduler-0" Jan 28 17:21:51 crc kubenswrapper[4811]: I0128 17:21:51.250014 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a-config-data\") pod \"nova-scheduler-0\" (UID: \"17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a\") " pod="openstack/nova-scheduler-0" Jan 28 17:21:51 crc kubenswrapper[4811]: I0128 17:21:51.250033 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a\") " pod="openstack/nova-scheduler-0" Jan 28 17:21:51 crc kubenswrapper[4811]: I0128 17:21:51.254204 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a-config-data\") pod \"nova-scheduler-0\" (UID: \"17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a\") " pod="openstack/nova-scheduler-0" Jan 28 17:21:51 crc kubenswrapper[4811]: I0128 17:21:51.262593 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a\") " pod="openstack/nova-scheduler-0" Jan 28 17:21:51 crc kubenswrapper[4811]: I0128 17:21:51.264683 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmrv8\" (UniqueName: 
\"kubernetes.io/projected/17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a-kube-api-access-rmrv8\") pod \"nova-scheduler-0\" (UID: \"17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a\") " pod="openstack/nova-scheduler-0" Jan 28 17:21:51 crc kubenswrapper[4811]: I0128 17:21:51.398295 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 28 17:21:51 crc kubenswrapper[4811]: I0128 17:21:51.832970 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 28 17:21:51 crc kubenswrapper[4811]: I0128 17:21:51.972496 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a","Type":"ContainerStarted","Data":"8db34fd78f6aa480cfb08c5a3acb978f204ccd67612dd055980682a13b247161"} Jan 28 17:21:52 crc kubenswrapper[4811]: I0128 17:21:52.246311 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Jan 28 17:21:52 crc kubenswrapper[4811]: I0128 17:21:52.257819 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Jan 28 17:21:52 crc kubenswrapper[4811]: I0128 17:21:52.366221 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a69222f8-b291-45e4-9be5-b7860cadb5db" path="/var/lib/kubelet/pods/a69222f8-b291-45e4-9be5-b7860cadb5db/volumes" Jan 28 17:21:52 crc kubenswrapper[4811]: I0128 17:21:52.986340 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a","Type":"ContainerStarted","Data":"dc0d055121aa021e2428e8cbafe622fcf332a04bb98f3e1168f791ab6179a7be"} Jan 28 17:21:52 crc kubenswrapper[4811]: I0128 17:21:52.998883 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Jan 28 17:21:53 crc kubenswrapper[4811]: I0128 17:21:53.011782 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.011762789 podStartE2EDuration="3.011762789s" podCreationTimestamp="2026-01-28 17:21:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:21:53.007087952 +0000 UTC m=+5805.761451545" watchObservedRunningTime="2026-01-28 17:21:53.011762789 +0000 UTC m=+5805.766126372" Jan 28 17:21:53 crc kubenswrapper[4811]: I0128 17:21:53.269896 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Jan 28 17:21:53 crc kubenswrapper[4811]: I0128 17:21:53.326358 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Jan 28 17:21:55 crc kubenswrapper[4811]: I0128 17:21:55.380103 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 28 17:21:55 crc kubenswrapper[4811]: I0128 17:21:55.380506 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 28 17:21:55 crc kubenswrapper[4811]: I0128 17:21:55.601761 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 28 17:21:55 crc kubenswrapper[4811]: I0128 17:21:55.601821 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 28 17:21:56 crc kubenswrapper[4811]: I0128 
17:21:56.399973 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Jan 28 17:21:56 crc kubenswrapper[4811]: I0128 17:21:56.462697 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="991f7aba-d957-49d8-97ec-8a7044d24867" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.85:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 28 17:21:56 crc kubenswrapper[4811]: I0128 17:21:56.462854 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="991f7aba-d957-49d8-97ec-8a7044d24867" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.85:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 28 17:21:56 crc kubenswrapper[4811]: I0128 17:21:56.684735 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b6fd0120-4e8a-4982-b126-29b86e3c8e57" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.86:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 28 17:21:56 crc kubenswrapper[4811]: I0128 17:21:56.685161 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b6fd0120-4e8a-4982-b126-29b86e3c8e57" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.86:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 28 17:21:57 crc kubenswrapper[4811]: E0128 17:21:57.992132 4811 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.233:35368->38.102.83.233:37263: write tcp 38.102.83.233:35368->38.102.83.233:37263: write: broken pipe Jan 28 17:21:59 crc kubenswrapper[4811]: I0128 17:21:59.039325 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Jan 28 17:21:59 crc kubenswrapper[4811]: I0128 17:21:59.044717 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 28 17:21:59 crc kubenswrapper[4811]: I0128 17:21:59.047231 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Jan 28 17:21:59 crc kubenswrapper[4811]: I0128 17:21:59.060679 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 28 17:21:59 crc kubenswrapper[4811]: I0128 17:21:59.101407 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-scripts\") pod \"cinder-scheduler-0\" (UID: \"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa\") " pod="openstack/cinder-scheduler-0" Jan 28 17:21:59 crc kubenswrapper[4811]: I0128 17:21:59.101830 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-config-data\") pod \"cinder-scheduler-0\" (UID: \"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa\") " pod="openstack/cinder-scheduler-0" Jan 28 17:21:59 crc kubenswrapper[4811]: I0128 17:21:59.102107 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa\") " pod="openstack/cinder-scheduler-0" Jan 28 17:21:59 crc kubenswrapper[4811]: I0128 17:21:59.102302 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa\") " pod="openstack/cinder-scheduler-0" Jan 28 17:21:59 crc kubenswrapper[4811]: I0128 17:21:59.102374 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa\") " pod="openstack/cinder-scheduler-0" Jan 28 17:21:59 crc kubenswrapper[4811]: I0128 17:21:59.102442 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9hv5\" (UniqueName: \"kubernetes.io/projected/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-kube-api-access-f9hv5\") pod \"cinder-scheduler-0\" (UID: \"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa\") " pod="openstack/cinder-scheduler-0" Jan 28 17:21:59 crc kubenswrapper[4811]: I0128 17:21:59.203600 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa\") " pod="openstack/cinder-scheduler-0" Jan 28 17:21:59 crc kubenswrapper[4811]: I0128 17:21:59.203661 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa\") " pod="openstack/cinder-scheduler-0" Jan 28 17:21:59 crc kubenswrapper[4811]: I0128 17:21:59.203700 4811 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-f9hv5\" (UniqueName: \"kubernetes.io/projected/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-kube-api-access-f9hv5\") pod \"cinder-scheduler-0\" (UID: \"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa\") " pod="openstack/cinder-scheduler-0" Jan 28 17:21:59 crc kubenswrapper[4811]: I0128 17:21:59.203762 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-scripts\") pod \"cinder-scheduler-0\" (UID: \"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa\") " pod="openstack/cinder-scheduler-0" Jan 28 17:21:59 crc kubenswrapper[4811]: I0128 17:21:59.203789 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa\") " pod="openstack/cinder-scheduler-0" Jan 28 17:21:59 crc kubenswrapper[4811]: I0128 17:21:59.203829 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-config-data\") pod \"cinder-scheduler-0\" (UID: \"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa\") " pod="openstack/cinder-scheduler-0" Jan 28 17:21:59 crc kubenswrapper[4811]: I0128 17:21:59.203891 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa\") " pod="openstack/cinder-scheduler-0" Jan 28 17:21:59 crc kubenswrapper[4811]: I0128 17:21:59.210103 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa\") " pod="openstack/cinder-scheduler-0" Jan 28 17:21:59 crc kubenswrapper[4811]: I0128 17:21:59.210138 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa\") " pod="openstack/cinder-scheduler-0" Jan 28 17:21:59 crc kubenswrapper[4811]: I0128 17:21:59.210492 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-config-data\") pod \"cinder-scheduler-0\" (UID: \"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa\") " pod="openstack/cinder-scheduler-0" Jan 28 17:21:59 crc kubenswrapper[4811]: I0128 17:21:59.220327 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-scripts\") pod \"cinder-scheduler-0\" (UID: \"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa\") " pod="openstack/cinder-scheduler-0" Jan 28 17:21:59 crc kubenswrapper[4811]: I0128 17:21:59.220641 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9hv5\" (UniqueName: \"kubernetes.io/projected/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-kube-api-access-f9hv5\") pod \"cinder-scheduler-0\" (UID: \"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa\") " pod="openstack/cinder-scheduler-0" Jan 28 17:21:59 
crc kubenswrapper[4811]: I0128 17:21:59.378920 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 28 17:21:59 crc kubenswrapper[4811]: W0128 17:21:59.832659 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podecc03a47_4c6b_41e4_aa08_1c87f51c7eaa.slice/crio-d6bdcb0c9c7daafccab9df4ff954efebe3dcdeb5f0876c7e3bedf6277254340f WatchSource:0}: Error finding container d6bdcb0c9c7daafccab9df4ff954efebe3dcdeb5f0876c7e3bedf6277254340f: Status 404 returned error can't find the container with id d6bdcb0c9c7daafccab9df4ff954efebe3dcdeb5f0876c7e3bedf6277254340f Jan 28 17:21:59 crc kubenswrapper[4811]: I0128 17:21:59.835025 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 28 17:22:00 crc kubenswrapper[4811]: I0128 17:22:00.049714 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa","Type":"ContainerStarted","Data":"d6bdcb0c9c7daafccab9df4ff954efebe3dcdeb5f0876c7e3bedf6277254340f"} Jan 28 17:22:00 crc kubenswrapper[4811]: I0128 17:22:00.339901 4811 scope.go:117] "RemoveContainer" containerID="3abd2ca52dcea4f0bad2eccd9e9279be836bcb0c1530a2433f35225ded0da091" Jan 28 17:22:00 crc kubenswrapper[4811]: E0128 17:22:00.340496 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:22:00 crc kubenswrapper[4811]: I0128 17:22:00.774395 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jan 28 17:22:00 crc kubenswrapper[4811]: I0128 17:22:00.775292 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="017a6c23-059b-4451-a487-660ef4033bc1" containerName="cinder-api-log" containerID="cri-o://5f6c34b1699ae0be2dcba3224e15a64455f7b0a01acfbec742ad2a0639ee1edb" gracePeriod=30 Jan 28 17:22:00 crc kubenswrapper[4811]: I0128 17:22:00.776068 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="017a6c23-059b-4451-a487-660ef4033bc1" containerName="cinder-api" containerID="cri-o://bb8659c4081958bfe7fa364c23e63d9e2bff60b485f1c51cf2ed5e0aeddce143" gracePeriod=30 Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.067179 4811 generic.go:334] "Generic (PLEG): container finished" podID="017a6c23-059b-4451-a487-660ef4033bc1" containerID="5f6c34b1699ae0be2dcba3224e15a64455f7b0a01acfbec742ad2a0639ee1edb" exitCode=143 Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.067281 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"017a6c23-059b-4451-a487-660ef4033bc1","Type":"ContainerDied","Data":"5f6c34b1699ae0be2dcba3224e15a64455f7b0a01acfbec742ad2a0639ee1edb"} Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.071325 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa","Type":"ContainerStarted","Data":"d5e2fdfc75f6779847130c576eb9e4beb9b75dd6e36ae314d84ec42bf30d007e"} Jan 28 17:22:01 crc 
kubenswrapper[4811]: I0128 17:22:01.071367 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa","Type":"ContainerStarted","Data":"e91e320e134c880ced8ed65002d882edd39223690a4b42ed3ed761d2cddd5787"} Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.095408 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=2.095387987 podStartE2EDuration="2.095387987s" podCreationTimestamp="2026-01-28 17:21:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:22:01.090348259 +0000 UTC m=+5813.844711842" watchObservedRunningTime="2026-01-28 17:22:01.095387987 +0000 UTC m=+5813.849751570" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.371735 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-volume1-0"] Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.373889 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.375461 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-volume1-config-data" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.384632 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.399109 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.447643 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.448011 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zw9n2\" (UniqueName: \"kubernetes.io/projected/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-kube-api-access-zw9n2\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.448063 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.450703 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.452015 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.452071 4811 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-dev\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.452098 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.452199 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.452245 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-run\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.452270 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.452449 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.452499 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.452528 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.452661 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.452698 4811 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.452760 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-sys\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.452822 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.558579 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.558644 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-run\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.558674 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.558714 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.558737 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.558758 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.558831 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-combined-ca-bundle\") 
pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.558852 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.558897 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-sys\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.558897 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.558956 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.558998 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.559022 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.559039 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zw9n2\" (UniqueName: \"kubernetes.io/projected/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-kube-api-access-zw9n2\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.559093 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.559167 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.559197 4811 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-dev\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.559218 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.559389 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.559452 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-sys\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.559529 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.559567 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.559809 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.559818 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.559850 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-dev\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.560705 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-run\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 
17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.564199 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.565227 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.567120 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.571089 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.578339 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.587494 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zw9n2\" (UniqueName: \"kubernetes.io/projected/c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4-kube-api-access-zw9n2\") pod \"cinder-volume-volume1-0\" (UID: \"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4\") " pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:01 crc kubenswrapper[4811]: I0128 17:22:01.734057 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.107476 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.152124 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"] Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.153788 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.157413 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.165454 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.273887 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/8df06644-e477-431f-893c-ebabbc754a77-lib-modules\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.273960 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/8df06644-e477-431f-893c-ebabbc754a77-ceph\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.274003 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8df06644-e477-431f-893c-ebabbc754a77-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.274028 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/8df06644-e477-431f-893c-ebabbc754a77-dev\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.274065 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/8df06644-e477-431f-893c-ebabbc754a77-etc-nvme\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.274510 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/8df06644-e477-431f-893c-ebabbc754a77-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.274589 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7rv2\" (UniqueName: \"kubernetes.io/projected/8df06644-e477-431f-893c-ebabbc754a77-kube-api-access-x7rv2\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.274655 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/8df06644-e477-431f-893c-ebabbc754a77-run\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.274689 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: 
\"kubernetes.io/host-path/8df06644-e477-431f-893c-ebabbc754a77-sys\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.274806 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8df06644-e477-431f-893c-ebabbc754a77-scripts\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.274837 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/8df06644-e477-431f-893c-ebabbc754a77-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.274924 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/8df06644-e477-431f-893c-ebabbc754a77-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.274970 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8df06644-e477-431f-893c-ebabbc754a77-config-data\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.275071 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8df06644-e477-431f-893c-ebabbc754a77-config-data-custom\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.275155 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/8df06644-e477-431f-893c-ebabbc754a77-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.275201 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8df06644-e477-431f-893c-ebabbc754a77-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.336743 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Jan 28 17:22:02 crc kubenswrapper[4811]: W0128 17:22:02.343917 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc4f66ea0_a4fb_44c3_8390_4bbdedeb61b4.slice/crio-c729181103ec658f39195a95ed968cb9aa5bc5af00a9aca98a2b85d299be4d4f WatchSource:0}: Error finding container c729181103ec658f39195a95ed968cb9aa5bc5af00a9aca98a2b85d299be4d4f: Status 404 returned error can't find the container with id c729181103ec658f39195a95ed968cb9aa5bc5af00a9aca98a2b85d299be4d4f Jan 28 
17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.379114 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/8df06644-e477-431f-893c-ebabbc754a77-lib-modules\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.379444 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/8df06644-e477-431f-893c-ebabbc754a77-ceph\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.379478 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8df06644-e477-431f-893c-ebabbc754a77-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.379499 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/8df06644-e477-431f-893c-ebabbc754a77-dev\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.379523 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/8df06644-e477-431f-893c-ebabbc754a77-etc-nvme\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.379774 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/8df06644-e477-431f-893c-ebabbc754a77-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.379794 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7rv2\" (UniqueName: \"kubernetes.io/projected/8df06644-e477-431f-893c-ebabbc754a77-kube-api-access-x7rv2\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.379816 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/8df06644-e477-431f-893c-ebabbc754a77-run\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.379830 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/8df06644-e477-431f-893c-ebabbc754a77-sys\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.379865 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8df06644-e477-431f-893c-ebabbc754a77-scripts\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 
17:22:02.379881 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/8df06644-e477-431f-893c-ebabbc754a77-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.379924 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/8df06644-e477-431f-893c-ebabbc754a77-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.379939 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8df06644-e477-431f-893c-ebabbc754a77-config-data\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.380118 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/8df06644-e477-431f-893c-ebabbc754a77-dev\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.380307 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8df06644-e477-431f-893c-ebabbc754a77-config-data-custom\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.380109 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/8df06644-e477-431f-893c-ebabbc754a77-sys\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.380401 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/8df06644-e477-431f-893c-ebabbc754a77-run\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.380079 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8df06644-e477-431f-893c-ebabbc754a77-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.380424 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/8df06644-e477-431f-893c-ebabbc754a77-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.379258 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/8df06644-e477-431f-893c-ebabbc754a77-lib-modules\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.380790 4811 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/8df06644-e477-431f-893c-ebabbc754a77-etc-nvme\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.380874 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/8df06644-e477-431f-893c-ebabbc754a77-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.380912 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8df06644-e477-431f-893c-ebabbc754a77-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.381191 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/8df06644-e477-431f-893c-ebabbc754a77-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.381208 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/8df06644-e477-431f-893c-ebabbc754a77-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.381267 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/8df06644-e477-431f-893c-ebabbc754a77-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.392294 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8df06644-e477-431f-893c-ebabbc754a77-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.400261 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/8df06644-e477-431f-893c-ebabbc754a77-ceph\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.400898 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8df06644-e477-431f-893c-ebabbc754a77-config-data\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.401977 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8df06644-e477-431f-893c-ebabbc754a77-scripts\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.405957 4811 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8df06644-e477-431f-893c-ebabbc754a77-config-data-custom\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.434176 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7rv2\" (UniqueName: \"kubernetes.io/projected/8df06644-e477-431f-893c-ebabbc754a77-kube-api-access-x7rv2\") pod \"cinder-backup-0\" (UID: \"8df06644-e477-431f-893c-ebabbc754a77\") " pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.443908 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hfmlk"] Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.445769 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hfmlk" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.475739 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hfmlk"] Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.525334 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.610593 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hv9q\" (UniqueName: \"kubernetes.io/projected/9b1c7e2d-5844-4403-a5ca-cad7b15f0a32-kube-api-access-4hv9q\") pod \"redhat-operators-hfmlk\" (UID: \"9b1c7e2d-5844-4403-a5ca-cad7b15f0a32\") " pod="openshift-marketplace/redhat-operators-hfmlk" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.610686 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b1c7e2d-5844-4403-a5ca-cad7b15f0a32-catalog-content\") pod \"redhat-operators-hfmlk\" (UID: \"9b1c7e2d-5844-4403-a5ca-cad7b15f0a32\") " pod="openshift-marketplace/redhat-operators-hfmlk" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.610829 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b1c7e2d-5844-4403-a5ca-cad7b15f0a32-utilities\") pod \"redhat-operators-hfmlk\" (UID: \"9b1c7e2d-5844-4403-a5ca-cad7b15f0a32\") " pod="openshift-marketplace/redhat-operators-hfmlk" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.712937 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b1c7e2d-5844-4403-a5ca-cad7b15f0a32-utilities\") pod \"redhat-operators-hfmlk\" (UID: \"9b1c7e2d-5844-4403-a5ca-cad7b15f0a32\") " pod="openshift-marketplace/redhat-operators-hfmlk" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.713227 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hv9q\" (UniqueName: \"kubernetes.io/projected/9b1c7e2d-5844-4403-a5ca-cad7b15f0a32-kube-api-access-4hv9q\") pod \"redhat-operators-hfmlk\" (UID: \"9b1c7e2d-5844-4403-a5ca-cad7b15f0a32\") " pod="openshift-marketplace/redhat-operators-hfmlk" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.713283 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/9b1c7e2d-5844-4403-a5ca-cad7b15f0a32-catalog-content\") pod \"redhat-operators-hfmlk\" (UID: \"9b1c7e2d-5844-4403-a5ca-cad7b15f0a32\") " pod="openshift-marketplace/redhat-operators-hfmlk" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.713805 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b1c7e2d-5844-4403-a5ca-cad7b15f0a32-utilities\") pod \"redhat-operators-hfmlk\" (UID: \"9b1c7e2d-5844-4403-a5ca-cad7b15f0a32\") " pod="openshift-marketplace/redhat-operators-hfmlk" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.713949 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b1c7e2d-5844-4403-a5ca-cad7b15f0a32-catalog-content\") pod \"redhat-operators-hfmlk\" (UID: \"9b1c7e2d-5844-4403-a5ca-cad7b15f0a32\") " pod="openshift-marketplace/redhat-operators-hfmlk" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.735889 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hv9q\" (UniqueName: \"kubernetes.io/projected/9b1c7e2d-5844-4403-a5ca-cad7b15f0a32-kube-api-access-4hv9q\") pod \"redhat-operators-hfmlk\" (UID: \"9b1c7e2d-5844-4403-a5ca-cad7b15f0a32\") " pod="openshift-marketplace/redhat-operators-hfmlk" Jan 28 17:22:02 crc kubenswrapper[4811]: I0128 17:22:02.777524 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hfmlk" Jan 28 17:22:03 crc kubenswrapper[4811]: I0128 17:22:03.098507 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4","Type":"ContainerStarted","Data":"c729181103ec658f39195a95ed968cb9aa5bc5af00a9aca98a2b85d299be4d4f"} Jan 28 17:22:03 crc kubenswrapper[4811]: I0128 17:22:03.270452 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Jan 28 17:22:03 crc kubenswrapper[4811]: I0128 17:22:03.312067 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hfmlk"] Jan 28 17:22:03 crc kubenswrapper[4811]: I0128 17:22:03.952235 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="017a6c23-059b-4451-a487-660ef4033bc1" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.1.81:8776/healthcheck\": read tcp 10.217.0.2:53604->10.217.1.81:8776: read: connection reset by peer" Jan 28 17:22:04 crc kubenswrapper[4811]: I0128 17:22:04.117173 4811 generic.go:334] "Generic (PLEG): container finished" podID="9b1c7e2d-5844-4403-a5ca-cad7b15f0a32" containerID="0575bbaea2b2aa570c7a1620ff0b4f4ac859eb3031572cf984ec3bc3b806de78" exitCode=0 Jan 28 17:22:04 crc kubenswrapper[4811]: I0128 17:22:04.117529 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hfmlk" event={"ID":"9b1c7e2d-5844-4403-a5ca-cad7b15f0a32","Type":"ContainerDied","Data":"0575bbaea2b2aa570c7a1620ff0b4f4ac859eb3031572cf984ec3bc3b806de78"} Jan 28 17:22:04 crc kubenswrapper[4811]: I0128 17:22:04.117562 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hfmlk" event={"ID":"9b1c7e2d-5844-4403-a5ca-cad7b15f0a32","Type":"ContainerStarted","Data":"de501524387387498426ebb8ea01b584656ccd5eb6a4cca8acb4e86571909bc8"} Jan 28 17:22:04 crc kubenswrapper[4811]: I0128 17:22:04.122818 4811 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"8df06644-e477-431f-893c-ebabbc754a77","Type":"ContainerStarted","Data":"dee5eff7bdae18b69c54a3b30c7606a337c1d8c977d0abe0d9bbcf7724f48f92"} Jan 28 17:22:04 crc kubenswrapper[4811]: I0128 17:22:04.134215 4811 generic.go:334] "Generic (PLEG): container finished" podID="017a6c23-059b-4451-a487-660ef4033bc1" containerID="bb8659c4081958bfe7fa364c23e63d9e2bff60b485f1c51cf2ed5e0aeddce143" exitCode=0 Jan 28 17:22:04 crc kubenswrapper[4811]: I0128 17:22:04.134262 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"017a6c23-059b-4451-a487-660ef4033bc1","Type":"ContainerDied","Data":"bb8659c4081958bfe7fa364c23e63d9e2bff60b485f1c51cf2ed5e0aeddce143"} Jan 28 17:22:04 crc kubenswrapper[4811]: I0128 17:22:04.382877 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Jan 28 17:22:04 crc kubenswrapper[4811]: I0128 17:22:04.450256 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 28 17:22:04 crc kubenswrapper[4811]: I0128 17:22:04.569691 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/017a6c23-059b-4451-a487-660ef4033bc1-combined-ca-bundle\") pod \"017a6c23-059b-4451-a487-660ef4033bc1\" (UID: \"017a6c23-059b-4451-a487-660ef4033bc1\") " Jan 28 17:22:04 crc kubenswrapper[4811]: I0128 17:22:04.570074 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/017a6c23-059b-4451-a487-660ef4033bc1-scripts\") pod \"017a6c23-059b-4451-a487-660ef4033bc1\" (UID: \"017a6c23-059b-4451-a487-660ef4033bc1\") " Jan 28 17:22:04 crc kubenswrapper[4811]: I0128 17:22:04.570147 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-glff5\" (UniqueName: \"kubernetes.io/projected/017a6c23-059b-4451-a487-660ef4033bc1-kube-api-access-glff5\") pod \"017a6c23-059b-4451-a487-660ef4033bc1\" (UID: \"017a6c23-059b-4451-a487-660ef4033bc1\") " Jan 28 17:22:04 crc kubenswrapper[4811]: I0128 17:22:04.570242 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/017a6c23-059b-4451-a487-660ef4033bc1-config-data-custom\") pod \"017a6c23-059b-4451-a487-660ef4033bc1\" (UID: \"017a6c23-059b-4451-a487-660ef4033bc1\") " Jan 28 17:22:04 crc kubenswrapper[4811]: I0128 17:22:04.570309 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/017a6c23-059b-4451-a487-660ef4033bc1-config-data\") pod \"017a6c23-059b-4451-a487-660ef4033bc1\" (UID: \"017a6c23-059b-4451-a487-660ef4033bc1\") " Jan 28 17:22:04 crc kubenswrapper[4811]: I0128 17:22:04.571415 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/017a6c23-059b-4451-a487-660ef4033bc1-logs\") pod \"017a6c23-059b-4451-a487-660ef4033bc1\" (UID: \"017a6c23-059b-4451-a487-660ef4033bc1\") " Jan 28 17:22:04 crc kubenswrapper[4811]: I0128 17:22:04.571486 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/017a6c23-059b-4451-a487-660ef4033bc1-etc-machine-id\") pod \"017a6c23-059b-4451-a487-660ef4033bc1\" (UID: 
\"017a6c23-059b-4451-a487-660ef4033bc1\") " Jan 28 17:22:04 crc kubenswrapper[4811]: I0128 17:22:04.573225 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/017a6c23-059b-4451-a487-660ef4033bc1-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "017a6c23-059b-4451-a487-660ef4033bc1" (UID: "017a6c23-059b-4451-a487-660ef4033bc1"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 17:22:04 crc kubenswrapper[4811]: I0128 17:22:04.576843 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/017a6c23-059b-4451-a487-660ef4033bc1-logs" (OuterVolumeSpecName: "logs") pod "017a6c23-059b-4451-a487-660ef4033bc1" (UID: "017a6c23-059b-4451-a487-660ef4033bc1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:22:04 crc kubenswrapper[4811]: I0128 17:22:04.586563 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/017a6c23-059b-4451-a487-660ef4033bc1-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "017a6c23-059b-4451-a487-660ef4033bc1" (UID: "017a6c23-059b-4451-a487-660ef4033bc1"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:22:04 crc kubenswrapper[4811]: I0128 17:22:04.587130 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/017a6c23-059b-4451-a487-660ef4033bc1-scripts" (OuterVolumeSpecName: "scripts") pod "017a6c23-059b-4451-a487-660ef4033bc1" (UID: "017a6c23-059b-4451-a487-660ef4033bc1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:22:04 crc kubenswrapper[4811]: I0128 17:22:04.600164 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/017a6c23-059b-4451-a487-660ef4033bc1-kube-api-access-glff5" (OuterVolumeSpecName: "kube-api-access-glff5") pod "017a6c23-059b-4451-a487-660ef4033bc1" (UID: "017a6c23-059b-4451-a487-660ef4033bc1"). InnerVolumeSpecName "kube-api-access-glff5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:22:04 crc kubenswrapper[4811]: I0128 17:22:04.675778 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/017a6c23-059b-4451-a487-660ef4033bc1-logs\") on node \"crc\" DevicePath \"\"" Jan 28 17:22:04 crc kubenswrapper[4811]: I0128 17:22:04.675820 4811 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/017a6c23-059b-4451-a487-660ef4033bc1-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 28 17:22:04 crc kubenswrapper[4811]: I0128 17:22:04.675831 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/017a6c23-059b-4451-a487-660ef4033bc1-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:22:04 crc kubenswrapper[4811]: I0128 17:22:04.675842 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-glff5\" (UniqueName: \"kubernetes.io/projected/017a6c23-059b-4451-a487-660ef4033bc1-kube-api-access-glff5\") on node \"crc\" DevicePath \"\"" Jan 28 17:22:04 crc kubenswrapper[4811]: I0128 17:22:04.675855 4811 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/017a6c23-059b-4451-a487-660ef4033bc1-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 28 17:22:04 crc kubenswrapper[4811]: I0128 17:22:04.686911 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/017a6c23-059b-4451-a487-660ef4033bc1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "017a6c23-059b-4451-a487-660ef4033bc1" (UID: "017a6c23-059b-4451-a487-660ef4033bc1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:22:04 crc kubenswrapper[4811]: I0128 17:22:04.727829 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/017a6c23-059b-4451-a487-660ef4033bc1-config-data" (OuterVolumeSpecName: "config-data") pod "017a6c23-059b-4451-a487-660ef4033bc1" (UID: "017a6c23-059b-4451-a487-660ef4033bc1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:22:04 crc kubenswrapper[4811]: I0128 17:22:04.778216 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/017a6c23-059b-4451-a487-660ef4033bc1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 17:22:04 crc kubenswrapper[4811]: I0128 17:22:04.778646 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/017a6c23-059b-4451-a487-660ef4033bc1-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.149997 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hfmlk" event={"ID":"9b1c7e2d-5844-4403-a5ca-cad7b15f0a32","Type":"ContainerStarted","Data":"390f2c5b66b556e2fab0b468ed4224432b34169005478c36879593310fea6e27"} Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.153271 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4","Type":"ContainerStarted","Data":"ef42ebc705b92591038285f10dd91b18e6cc3553a8c3e0442d1f44b977c79f6d"} Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.153324 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4","Type":"ContainerStarted","Data":"677bcd53c8dac73bc66e554dd1aac917154289371e40e132b9e7708aad36fa13"} Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.161795 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"8df06644-e477-431f-893c-ebabbc754a77","Type":"ContainerStarted","Data":"5d036e31dbeb0e2f534549fc55628afe8704b1b774723a34c074163663cb6690"} Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.161850 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"8df06644-e477-431f-893c-ebabbc754a77","Type":"ContainerStarted","Data":"d5270948efe31ceef7be3ad8ec4effb99f56944dba6b26adba0f2d2db8b5bbbb"} Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.169636 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"017a6c23-059b-4451-a487-660ef4033bc1","Type":"ContainerDied","Data":"d6bd91870b457201b60754f93c018a258f6eced1f6e3b7f1e2223b66b4a876ed"} Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.169691 4811 scope.go:117] "RemoveContainer" containerID="bb8659c4081958bfe7fa364c23e63d9e2bff60b485f1c51cf2ed5e0aeddce143" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.169850 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.209362 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-volume1-0" podStartSLOduration=2.570072665 podStartE2EDuration="4.2093367s" podCreationTimestamp="2026-01-28 17:22:01 +0000 UTC" firstStartedPulling="2026-01-28 17:22:02.366348061 +0000 UTC m=+5815.120711644" lastFinishedPulling="2026-01-28 17:22:04.005612096 +0000 UTC m=+5816.759975679" observedRunningTime="2026-01-28 17:22:05.202601007 +0000 UTC m=+5817.956964610" watchObservedRunningTime="2026-01-28 17:22:05.2093367 +0000 UTC m=+5817.963700293" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.218802 4811 scope.go:117] "RemoveContainer" containerID="5f6c34b1699ae0be2dcba3224e15a64455f7b0a01acfbec742ad2a0639ee1edb" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.249678 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=2.2289622 podStartE2EDuration="3.249639288s" podCreationTimestamp="2026-01-28 17:22:02 +0000 UTC" firstStartedPulling="2026-01-28 17:22:03.28387551 +0000 UTC m=+5816.038239093" lastFinishedPulling="2026-01-28 17:22:04.304552598 +0000 UTC m=+5817.058916181" observedRunningTime="2026-01-28 17:22:05.238045412 +0000 UTC m=+5817.992408995" watchObservedRunningTime="2026-01-28 17:22:05.249639288 +0000 UTC m=+5818.004002871" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.269121 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.280283 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.308504 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Jan 28 17:22:05 crc kubenswrapper[4811]: E0128 17:22:05.309034 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="017a6c23-059b-4451-a487-660ef4033bc1" containerName="cinder-api" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.309051 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="017a6c23-059b-4451-a487-660ef4033bc1" containerName="cinder-api" Jan 28 17:22:05 crc kubenswrapper[4811]: E0128 17:22:05.309080 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="017a6c23-059b-4451-a487-660ef4033bc1" containerName="cinder-api-log" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.309090 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="017a6c23-059b-4451-a487-660ef4033bc1" containerName="cinder-api-log" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.309354 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="017a6c23-059b-4451-a487-660ef4033bc1" containerName="cinder-api-log" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.309369 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="017a6c23-059b-4451-a487-660ef4033bc1" containerName="cinder-api" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.310834 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.314138 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.322379 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.384058 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.385647 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.387407 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.494233 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e7123fa-834f-4624-9261-e36297a71ce0-config-data\") pod \"cinder-api-0\" (UID: \"4e7123fa-834f-4624-9261-e36297a71ce0\") " pod="openstack/cinder-api-0" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.494657 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4e7123fa-834f-4624-9261-e36297a71ce0-config-data-custom\") pod \"cinder-api-0\" (UID: \"4e7123fa-834f-4624-9261-e36297a71ce0\") " pod="openstack/cinder-api-0" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.494792 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4e7123fa-834f-4624-9261-e36297a71ce0-etc-machine-id\") pod \"cinder-api-0\" (UID: \"4e7123fa-834f-4624-9261-e36297a71ce0\") " pod="openstack/cinder-api-0" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.494956 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6js2\" (UniqueName: \"kubernetes.io/projected/4e7123fa-834f-4624-9261-e36297a71ce0-kube-api-access-z6js2\") pod \"cinder-api-0\" (UID: \"4e7123fa-834f-4624-9261-e36297a71ce0\") " pod="openstack/cinder-api-0" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.495124 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e7123fa-834f-4624-9261-e36297a71ce0-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"4e7123fa-834f-4624-9261-e36297a71ce0\") " pod="openstack/cinder-api-0" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.495237 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e7123fa-834f-4624-9261-e36297a71ce0-logs\") pod \"cinder-api-0\" (UID: \"4e7123fa-834f-4624-9261-e36297a71ce0\") " pod="openstack/cinder-api-0" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.495355 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e7123fa-834f-4624-9261-e36297a71ce0-scripts\") pod \"cinder-api-0\" (UID: \"4e7123fa-834f-4624-9261-e36297a71ce0\") " pod="openstack/cinder-api-0" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.597728 4811 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4e7123fa-834f-4624-9261-e36297a71ce0-config-data-custom\") pod \"cinder-api-0\" (UID: \"4e7123fa-834f-4624-9261-e36297a71ce0\") " pod="openstack/cinder-api-0" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.597791 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4e7123fa-834f-4624-9261-e36297a71ce0-etc-machine-id\") pod \"cinder-api-0\" (UID: \"4e7123fa-834f-4624-9261-e36297a71ce0\") " pod="openstack/cinder-api-0" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.597861 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6js2\" (UniqueName: \"kubernetes.io/projected/4e7123fa-834f-4624-9261-e36297a71ce0-kube-api-access-z6js2\") pod \"cinder-api-0\" (UID: \"4e7123fa-834f-4624-9261-e36297a71ce0\") " pod="openstack/cinder-api-0" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.597894 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4e7123fa-834f-4624-9261-e36297a71ce0-etc-machine-id\") pod \"cinder-api-0\" (UID: \"4e7123fa-834f-4624-9261-e36297a71ce0\") " pod="openstack/cinder-api-0" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.597923 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e7123fa-834f-4624-9261-e36297a71ce0-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"4e7123fa-834f-4624-9261-e36297a71ce0\") " pod="openstack/cinder-api-0" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.597979 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e7123fa-834f-4624-9261-e36297a71ce0-logs\") pod \"cinder-api-0\" (UID: \"4e7123fa-834f-4624-9261-e36297a71ce0\") " pod="openstack/cinder-api-0" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.597998 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e7123fa-834f-4624-9261-e36297a71ce0-scripts\") pod \"cinder-api-0\" (UID: \"4e7123fa-834f-4624-9261-e36297a71ce0\") " pod="openstack/cinder-api-0" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.598049 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e7123fa-834f-4624-9261-e36297a71ce0-config-data\") pod \"cinder-api-0\" (UID: \"4e7123fa-834f-4624-9261-e36297a71ce0\") " pod="openstack/cinder-api-0" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.598326 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e7123fa-834f-4624-9261-e36297a71ce0-logs\") pod \"cinder-api-0\" (UID: \"4e7123fa-834f-4624-9261-e36297a71ce0\") " pod="openstack/cinder-api-0" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.604981 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e7123fa-834f-4624-9261-e36297a71ce0-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"4e7123fa-834f-4624-9261-e36297a71ce0\") " pod="openstack/cinder-api-0" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.605231 4811 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e7123fa-834f-4624-9261-e36297a71ce0-scripts\") pod \"cinder-api-0\" (UID: \"4e7123fa-834f-4624-9261-e36297a71ce0\") " pod="openstack/cinder-api-0" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.606024 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4e7123fa-834f-4624-9261-e36297a71ce0-config-data-custom\") pod \"cinder-api-0\" (UID: \"4e7123fa-834f-4624-9261-e36297a71ce0\") " pod="openstack/cinder-api-0" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.607388 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e7123fa-834f-4624-9261-e36297a71ce0-config-data\") pod \"cinder-api-0\" (UID: \"4e7123fa-834f-4624-9261-e36297a71ce0\") " pod="openstack/cinder-api-0" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.607950 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.609378 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.619534 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.619917 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6js2\" (UniqueName: \"kubernetes.io/projected/4e7123fa-834f-4624-9261-e36297a71ce0-kube-api-access-z6js2\") pod \"cinder-api-0\" (UID: \"4e7123fa-834f-4624-9261-e36297a71ce0\") " pod="openstack/cinder-api-0" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.625190 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 28 17:22:05 crc kubenswrapper[4811]: I0128 17:22:05.653244 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 28 17:22:06 crc kubenswrapper[4811]: I0128 17:22:06.166623 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 28 17:22:06 crc kubenswrapper[4811]: I0128 17:22:06.188376 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"4e7123fa-834f-4624-9261-e36297a71ce0","Type":"ContainerStarted","Data":"1b78591883d290a365aaaf0eb6a1a541df7542c904dd8aee0bb6e5dd8ffe72fb"} Jan 28 17:22:06 crc kubenswrapper[4811]: I0128 17:22:06.191351 4811 generic.go:334] "Generic (PLEG): container finished" podID="9b1c7e2d-5844-4403-a5ca-cad7b15f0a32" containerID="390f2c5b66b556e2fab0b468ed4224432b34169005478c36879593310fea6e27" exitCode=0 Jan 28 17:22:06 crc kubenswrapper[4811]: I0128 17:22:06.192424 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hfmlk" event={"ID":"9b1c7e2d-5844-4403-a5ca-cad7b15f0a32","Type":"ContainerDied","Data":"390f2c5b66b556e2fab0b468ed4224432b34169005478c36879593310fea6e27"} Jan 28 17:22:06 crc kubenswrapper[4811]: I0128 17:22:06.192503 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 28 17:22:06 crc kubenswrapper[4811]: I0128 17:22:06.198217 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 28 17:22:06 crc kubenswrapper[4811]: I0128 17:22:06.207812 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 28 17:22:06 crc kubenswrapper[4811]: I0128 17:22:06.360120 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="017a6c23-059b-4451-a487-660ef4033bc1" path="/var/lib/kubelet/pods/017a6c23-059b-4451-a487-660ef4033bc1/volumes" Jan 28 17:22:06 crc kubenswrapper[4811]: I0128 17:22:06.734878 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:07 crc kubenswrapper[4811]: I0128 17:22:07.202613 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"4e7123fa-834f-4624-9261-e36297a71ce0","Type":"ContainerStarted","Data":"27e13336940cbce0235b40644d83b08a6bc6ed5cf20a83c11d78de979ae0ee49"} Jan 28 17:22:07 crc kubenswrapper[4811]: I0128 17:22:07.525775 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-backup-0" Jan 28 17:22:08 crc kubenswrapper[4811]: I0128 17:22:08.213668 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hfmlk" event={"ID":"9b1c7e2d-5844-4403-a5ca-cad7b15f0a32","Type":"ContainerStarted","Data":"db515d500220b6dbaa3dbee20dc0ed5b788ff8b093ee30b731f312ef0f4b6dd2"} Jan 28 17:22:08 crc kubenswrapper[4811]: I0128 17:22:08.218144 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"4e7123fa-834f-4624-9261-e36297a71ce0","Type":"ContainerStarted","Data":"c68621169c71b639fc80db09f6821cfe8b4654bc9af26bae8d28846cd39bbe69"} Jan 28 17:22:08 crc kubenswrapper[4811]: I0128 17:22:08.218193 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Jan 28 17:22:08 crc kubenswrapper[4811]: I0128 17:22:08.250027 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-hfmlk" podStartSLOduration=3.638283522 podStartE2EDuration="6.250005182s" podCreationTimestamp="2026-01-28 17:22:02 +0000 UTC" 
firstStartedPulling="2026-01-28 17:22:04.230753238 +0000 UTC m=+5816.985116811" lastFinishedPulling="2026-01-28 17:22:06.842474888 +0000 UTC m=+5819.596838471" observedRunningTime="2026-01-28 17:22:08.241147511 +0000 UTC m=+5820.995511094" watchObservedRunningTime="2026-01-28 17:22:08.250005182 +0000 UTC m=+5821.004368775" Jan 28 17:22:08 crc kubenswrapper[4811]: I0128 17:22:08.268243 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.268220549 podStartE2EDuration="3.268220549s" podCreationTimestamp="2026-01-28 17:22:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:22:08.2616639 +0000 UTC m=+5821.016027543" watchObservedRunningTime="2026-01-28 17:22:08.268220549 +0000 UTC m=+5821.022584132" Jan 28 17:22:09 crc kubenswrapper[4811]: I0128 17:22:09.625581 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Jan 28 17:22:09 crc kubenswrapper[4811]: I0128 17:22:09.675104 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 28 17:22:10 crc kubenswrapper[4811]: I0128 17:22:10.236880 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa" containerName="cinder-scheduler" containerID="cri-o://e91e320e134c880ced8ed65002d882edd39223690a4b42ed3ed761d2cddd5787" gracePeriod=30 Jan 28 17:22:10 crc kubenswrapper[4811]: I0128 17:22:10.236924 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa" containerName="probe" containerID="cri-o://d5e2fdfc75f6779847130c576eb9e4beb9b75dd6e36ae314d84ec42bf30d007e" gracePeriod=30 Jan 28 17:22:11 crc kubenswrapper[4811]: I0128 17:22:11.981983 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-volume1-0" Jan 28 17:22:12 crc kubenswrapper[4811]: I0128 17:22:12.730767 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Jan 28 17:22:12 crc kubenswrapper[4811]: I0128 17:22:12.778375 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-hfmlk" Jan 28 17:22:12 crc kubenswrapper[4811]: I0128 17:22:12.778425 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-hfmlk" Jan 28 17:22:13 crc kubenswrapper[4811]: I0128 17:22:13.269982 4811 generic.go:334] "Generic (PLEG): container finished" podID="ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa" containerID="e91e320e134c880ced8ed65002d882edd39223690a4b42ed3ed761d2cddd5787" exitCode=0 Jan 28 17:22:13 crc kubenswrapper[4811]: I0128 17:22:13.270025 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa","Type":"ContainerDied","Data":"e91e320e134c880ced8ed65002d882edd39223690a4b42ed3ed761d2cddd5787"} Jan 28 17:22:13 crc kubenswrapper[4811]: I0128 17:22:13.827330 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-hfmlk" podUID="9b1c7e2d-5844-4403-a5ca-cad7b15f0a32" containerName="registry-server" probeResult="failure" output=< Jan 28 17:22:13 crc kubenswrapper[4811]: timeout: failed to connect service ":50051" within 1s Jan 
Jan 28 17:22:14 crc kubenswrapper[4811]: I0128 17:22:14.339510 4811 scope.go:117] "RemoveContainer" containerID="3abd2ca52dcea4f0bad2eccd9e9279be836bcb0c1530a2433f35225ded0da091" Jan 28 17:22:14 crc kubenswrapper[4811]: E0128 17:22:14.339987 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:22:16 crc kubenswrapper[4811]: I0128 17:22:16.854009 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 28 17:22:16 crc kubenswrapper[4811]: I0128 17:22:16.963988 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f9hv5\" (UniqueName: \"kubernetes.io/projected/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-kube-api-access-f9hv5\") pod \"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa\" (UID: \"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa\") " Jan 28 17:22:16 crc kubenswrapper[4811]: I0128 17:22:16.964392 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-combined-ca-bundle\") pod \"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa\" (UID: \"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa\") " Jan 28 17:22:16 crc kubenswrapper[4811]: I0128 17:22:16.964603 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-scripts\") pod \"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa\" (UID: \"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa\") " Jan 28 17:22:16 crc kubenswrapper[4811]: I0128 17:22:16.964648 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-config-data\") pod \"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa\" (UID: \"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa\") " Jan 28 17:22:16 crc kubenswrapper[4811]: I0128 17:22:16.964709 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-config-data-custom\") pod \"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa\" (UID: \"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa\") " Jan 28 17:22:16 crc kubenswrapper[4811]: I0128 17:22:16.964746 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-etc-machine-id\") pod \"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa\" (UID: \"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa\") " Jan 28 17:22:16 crc kubenswrapper[4811]: I0128 17:22:16.965290 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa" (UID: "ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 17:22:16 crc kubenswrapper[4811]: I0128 17:22:16.973929 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-kube-api-access-f9hv5" (OuterVolumeSpecName: "kube-api-access-f9hv5") pod "ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa" (UID: "ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa"). InnerVolumeSpecName "kube-api-access-f9hv5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:22:16 crc kubenswrapper[4811]: I0128 17:22:16.986211 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa" (UID: "ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:22:16 crc kubenswrapper[4811]: I0128 17:22:16.994959 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-scripts" (OuterVolumeSpecName: "scripts") pod "ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa" (UID: "ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:22:17 crc kubenswrapper[4811]: I0128 17:22:17.048615 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa" (UID: "ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:22:17 crc kubenswrapper[4811]: I0128 17:22:17.066693 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f9hv5\" (UniqueName: \"kubernetes.io/projected/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-kube-api-access-f9hv5\") on node \"crc\" DevicePath \"\"" Jan 28 17:22:17 crc kubenswrapper[4811]: I0128 17:22:17.066726 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 17:22:17 crc kubenswrapper[4811]: I0128 17:22:17.066735 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:22:17 crc kubenswrapper[4811]: I0128 17:22:17.066743 4811 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 28 17:22:17 crc kubenswrapper[4811]: I0128 17:22:17.066752 4811 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 28 17:22:17 crc kubenswrapper[4811]: I0128 17:22:17.068692 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-config-data" (OuterVolumeSpecName: "config-data") pod "ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa" (UID: "ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:22:17 crc kubenswrapper[4811]: I0128 17:22:17.122016 4811 generic.go:334] "Generic (PLEG): container finished" podID="ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa" containerID="d5e2fdfc75f6779847130c576eb9e4beb9b75dd6e36ae314d84ec42bf30d007e" exitCode=0 Jan 28 17:22:17 crc kubenswrapper[4811]: I0128 17:22:17.122070 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa","Type":"ContainerDied","Data":"d5e2fdfc75f6779847130c576eb9e4beb9b75dd6e36ae314d84ec42bf30d007e"} Jan 28 17:22:17 crc kubenswrapper[4811]: I0128 17:22:17.122116 4811 scope.go:117] "RemoveContainer" containerID="d5e2fdfc75f6779847130c576eb9e4beb9b75dd6e36ae314d84ec42bf30d007e" Jan 28 17:22:17 crc kubenswrapper[4811]: I0128 17:22:17.158560 4811 scope.go:117] "RemoveContainer" containerID="e91e320e134c880ced8ed65002d882edd39223690a4b42ed3ed761d2cddd5787" Jan 28 17:22:17 crc kubenswrapper[4811]: I0128 17:22:17.169310 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 17:22:17 crc kubenswrapper[4811]: I0128 17:22:17.618670 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.130293 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa","Type":"ContainerDied","Data":"d6bdcb0c9c7daafccab9df4ff954efebe3dcdeb5f0876c7e3bedf6277254340f"} Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.130326 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.171139 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.180770 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.197606 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Jan 28 17:22:18 crc kubenswrapper[4811]: E0128 17:22:18.198099 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa" containerName="probe" Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.198126 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa" containerName="probe" Jan 28 17:22:18 crc kubenswrapper[4811]: E0128 17:22:18.198150 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa" containerName="cinder-scheduler" Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.198159 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa" containerName="cinder-scheduler" Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.198385 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa" containerName="probe" Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.198417 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa" containerName="cinder-scheduler" Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.199635 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.203748 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.214750 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.292686 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0e61975-afaa-4739-8174-344e50e4c21a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"e0e61975-afaa-4739-8174-344e50e4c21a\") " pod="openstack/cinder-scheduler-0" Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.292736 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e0e61975-afaa-4739-8174-344e50e4c21a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"e0e61975-afaa-4739-8174-344e50e4c21a\") " pod="openstack/cinder-scheduler-0" Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.292763 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0e61975-afaa-4739-8174-344e50e4c21a-scripts\") pod \"cinder-scheduler-0\" (UID: \"e0e61975-afaa-4739-8174-344e50e4c21a\") " pod="openstack/cinder-scheduler-0" Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.292895 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e0e61975-afaa-4739-8174-344e50e4c21a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"e0e61975-afaa-4739-8174-344e50e4c21a\") " pod="openstack/cinder-scheduler-0" Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.293191 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0e61975-afaa-4739-8174-344e50e4c21a-config-data\") pod \"cinder-scheduler-0\" (UID: \"e0e61975-afaa-4739-8174-344e50e4c21a\") " pod="openstack/cinder-scheduler-0" Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.293249 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9qrc\" (UniqueName: \"kubernetes.io/projected/e0e61975-afaa-4739-8174-344e50e4c21a-kube-api-access-q9qrc\") pod \"cinder-scheduler-0\" (UID: \"e0e61975-afaa-4739-8174-344e50e4c21a\") " pod="openstack/cinder-scheduler-0" Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.358026 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa" path="/var/lib/kubelet/pods/ecc03a47-4c6b-41e4-aa08-1c87f51c7eaa/volumes" Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.395553 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0e61975-afaa-4739-8174-344e50e4c21a-config-data\") pod \"cinder-scheduler-0\" (UID: \"e0e61975-afaa-4739-8174-344e50e4c21a\") " pod="openstack/cinder-scheduler-0" Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.395617 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9qrc\" (UniqueName: 
\"kubernetes.io/projected/e0e61975-afaa-4739-8174-344e50e4c21a-kube-api-access-q9qrc\") pod \"cinder-scheduler-0\" (UID: \"e0e61975-afaa-4739-8174-344e50e4c21a\") " pod="openstack/cinder-scheduler-0" Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.395753 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0e61975-afaa-4739-8174-344e50e4c21a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"e0e61975-afaa-4739-8174-344e50e4c21a\") " pod="openstack/cinder-scheduler-0" Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.395791 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e0e61975-afaa-4739-8174-344e50e4c21a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"e0e61975-afaa-4739-8174-344e50e4c21a\") " pod="openstack/cinder-scheduler-0" Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.395818 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0e61975-afaa-4739-8174-344e50e4c21a-scripts\") pod \"cinder-scheduler-0\" (UID: \"e0e61975-afaa-4739-8174-344e50e4c21a\") " pod="openstack/cinder-scheduler-0" Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.395843 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e0e61975-afaa-4739-8174-344e50e4c21a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"e0e61975-afaa-4739-8174-344e50e4c21a\") " pod="openstack/cinder-scheduler-0" Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.401573 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e0e61975-afaa-4739-8174-344e50e4c21a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"e0e61975-afaa-4739-8174-344e50e4c21a\") " pod="openstack/cinder-scheduler-0" Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.406799 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0e61975-afaa-4739-8174-344e50e4c21a-scripts\") pod \"cinder-scheduler-0\" (UID: \"e0e61975-afaa-4739-8174-344e50e4c21a\") " pod="openstack/cinder-scheduler-0" Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.406862 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e0e61975-afaa-4739-8174-344e50e4c21a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"e0e61975-afaa-4739-8174-344e50e4c21a\") " pod="openstack/cinder-scheduler-0" Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.410857 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0e61975-afaa-4739-8174-344e50e4c21a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"e0e61975-afaa-4739-8174-344e50e4c21a\") " pod="openstack/cinder-scheduler-0" Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.422485 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0e61975-afaa-4739-8174-344e50e4c21a-config-data\") pod \"cinder-scheduler-0\" (UID: \"e0e61975-afaa-4739-8174-344e50e4c21a\") " pod="openstack/cinder-scheduler-0" Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.425900 4811 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-q9qrc\" (UniqueName: \"kubernetes.io/projected/e0e61975-afaa-4739-8174-344e50e4c21a-kube-api-access-q9qrc\") pod \"cinder-scheduler-0\" (UID: \"e0e61975-afaa-4739-8174-344e50e4c21a\") " pod="openstack/cinder-scheduler-0" Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.518560 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 28 17:22:18 crc kubenswrapper[4811]: I0128 17:22:18.930089 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 28 17:22:19 crc kubenswrapper[4811]: I0128 17:22:19.139613 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"e0e61975-afaa-4739-8174-344e50e4c21a","Type":"ContainerStarted","Data":"d403e2d79f321557e9932cf417556651a5c1ce6beace7e59039fb490e7f055bb"} Jan 28 17:22:20 crc kubenswrapper[4811]: I0128 17:22:20.155381 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"e0e61975-afaa-4739-8174-344e50e4c21a","Type":"ContainerStarted","Data":"f8e70e0bc727ffa6ba05dae5701a4c1117427bc06b4de953d1fbdbcab7a89d80"} Jan 28 17:22:22 crc kubenswrapper[4811]: I0128 17:22:22.182621 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"e0e61975-afaa-4739-8174-344e50e4c21a","Type":"ContainerStarted","Data":"0f1bb4e3b84f760cc7a5c90607379ba5281143702feb1cfc25989f848bff10ba"} Jan 28 17:22:22 crc kubenswrapper[4811]: I0128 17:22:22.219375 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.219346289 podStartE2EDuration="4.219346289s" podCreationTimestamp="2026-01-28 17:22:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:22:22.211221047 +0000 UTC m=+5834.965584630" watchObservedRunningTime="2026-01-28 17:22:22.219346289 +0000 UTC m=+5834.973709882" Jan 28 17:22:22 crc kubenswrapper[4811]: I0128 17:22:22.827718 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hfmlk" Jan 28 17:22:22 crc kubenswrapper[4811]: I0128 17:22:22.878287 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-hfmlk" Jan 28 17:22:23 crc kubenswrapper[4811]: I0128 17:22:23.136893 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hfmlk"] Jan 28 17:22:23 crc kubenswrapper[4811]: I0128 17:22:23.520212 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Jan 28 17:22:24 crc kubenswrapper[4811]: I0128 17:22:24.205610 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-hfmlk" podUID="9b1c7e2d-5844-4403-a5ca-cad7b15f0a32" containerName="registry-server" containerID="cri-o://db515d500220b6dbaa3dbee20dc0ed5b788ff8b093ee30b731f312ef0f4b6dd2" gracePeriod=2 Jan 28 17:22:25 crc kubenswrapper[4811]: I0128 17:22:25.238909 4811 generic.go:334] "Generic (PLEG): container finished" podID="9b1c7e2d-5844-4403-a5ca-cad7b15f0a32" containerID="db515d500220b6dbaa3dbee20dc0ed5b788ff8b093ee30b731f312ef0f4b6dd2" exitCode=0 Jan 28 17:22:25 crc kubenswrapper[4811]: I0128 17:22:25.239008 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-hfmlk" event={"ID":"9b1c7e2d-5844-4403-a5ca-cad7b15f0a32","Type":"ContainerDied","Data":"db515d500220b6dbaa3dbee20dc0ed5b788ff8b093ee30b731f312ef0f4b6dd2"} Jan 28 17:22:25 crc kubenswrapper[4811]: I0128 17:22:25.239196 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hfmlk" event={"ID":"9b1c7e2d-5844-4403-a5ca-cad7b15f0a32","Type":"ContainerDied","Data":"de501524387387498426ebb8ea01b584656ccd5eb6a4cca8acb4e86571909bc8"} Jan 28 17:22:25 crc kubenswrapper[4811]: I0128 17:22:25.239211 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="de501524387387498426ebb8ea01b584656ccd5eb6a4cca8acb4e86571909bc8" Jan 28 17:22:25 crc kubenswrapper[4811]: I0128 17:22:25.282393 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hfmlk" Jan 28 17:22:25 crc kubenswrapper[4811]: I0128 17:22:25.344145 4811 scope.go:117] "RemoveContainer" containerID="3abd2ca52dcea4f0bad2eccd9e9279be836bcb0c1530a2433f35225ded0da091" Jan 28 17:22:25 crc kubenswrapper[4811]: E0128 17:22:25.345294 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:22:25 crc kubenswrapper[4811]: I0128 17:22:25.447248 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b1c7e2d-5844-4403-a5ca-cad7b15f0a32-catalog-content\") pod \"9b1c7e2d-5844-4403-a5ca-cad7b15f0a32\" (UID: \"9b1c7e2d-5844-4403-a5ca-cad7b15f0a32\") " Jan 28 17:22:25 crc kubenswrapper[4811]: I0128 17:22:25.447418 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4hv9q\" (UniqueName: \"kubernetes.io/projected/9b1c7e2d-5844-4403-a5ca-cad7b15f0a32-kube-api-access-4hv9q\") pod \"9b1c7e2d-5844-4403-a5ca-cad7b15f0a32\" (UID: \"9b1c7e2d-5844-4403-a5ca-cad7b15f0a32\") " Jan 28 17:22:25 crc kubenswrapper[4811]: I0128 17:22:25.447531 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b1c7e2d-5844-4403-a5ca-cad7b15f0a32-utilities\") pod \"9b1c7e2d-5844-4403-a5ca-cad7b15f0a32\" (UID: \"9b1c7e2d-5844-4403-a5ca-cad7b15f0a32\") " Jan 28 17:22:25 crc kubenswrapper[4811]: I0128 17:22:25.449187 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b1c7e2d-5844-4403-a5ca-cad7b15f0a32-utilities" (OuterVolumeSpecName: "utilities") pod "9b1c7e2d-5844-4403-a5ca-cad7b15f0a32" (UID: "9b1c7e2d-5844-4403-a5ca-cad7b15f0a32"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:22:25 crc kubenswrapper[4811]: I0128 17:22:25.454404 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b1c7e2d-5844-4403-a5ca-cad7b15f0a32-kube-api-access-4hv9q" (OuterVolumeSpecName: "kube-api-access-4hv9q") pod "9b1c7e2d-5844-4403-a5ca-cad7b15f0a32" (UID: "9b1c7e2d-5844-4403-a5ca-cad7b15f0a32"). InnerVolumeSpecName "kube-api-access-4hv9q". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:22:25 crc kubenswrapper[4811]: I0128 17:22:25.550201 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4hv9q\" (UniqueName: \"kubernetes.io/projected/9b1c7e2d-5844-4403-a5ca-cad7b15f0a32-kube-api-access-4hv9q\") on node \"crc\" DevicePath \"\"" Jan 28 17:22:25 crc kubenswrapper[4811]: I0128 17:22:25.550239 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b1c7e2d-5844-4403-a5ca-cad7b15f0a32-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 17:22:25 crc kubenswrapper[4811]: I0128 17:22:25.580478 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b1c7e2d-5844-4403-a5ca-cad7b15f0a32-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9b1c7e2d-5844-4403-a5ca-cad7b15f0a32" (UID: "9b1c7e2d-5844-4403-a5ca-cad7b15f0a32"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:22:25 crc kubenswrapper[4811]: I0128 17:22:25.652593 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b1c7e2d-5844-4403-a5ca-cad7b15f0a32-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 17:22:26 crc kubenswrapper[4811]: I0128 17:22:26.246469 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hfmlk" Jan 28 17:22:26 crc kubenswrapper[4811]: I0128 17:22:26.283634 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hfmlk"] Jan 28 17:22:26 crc kubenswrapper[4811]: I0128 17:22:26.291519 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-hfmlk"] Jan 28 17:22:26 crc kubenswrapper[4811]: I0128 17:22:26.350664 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b1c7e2d-5844-4403-a5ca-cad7b15f0a32" path="/var/lib/kubelet/pods/9b1c7e2d-5844-4403-a5ca-cad7b15f0a32/volumes" Jan 28 17:22:28 crc kubenswrapper[4811]: I0128 17:22:28.734100 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Jan 28 17:22:36 crc kubenswrapper[4811]: I0128 17:22:36.339998 4811 scope.go:117] "RemoveContainer" containerID="3abd2ca52dcea4f0bad2eccd9e9279be836bcb0c1530a2433f35225ded0da091" Jan 28 17:22:36 crc kubenswrapper[4811]: E0128 17:22:36.340785 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:22:50 crc kubenswrapper[4811]: I0128 17:22:50.339608 4811 scope.go:117] "RemoveContainer" containerID="3abd2ca52dcea4f0bad2eccd9e9279be836bcb0c1530a2433f35225ded0da091" Jan 28 17:22:50 crc kubenswrapper[4811]: E0128 17:22:50.340898 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:23:03 crc kubenswrapper[4811]: I0128 17:23:03.339980 4811 scope.go:117] "RemoveContainer" containerID="3abd2ca52dcea4f0bad2eccd9e9279be836bcb0c1530a2433f35225ded0da091" Jan 28 17:23:03 crc kubenswrapper[4811]: E0128 17:23:03.340818 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:23:14 crc kubenswrapper[4811]: I0128 17:23:14.339881 4811 scope.go:117] "RemoveContainer" containerID="3abd2ca52dcea4f0bad2eccd9e9279be836bcb0c1530a2433f35225ded0da091" Jan 28 17:23:14 crc kubenswrapper[4811]: E0128 17:23:14.341385 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:23:25 crc kubenswrapper[4811]: I0128 17:23:25.340252 4811 scope.go:117] "RemoveContainer" containerID="3abd2ca52dcea4f0bad2eccd9e9279be836bcb0c1530a2433f35225ded0da091" Jan 28 17:23:25 crc kubenswrapper[4811]: E0128 17:23:25.341112 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:23:31 crc kubenswrapper[4811]: I0128 17:23:31.412863 4811 scope.go:117] "RemoveContainer" containerID="e73ffc237d8e1392056c3e79574d66e62cd7397d6831639a05ac08a8e301462f" Jan 28 17:23:31 crc kubenswrapper[4811]: I0128 17:23:31.440534 4811 scope.go:117] "RemoveContainer" containerID="8f55702c494dd197f2608cc87c20b050ff4854178804e493a8c498efb3c19e44" Jan 28 17:23:38 crc kubenswrapper[4811]: I0128 17:23:38.349235 4811 scope.go:117] "RemoveContainer" containerID="3abd2ca52dcea4f0bad2eccd9e9279be836bcb0c1530a2433f35225ded0da091" Jan 28 17:23:38 crc kubenswrapper[4811]: E0128 17:23:38.350162 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:23:47 crc kubenswrapper[4811]: I0128 17:23:47.043416 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-b9qzk"] Jan 28 17:23:47 crc kubenswrapper[4811]: I0128 17:23:47.054146 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bf7c-account-create-update-mtvzp"] Jan 28 17:23:47 crc kubenswrapper[4811]: I0128 
17:23:47.065177 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bf7c-account-create-update-mtvzp"] Jan 28 17:23:47 crc kubenswrapper[4811]: I0128 17:23:47.077390 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-b9qzk"] Jan 28 17:23:48 crc kubenswrapper[4811]: I0128 17:23:48.354188 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94b13d6f-c55f-4343-a54b-c40781c41409" path="/var/lib/kubelet/pods/94b13d6f-c55f-4343-a54b-c40781c41409/volumes" Jan 28 17:23:48 crc kubenswrapper[4811]: I0128 17:23:48.355114 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebe6770a-3eae-4a81-82fe-394dbf30b626" path="/var/lib/kubelet/pods/ebe6770a-3eae-4a81-82fe-394dbf30b626/volumes" Jan 28 17:23:53 crc kubenswrapper[4811]: I0128 17:23:53.339385 4811 scope.go:117] "RemoveContainer" containerID="3abd2ca52dcea4f0bad2eccd9e9279be836bcb0c1530a2433f35225ded0da091" Jan 28 17:23:53 crc kubenswrapper[4811]: E0128 17:23:53.339906 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:23:57 crc kubenswrapper[4811]: I0128 17:23:57.042566 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-qzb7h"] Jan 28 17:23:57 crc kubenswrapper[4811]: I0128 17:23:57.050826 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-qzb7h"] Jan 28 17:23:58 crc kubenswrapper[4811]: I0128 17:23:58.349505 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="466b541a-ec7d-4826-947e-c57617810071" path="/var/lib/kubelet/pods/466b541a-ec7d-4826-947e-c57617810071/volumes" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.570860 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-6zndg"] Jan 28 17:23:59 crc kubenswrapper[4811]: E0128 17:23:59.571596 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b1c7e2d-5844-4403-a5ca-cad7b15f0a32" containerName="registry-server" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.571613 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b1c7e2d-5844-4403-a5ca-cad7b15f0a32" containerName="registry-server" Jan 28 17:23:59 crc kubenswrapper[4811]: E0128 17:23:59.571644 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b1c7e2d-5844-4403-a5ca-cad7b15f0a32" containerName="extract-utilities" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.571653 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b1c7e2d-5844-4403-a5ca-cad7b15f0a32" containerName="extract-utilities" Jan 28 17:23:59 crc kubenswrapper[4811]: E0128 17:23:59.571671 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b1c7e2d-5844-4403-a5ca-cad7b15f0a32" containerName="extract-content" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.571679 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b1c7e2d-5844-4403-a5ca-cad7b15f0a32" containerName="extract-content" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.571908 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b1c7e2d-5844-4403-a5ca-cad7b15f0a32" 
containerName="registry-server" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.572723 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-6zndg" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.575028 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-kq6xx" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.575656 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.583085 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-4w45h"] Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.585600 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-4w45h" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.595925 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-6zndg"] Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.620527 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-4w45h"] Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.719706 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/bfb5a500-7066-4649-9116-571b80da5d8d-var-log\") pod \"ovn-controller-ovs-4w45h\" (UID: \"bfb5a500-7066-4649-9116-571b80da5d8d\") " pod="openstack/ovn-controller-ovs-4w45h" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.719760 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/bfb5a500-7066-4649-9116-571b80da5d8d-var-run\") pod \"ovn-controller-ovs-4w45h\" (UID: \"bfb5a500-7066-4649-9116-571b80da5d8d\") " pod="openstack/ovn-controller-ovs-4w45h" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.719783 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqpvj\" (UniqueName: \"kubernetes.io/projected/7bf5f5e6-489a-4e4a-9974-e0d4c029534e-kube-api-access-rqpvj\") pod \"ovn-controller-6zndg\" (UID: \"7bf5f5e6-489a-4e4a-9974-e0d4c029534e\") " pod="openstack/ovn-controller-6zndg" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.719873 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/bfb5a500-7066-4649-9116-571b80da5d8d-var-lib\") pod \"ovn-controller-ovs-4w45h\" (UID: \"bfb5a500-7066-4649-9116-571b80da5d8d\") " pod="openstack/ovn-controller-ovs-4w45h" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.719907 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7bf5f5e6-489a-4e4a-9974-e0d4c029534e-scripts\") pod \"ovn-controller-6zndg\" (UID: \"7bf5f5e6-489a-4e4a-9974-e0d4c029534e\") " pod="openstack/ovn-controller-6zndg" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.719998 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7bf5f5e6-489a-4e4a-9974-e0d4c029534e-var-run\") pod \"ovn-controller-6zndg\" (UID: \"7bf5f5e6-489a-4e4a-9974-e0d4c029534e\") " pod="openstack/ovn-controller-6zndg" Jan 28 17:23:59 
crc kubenswrapper[4811]: I0128 17:23:59.720098 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/bfb5a500-7066-4649-9116-571b80da5d8d-etc-ovs\") pod \"ovn-controller-ovs-4w45h\" (UID: \"bfb5a500-7066-4649-9116-571b80da5d8d\") " pod="openstack/ovn-controller-ovs-4w45h" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.720134 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bfb5a500-7066-4649-9116-571b80da5d8d-scripts\") pod \"ovn-controller-ovs-4w45h\" (UID: \"bfb5a500-7066-4649-9116-571b80da5d8d\") " pod="openstack/ovn-controller-ovs-4w45h" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.720200 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7bf5f5e6-489a-4e4a-9974-e0d4c029534e-var-log-ovn\") pod \"ovn-controller-6zndg\" (UID: \"7bf5f5e6-489a-4e4a-9974-e0d4c029534e\") " pod="openstack/ovn-controller-6zndg" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.720234 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7bf5f5e6-489a-4e4a-9974-e0d4c029534e-var-run-ovn\") pod \"ovn-controller-6zndg\" (UID: \"7bf5f5e6-489a-4e4a-9974-e0d4c029534e\") " pod="openstack/ovn-controller-6zndg" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.720286 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hc74v\" (UniqueName: \"kubernetes.io/projected/bfb5a500-7066-4649-9116-571b80da5d8d-kube-api-access-hc74v\") pod \"ovn-controller-ovs-4w45h\" (UID: \"bfb5a500-7066-4649-9116-571b80da5d8d\") " pod="openstack/ovn-controller-ovs-4w45h" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.822462 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hc74v\" (UniqueName: \"kubernetes.io/projected/bfb5a500-7066-4649-9116-571b80da5d8d-kube-api-access-hc74v\") pod \"ovn-controller-ovs-4w45h\" (UID: \"bfb5a500-7066-4649-9116-571b80da5d8d\") " pod="openstack/ovn-controller-ovs-4w45h" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.822869 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/bfb5a500-7066-4649-9116-571b80da5d8d-var-log\") pod \"ovn-controller-ovs-4w45h\" (UID: \"bfb5a500-7066-4649-9116-571b80da5d8d\") " pod="openstack/ovn-controller-ovs-4w45h" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.823008 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/bfb5a500-7066-4649-9116-571b80da5d8d-var-run\") pod \"ovn-controller-ovs-4w45h\" (UID: \"bfb5a500-7066-4649-9116-571b80da5d8d\") " pod="openstack/ovn-controller-ovs-4w45h" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.823102 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqpvj\" (UniqueName: \"kubernetes.io/projected/7bf5f5e6-489a-4e4a-9974-e0d4c029534e-kube-api-access-rqpvj\") pod \"ovn-controller-6zndg\" (UID: \"7bf5f5e6-489a-4e4a-9974-e0d4c029534e\") " pod="openstack/ovn-controller-6zndg" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.823244 4811 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/bfb5a500-7066-4649-9116-571b80da5d8d-var-lib\") pod \"ovn-controller-ovs-4w45h\" (UID: \"bfb5a500-7066-4649-9116-571b80da5d8d\") " pod="openstack/ovn-controller-ovs-4w45h" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.823344 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/bfb5a500-7066-4649-9116-571b80da5d8d-var-run\") pod \"ovn-controller-ovs-4w45h\" (UID: \"bfb5a500-7066-4649-9116-571b80da5d8d\") " pod="openstack/ovn-controller-ovs-4w45h" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.823317 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/bfb5a500-7066-4649-9116-571b80da5d8d-var-lib\") pod \"ovn-controller-ovs-4w45h\" (UID: \"bfb5a500-7066-4649-9116-571b80da5d8d\") " pod="openstack/ovn-controller-ovs-4w45h" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.823358 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7bf5f5e6-489a-4e4a-9974-e0d4c029534e-scripts\") pod \"ovn-controller-6zndg\" (UID: \"7bf5f5e6-489a-4e4a-9974-e0d4c029534e\") " pod="openstack/ovn-controller-6zndg" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.823256 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/bfb5a500-7066-4649-9116-571b80da5d8d-var-log\") pod \"ovn-controller-ovs-4w45h\" (UID: \"bfb5a500-7066-4649-9116-571b80da5d8d\") " pod="openstack/ovn-controller-ovs-4w45h" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.823475 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7bf5f5e6-489a-4e4a-9974-e0d4c029534e-var-run\") pod \"ovn-controller-6zndg\" (UID: \"7bf5f5e6-489a-4e4a-9974-e0d4c029534e\") " pod="openstack/ovn-controller-6zndg" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.823520 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/bfb5a500-7066-4649-9116-571b80da5d8d-etc-ovs\") pod \"ovn-controller-ovs-4w45h\" (UID: \"bfb5a500-7066-4649-9116-571b80da5d8d\") " pod="openstack/ovn-controller-ovs-4w45h" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.823557 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bfb5a500-7066-4649-9116-571b80da5d8d-scripts\") pod \"ovn-controller-ovs-4w45h\" (UID: \"bfb5a500-7066-4649-9116-571b80da5d8d\") " pod="openstack/ovn-controller-ovs-4w45h" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.823606 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7bf5f5e6-489a-4e4a-9974-e0d4c029534e-var-run\") pod \"ovn-controller-6zndg\" (UID: \"7bf5f5e6-489a-4e4a-9974-e0d4c029534e\") " pod="openstack/ovn-controller-6zndg" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.823738 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7bf5f5e6-489a-4e4a-9974-e0d4c029534e-var-log-ovn\") pod \"ovn-controller-6zndg\" (UID: \"7bf5f5e6-489a-4e4a-9974-e0d4c029534e\") " pod="openstack/ovn-controller-6zndg" Jan 28 17:23:59 crc 
kubenswrapper[4811]: I0128 17:23:59.823777 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7bf5f5e6-489a-4e4a-9974-e0d4c029534e-var-run-ovn\") pod \"ovn-controller-6zndg\" (UID: \"7bf5f5e6-489a-4e4a-9974-e0d4c029534e\") " pod="openstack/ovn-controller-6zndg" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.823787 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7bf5f5e6-489a-4e4a-9974-e0d4c029534e-var-log-ovn\") pod \"ovn-controller-6zndg\" (UID: \"7bf5f5e6-489a-4e4a-9974-e0d4c029534e\") " pod="openstack/ovn-controller-6zndg" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.823736 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/bfb5a500-7066-4649-9116-571b80da5d8d-etc-ovs\") pod \"ovn-controller-ovs-4w45h\" (UID: \"bfb5a500-7066-4649-9116-571b80da5d8d\") " pod="openstack/ovn-controller-ovs-4w45h" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.823923 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7bf5f5e6-489a-4e4a-9974-e0d4c029534e-var-run-ovn\") pod \"ovn-controller-6zndg\" (UID: \"7bf5f5e6-489a-4e4a-9974-e0d4c029534e\") " pod="openstack/ovn-controller-6zndg" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.825645 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bfb5a500-7066-4649-9116-571b80da5d8d-scripts\") pod \"ovn-controller-ovs-4w45h\" (UID: \"bfb5a500-7066-4649-9116-571b80da5d8d\") " pod="openstack/ovn-controller-ovs-4w45h" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.826344 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7bf5f5e6-489a-4e4a-9974-e0d4c029534e-scripts\") pod \"ovn-controller-6zndg\" (UID: \"7bf5f5e6-489a-4e4a-9974-e0d4c029534e\") " pod="openstack/ovn-controller-6zndg" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.841316 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hc74v\" (UniqueName: \"kubernetes.io/projected/bfb5a500-7066-4649-9116-571b80da5d8d-kube-api-access-hc74v\") pod \"ovn-controller-ovs-4w45h\" (UID: \"bfb5a500-7066-4649-9116-571b80da5d8d\") " pod="openstack/ovn-controller-ovs-4w45h" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.841421 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqpvj\" (UniqueName: \"kubernetes.io/projected/7bf5f5e6-489a-4e4a-9974-e0d4c029534e-kube-api-access-rqpvj\") pod \"ovn-controller-6zndg\" (UID: \"7bf5f5e6-489a-4e4a-9974-e0d4c029534e\") " pod="openstack/ovn-controller-6zndg" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.911924 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-6zndg" Jan 28 17:23:59 crc kubenswrapper[4811]: I0128 17:23:59.928812 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-4w45h"
Jan 28 17:24:00 crc kubenswrapper[4811]: I0128 17:24:00.409375 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-6zndg"]
Jan 28 17:24:00 crc kubenswrapper[4811]: I0128 17:24:00.880902 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-4w45h"]
Jan 28 17:24:00 crc kubenswrapper[4811]: W0128 17:24:00.888451 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbfb5a500_7066_4649_9116_571b80da5d8d.slice/crio-eed22678041214bc4608bae70670fb9010a0bfa3dfc5a5faf4e718c96078d195 WatchSource:0}: Error finding container eed22678041214bc4608bae70670fb9010a0bfa3dfc5a5faf4e718c96078d195: Status 404 returned error can't find the container with id eed22678041214bc4608bae70670fb9010a0bfa3dfc5a5faf4e718c96078d195
Jan 28 17:24:01 crc kubenswrapper[4811]: I0128 17:24:01.071245 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-4w45h" event={"ID":"bfb5a500-7066-4649-9116-571b80da5d8d","Type":"ContainerStarted","Data":"eed22678041214bc4608bae70670fb9010a0bfa3dfc5a5faf4e718c96078d195"}
Jan 28 17:24:01 crc kubenswrapper[4811]: I0128 17:24:01.087486 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-6zndg" event={"ID":"7bf5f5e6-489a-4e4a-9974-e0d4c029534e","Type":"ContainerStarted","Data":"52c616f758bdd49f4c0813ec5f85be32a7899eb5381ecbc25644fb2c9c1eed5f"}
Jan 28 17:24:01 crc kubenswrapper[4811]: I0128 17:24:01.087538 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-6zndg" event={"ID":"7bf5f5e6-489a-4e4a-9974-e0d4c029534e","Type":"ContainerStarted","Data":"a793ba4581a976813ae1f44f8fd8500445211e0ebbfe628b22bbfd3dbccf04e3"}
Jan 28 17:24:01 crc kubenswrapper[4811]: I0128 17:24:01.087775 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-6zndg"
Jan 28 17:24:01 crc kubenswrapper[4811]: I0128 17:24:01.110376 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-6zndg" podStartSLOduration=2.11035904 podStartE2EDuration="2.11035904s" podCreationTimestamp="2026-01-28 17:23:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:24:01.101989992 +0000 UTC m=+5933.856353575" watchObservedRunningTime="2026-01-28 17:24:01.11035904 +0000 UTC m=+5933.864722623"
Jan 28 17:24:01 crc kubenswrapper[4811]: I0128 17:24:01.178015 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-99r2m"]
Jan 28 17:24:01 crc kubenswrapper[4811]: I0128 17:24:01.179361 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-99r2m"
Jan 28 17:24:01 crc kubenswrapper[4811]: I0128 17:24:01.184388 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config"
Jan 28 17:24:01 crc kubenswrapper[4811]: I0128 17:24:01.189958 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-99r2m"]
Jan 28 17:24:01 crc kubenswrapper[4811]: I0128 17:24:01.253254 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/d595e34c-15d9-4b48-b9d9-06d8baa95833-ovs-rundir\") pod \"ovn-controller-metrics-99r2m\" (UID: \"d595e34c-15d9-4b48-b9d9-06d8baa95833\") " pod="openstack/ovn-controller-metrics-99r2m"
Jan 28 17:24:01 crc kubenswrapper[4811]: I0128 17:24:01.253335 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/d595e34c-15d9-4b48-b9d9-06d8baa95833-ovn-rundir\") pod \"ovn-controller-metrics-99r2m\" (UID: \"d595e34c-15d9-4b48-b9d9-06d8baa95833\") " pod="openstack/ovn-controller-metrics-99r2m"
Jan 28 17:24:01 crc kubenswrapper[4811]: I0128 17:24:01.253403 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27ngq\" (UniqueName: \"kubernetes.io/projected/d595e34c-15d9-4b48-b9d9-06d8baa95833-kube-api-access-27ngq\") pod \"ovn-controller-metrics-99r2m\" (UID: \"d595e34c-15d9-4b48-b9d9-06d8baa95833\") " pod="openstack/ovn-controller-metrics-99r2m"
Jan 28 17:24:01 crc kubenswrapper[4811]: I0128 17:24:01.253458 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d595e34c-15d9-4b48-b9d9-06d8baa95833-config\") pod \"ovn-controller-metrics-99r2m\" (UID: \"d595e34c-15d9-4b48-b9d9-06d8baa95833\") " pod="openstack/ovn-controller-metrics-99r2m"
Jan 28 17:24:01 crc kubenswrapper[4811]: I0128 17:24:01.354442 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/d595e34c-15d9-4b48-b9d9-06d8baa95833-ovs-rundir\") pod \"ovn-controller-metrics-99r2m\" (UID: \"d595e34c-15d9-4b48-b9d9-06d8baa95833\") " pod="openstack/ovn-controller-metrics-99r2m"
Jan 28 17:24:01 crc kubenswrapper[4811]: I0128 17:24:01.354536 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/d595e34c-15d9-4b48-b9d9-06d8baa95833-ovn-rundir\") pod \"ovn-controller-metrics-99r2m\" (UID: \"d595e34c-15d9-4b48-b9d9-06d8baa95833\") " pod="openstack/ovn-controller-metrics-99r2m"
Jan 28 17:24:01 crc kubenswrapper[4811]: I0128 17:24:01.354612 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27ngq\" (UniqueName: \"kubernetes.io/projected/d595e34c-15d9-4b48-b9d9-06d8baa95833-kube-api-access-27ngq\") pod \"ovn-controller-metrics-99r2m\" (UID: \"d595e34c-15d9-4b48-b9d9-06d8baa95833\") " pod="openstack/ovn-controller-metrics-99r2m"
Jan 28 17:24:01 crc kubenswrapper[4811]: I0128 17:24:01.354648 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d595e34c-15d9-4b48-b9d9-06d8baa95833-config\") pod \"ovn-controller-metrics-99r2m\" (UID: \"d595e34c-15d9-4b48-b9d9-06d8baa95833\") " pod="openstack/ovn-controller-metrics-99r2m"
Jan 28 17:24:01 crc kubenswrapper[4811]: I0128 17:24:01.354723 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/d595e34c-15d9-4b48-b9d9-06d8baa95833-ovs-rundir\") pod \"ovn-controller-metrics-99r2m\" (UID: \"d595e34c-15d9-4b48-b9d9-06d8baa95833\") " pod="openstack/ovn-controller-metrics-99r2m"
Jan 28 17:24:01 crc kubenswrapper[4811]: I0128 17:24:01.354762 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/d595e34c-15d9-4b48-b9d9-06d8baa95833-ovn-rundir\") pod \"ovn-controller-metrics-99r2m\" (UID: \"d595e34c-15d9-4b48-b9d9-06d8baa95833\") " pod="openstack/ovn-controller-metrics-99r2m"
Jan 28 17:24:01 crc kubenswrapper[4811]: I0128 17:24:01.355466 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d595e34c-15d9-4b48-b9d9-06d8baa95833-config\") pod \"ovn-controller-metrics-99r2m\" (UID: \"d595e34c-15d9-4b48-b9d9-06d8baa95833\") " pod="openstack/ovn-controller-metrics-99r2m"
Jan 28 17:24:01 crc kubenswrapper[4811]: I0128 17:24:01.373299 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27ngq\" (UniqueName: \"kubernetes.io/projected/d595e34c-15d9-4b48-b9d9-06d8baa95833-kube-api-access-27ngq\") pod \"ovn-controller-metrics-99r2m\" (UID: \"d595e34c-15d9-4b48-b9d9-06d8baa95833\") " pod="openstack/ovn-controller-metrics-99r2m"
Jan 28 17:24:01 crc kubenswrapper[4811]: I0128 17:24:01.513467 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-99r2m"
Jan 28 17:24:01 crc kubenswrapper[4811]: W0128 17:24:01.977853 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd595e34c_15d9_4b48_b9d9_06d8baa95833.slice/crio-12337e934a6244f3b119b9af73099d94fcaa48674dcc48e881e197b9ef6226b8 WatchSource:0}: Error finding container 12337e934a6244f3b119b9af73099d94fcaa48674dcc48e881e197b9ef6226b8: Status 404 returned error can't find the container with id 12337e934a6244f3b119b9af73099d94fcaa48674dcc48e881e197b9ef6226b8
Jan 28 17:24:01 crc kubenswrapper[4811]: I0128 17:24:01.982383 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-99r2m"]
Jan 28 17:24:02 crc kubenswrapper[4811]: I0128 17:24:02.101779 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-99r2m" event={"ID":"d595e34c-15d9-4b48-b9d9-06d8baa95833","Type":"ContainerStarted","Data":"12337e934a6244f3b119b9af73099d94fcaa48674dcc48e881e197b9ef6226b8"}
Jan 28 17:24:02 crc kubenswrapper[4811]: I0128 17:24:02.105560 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-4w45h" event={"ID":"bfb5a500-7066-4649-9116-571b80da5d8d","Type":"ContainerStarted","Data":"b00574786be2c58fd57f842e017be1ae16d9650478dbefd2f164682ef25ab472"}
Jan 28 17:24:03 crc kubenswrapper[4811]: I0128 17:24:03.112233 4811 generic.go:334] "Generic (PLEG): container finished" podID="bfb5a500-7066-4649-9116-571b80da5d8d" containerID="b00574786be2c58fd57f842e017be1ae16d9650478dbefd2f164682ef25ab472" exitCode=0
Jan 28 17:24:03 crc kubenswrapper[4811]: I0128 17:24:03.112336 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-4w45h" event={"ID":"bfb5a500-7066-4649-9116-571b80da5d8d","Type":"ContainerDied","Data":"b00574786be2c58fd57f842e017be1ae16d9650478dbefd2f164682ef25ab472"}
Jan 28 17:24:03 crc kubenswrapper[4811]: I0128 17:24:03.114273 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-99r2m" event={"ID":"d595e34c-15d9-4b48-b9d9-06d8baa95833","Type":"ContainerStarted","Data":"17c3d7a5e86bb5d6dec3b250d0552c92720360da6fe0be2b033c3e3f6a5ce4ff"}
Jan 28 17:24:03 crc kubenswrapper[4811]: I0128 17:24:03.155250 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-99r2m" podStartSLOduration=2.155231491 podStartE2EDuration="2.155231491s" podCreationTimestamp="2026-01-28 17:24:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:24:03.1544093 +0000 UTC m=+5935.908772903" watchObservedRunningTime="2026-01-28 17:24:03.155231491 +0000 UTC m=+5935.909595074"
Jan 28 17:24:04 crc kubenswrapper[4811]: I0128 17:24:04.129618 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-4w45h" event={"ID":"bfb5a500-7066-4649-9116-571b80da5d8d","Type":"ContainerStarted","Data":"94772ace607e2889cb770e07aa28294927719810ad02021d639de715bed36e4f"}
Jan 28 17:24:04 crc kubenswrapper[4811]: I0128 17:24:04.129993 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-4w45h" event={"ID":"bfb5a500-7066-4649-9116-571b80da5d8d","Type":"ContainerStarted","Data":"09c38049db57bf3d20c80d016001fcced1e96f15f93af4c510953b8678829962"}
Jan 28 17:24:04 crc kubenswrapper[4811]: I0128 17:24:04.158934 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-4w45h" podStartSLOduration=5.158908017 podStartE2EDuration="5.158908017s" podCreationTimestamp="2026-01-28 17:23:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:24:04.151363742 +0000 UTC m=+5936.905727325" watchObservedRunningTime="2026-01-28 17:24:04.158908017 +0000 UTC m=+5936.913271600"
Jan 28 17:24:04 crc kubenswrapper[4811]: I0128 17:24:04.930535 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-4w45h"
Jan 28 17:24:04 crc kubenswrapper[4811]: I0128 17:24:04.930890 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-4w45h"
Jan 28 17:24:08 crc kubenswrapper[4811]: I0128 17:24:08.349191 4811 scope.go:117] "RemoveContainer" containerID="3abd2ca52dcea4f0bad2eccd9e9279be836bcb0c1530a2433f35225ded0da091"
Jan 28 17:24:09 crc kubenswrapper[4811]: I0128 17:24:09.173481 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"3dea4adb7c8ad114e1ea2dd4f826ff466865b50d5f1c4a4806ff535fa6793f5b"}
Jan 28 17:24:11 crc kubenswrapper[4811]: I0128 17:24:11.038225 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-h5pq8"]
Jan 28 17:24:11 crc kubenswrapper[4811]: I0128 17:24:11.074509 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-h5pq8"]
podUID="aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a" path="/var/lib/kubelet/pods/aa9cf2b4-84ec-4cc8-9882-433a3dff9d4a/volumes" Jan 28 17:24:25 crc kubenswrapper[4811]: I0128 17:24:25.562518 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-db-create-bpbqs"] Jan 28 17:24:25 crc kubenswrapper[4811]: I0128 17:24:25.564187 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-bpbqs" Jan 28 17:24:25 crc kubenswrapper[4811]: I0128 17:24:25.573331 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-create-bpbqs"] Jan 28 17:24:25 crc kubenswrapper[4811]: I0128 17:24:25.652028 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/42475e6d-089d-42b4-9036-b625afeeb9fa-operator-scripts\") pod \"octavia-db-create-bpbqs\" (UID: \"42475e6d-089d-42b4-9036-b625afeeb9fa\") " pod="openstack/octavia-db-create-bpbqs" Jan 28 17:24:25 crc kubenswrapper[4811]: I0128 17:24:25.652072 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6z49f\" (UniqueName: \"kubernetes.io/projected/42475e6d-089d-42b4-9036-b625afeeb9fa-kube-api-access-6z49f\") pod \"octavia-db-create-bpbqs\" (UID: \"42475e6d-089d-42b4-9036-b625afeeb9fa\") " pod="openstack/octavia-db-create-bpbqs" Jan 28 17:24:25 crc kubenswrapper[4811]: I0128 17:24:25.753986 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/42475e6d-089d-42b4-9036-b625afeeb9fa-operator-scripts\") pod \"octavia-db-create-bpbqs\" (UID: \"42475e6d-089d-42b4-9036-b625afeeb9fa\") " pod="openstack/octavia-db-create-bpbqs" Jan 28 17:24:25 crc kubenswrapper[4811]: I0128 17:24:25.754032 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6z49f\" (UniqueName: \"kubernetes.io/projected/42475e6d-089d-42b4-9036-b625afeeb9fa-kube-api-access-6z49f\") pod \"octavia-db-create-bpbqs\" (UID: \"42475e6d-089d-42b4-9036-b625afeeb9fa\") " pod="openstack/octavia-db-create-bpbqs" Jan 28 17:24:25 crc kubenswrapper[4811]: I0128 17:24:25.755084 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/42475e6d-089d-42b4-9036-b625afeeb9fa-operator-scripts\") pod \"octavia-db-create-bpbqs\" (UID: \"42475e6d-089d-42b4-9036-b625afeeb9fa\") " pod="openstack/octavia-db-create-bpbqs" Jan 28 17:24:25 crc kubenswrapper[4811]: I0128 17:24:25.781137 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6z49f\" (UniqueName: \"kubernetes.io/projected/42475e6d-089d-42b4-9036-b625afeeb9fa-kube-api-access-6z49f\") pod \"octavia-db-create-bpbqs\" (UID: \"42475e6d-089d-42b4-9036-b625afeeb9fa\") " pod="openstack/octavia-db-create-bpbqs" Jan 28 17:24:25 crc kubenswrapper[4811]: I0128 17:24:25.891083 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-create-bpbqs" Jan 28 17:24:26 crc kubenswrapper[4811]: I0128 17:24:26.389782 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-create-bpbqs"] Jan 28 17:24:26 crc kubenswrapper[4811]: I0128 17:24:26.897688 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-5e0a-account-create-update-jd6sd"] Jan 28 17:24:26 crc kubenswrapper[4811]: I0128 17:24:26.899247 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-5e0a-account-create-update-jd6sd" Jan 28 17:24:26 crc kubenswrapper[4811]: I0128 17:24:26.901129 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-db-secret" Jan 28 17:24:26 crc kubenswrapper[4811]: I0128 17:24:26.917853 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-5e0a-account-create-update-jd6sd"] Jan 28 17:24:26 crc kubenswrapper[4811]: I0128 17:24:26.975811 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/407ae7f9-9fcc-4b3d-be4d-9bd254e95034-operator-scripts\") pod \"octavia-5e0a-account-create-update-jd6sd\" (UID: \"407ae7f9-9fcc-4b3d-be4d-9bd254e95034\") " pod="openstack/octavia-5e0a-account-create-update-jd6sd" Jan 28 17:24:26 crc kubenswrapper[4811]: I0128 17:24:26.976199 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xvd9\" (UniqueName: \"kubernetes.io/projected/407ae7f9-9fcc-4b3d-be4d-9bd254e95034-kube-api-access-5xvd9\") pod \"octavia-5e0a-account-create-update-jd6sd\" (UID: \"407ae7f9-9fcc-4b3d-be4d-9bd254e95034\") " pod="openstack/octavia-5e0a-account-create-update-jd6sd" Jan 28 17:24:27 crc kubenswrapper[4811]: I0128 17:24:27.078217 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xvd9\" (UniqueName: \"kubernetes.io/projected/407ae7f9-9fcc-4b3d-be4d-9bd254e95034-kube-api-access-5xvd9\") pod \"octavia-5e0a-account-create-update-jd6sd\" (UID: \"407ae7f9-9fcc-4b3d-be4d-9bd254e95034\") " pod="openstack/octavia-5e0a-account-create-update-jd6sd" Jan 28 17:24:27 crc kubenswrapper[4811]: I0128 17:24:27.078340 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/407ae7f9-9fcc-4b3d-be4d-9bd254e95034-operator-scripts\") pod \"octavia-5e0a-account-create-update-jd6sd\" (UID: \"407ae7f9-9fcc-4b3d-be4d-9bd254e95034\") " pod="openstack/octavia-5e0a-account-create-update-jd6sd" Jan 28 17:24:27 crc kubenswrapper[4811]: I0128 17:24:27.079062 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/407ae7f9-9fcc-4b3d-be4d-9bd254e95034-operator-scripts\") pod \"octavia-5e0a-account-create-update-jd6sd\" (UID: \"407ae7f9-9fcc-4b3d-be4d-9bd254e95034\") " pod="openstack/octavia-5e0a-account-create-update-jd6sd" Jan 28 17:24:27 crc kubenswrapper[4811]: I0128 17:24:27.110681 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xvd9\" (UniqueName: \"kubernetes.io/projected/407ae7f9-9fcc-4b3d-be4d-9bd254e95034-kube-api-access-5xvd9\") pod \"octavia-5e0a-account-create-update-jd6sd\" (UID: \"407ae7f9-9fcc-4b3d-be4d-9bd254e95034\") " pod="openstack/octavia-5e0a-account-create-update-jd6sd" Jan 28 17:24:27 crc kubenswrapper[4811]: I0128 17:24:27.220369 4811 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-5e0a-account-create-update-jd6sd" Jan 28 17:24:27 crc kubenswrapper[4811]: I0128 17:24:27.332139 4811 generic.go:334] "Generic (PLEG): container finished" podID="42475e6d-089d-42b4-9036-b625afeeb9fa" containerID="074c3bae02f7dce5a65ab1bf568832256a8b7a80534b483c11151f66feaafc13" exitCode=0 Jan 28 17:24:27 crc kubenswrapper[4811]: I0128 17:24:27.332188 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-bpbqs" event={"ID":"42475e6d-089d-42b4-9036-b625afeeb9fa","Type":"ContainerDied","Data":"074c3bae02f7dce5a65ab1bf568832256a8b7a80534b483c11151f66feaafc13"} Jan 28 17:24:27 crc kubenswrapper[4811]: I0128 17:24:27.332220 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-bpbqs" event={"ID":"42475e6d-089d-42b4-9036-b625afeeb9fa","Type":"ContainerStarted","Data":"cae20fe32368e3c08a1020326c2ffac20659ca25963e365d85df9dac1931d5cb"} Jan 28 17:24:27 crc kubenswrapper[4811]: I0128 17:24:27.670265 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-5e0a-account-create-update-jd6sd"] Jan 28 17:24:27 crc kubenswrapper[4811]: W0128 17:24:27.671670 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod407ae7f9_9fcc_4b3d_be4d_9bd254e95034.slice/crio-fdf80ae2e52231fa6c4c3fee4a552f1e6ccd8320666436931f87a9457fb6ce9e WatchSource:0}: Error finding container fdf80ae2e52231fa6c4c3fee4a552f1e6ccd8320666436931f87a9457fb6ce9e: Status 404 returned error can't find the container with id fdf80ae2e52231fa6c4c3fee4a552f1e6ccd8320666436931f87a9457fb6ce9e Jan 28 17:24:28 crc kubenswrapper[4811]: I0128 17:24:28.341236 4811 generic.go:334] "Generic (PLEG): container finished" podID="407ae7f9-9fcc-4b3d-be4d-9bd254e95034" containerID="05784ba60a13e0781da8c52929cd672b3550c7f37982f1804445fdd5a7ac9875" exitCode=0 Jan 28 17:24:28 crc kubenswrapper[4811]: I0128 17:24:28.348954 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-5e0a-account-create-update-jd6sd" event={"ID":"407ae7f9-9fcc-4b3d-be4d-9bd254e95034","Type":"ContainerDied","Data":"05784ba60a13e0781da8c52929cd672b3550c7f37982f1804445fdd5a7ac9875"} Jan 28 17:24:28 crc kubenswrapper[4811]: I0128 17:24:28.348994 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-5e0a-account-create-update-jd6sd" event={"ID":"407ae7f9-9fcc-4b3d-be4d-9bd254e95034","Type":"ContainerStarted","Data":"fdf80ae2e52231fa6c4c3fee4a552f1e6ccd8320666436931f87a9457fb6ce9e"} Jan 28 17:24:28 crc kubenswrapper[4811]: I0128 17:24:28.667121 4811 util.go:48] "No ready sandbox for pod can be found. 
Jan 28 17:24:28 crc kubenswrapper[4811]: I0128 17:24:28.667121 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-bpbqs"
Jan 28 17:24:28 crc kubenswrapper[4811]: I0128 17:24:28.708360 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/42475e6d-089d-42b4-9036-b625afeeb9fa-operator-scripts\") pod \"42475e6d-089d-42b4-9036-b625afeeb9fa\" (UID: \"42475e6d-089d-42b4-9036-b625afeeb9fa\") "
Jan 28 17:24:28 crc kubenswrapper[4811]: I0128 17:24:28.708637 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6z49f\" (UniqueName: \"kubernetes.io/projected/42475e6d-089d-42b4-9036-b625afeeb9fa-kube-api-access-6z49f\") pod \"42475e6d-089d-42b4-9036-b625afeeb9fa\" (UID: \"42475e6d-089d-42b4-9036-b625afeeb9fa\") "
Jan 28 17:24:28 crc kubenswrapper[4811]: I0128 17:24:28.709148 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42475e6d-089d-42b4-9036-b625afeeb9fa-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "42475e6d-089d-42b4-9036-b625afeeb9fa" (UID: "42475e6d-089d-42b4-9036-b625afeeb9fa"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 17:24:28 crc kubenswrapper[4811]: I0128 17:24:28.709742 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/42475e6d-089d-42b4-9036-b625afeeb9fa-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 28 17:24:28 crc kubenswrapper[4811]: I0128 17:24:28.713481 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42475e6d-089d-42b4-9036-b625afeeb9fa-kube-api-access-6z49f" (OuterVolumeSpecName: "kube-api-access-6z49f") pod "42475e6d-089d-42b4-9036-b625afeeb9fa" (UID: "42475e6d-089d-42b4-9036-b625afeeb9fa"). InnerVolumeSpecName "kube-api-access-6z49f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 17:24:28 crc kubenswrapper[4811]: I0128 17:24:28.811932 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6z49f\" (UniqueName: \"kubernetes.io/projected/42475e6d-089d-42b4-9036-b625afeeb9fa-kube-api-access-6z49f\") on node \"crc\" DevicePath \"\""
Jan 28 17:24:29 crc kubenswrapper[4811]: I0128 17:24:29.352637 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-bpbqs"
Jan 28 17:24:29 crc kubenswrapper[4811]: I0128 17:24:29.355546 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-bpbqs" event={"ID":"42475e6d-089d-42b4-9036-b625afeeb9fa","Type":"ContainerDied","Data":"cae20fe32368e3c08a1020326c2ffac20659ca25963e365d85df9dac1931d5cb"}
Jan 28 17:24:29 crc kubenswrapper[4811]: I0128 17:24:29.355583 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cae20fe32368e3c08a1020326c2ffac20659ca25963e365d85df9dac1931d5cb"
Jan 28 17:24:29 crc kubenswrapper[4811]: I0128 17:24:29.690112 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-5e0a-account-create-update-jd6sd"
Jan 28 17:24:29 crc kubenswrapper[4811]: I0128 17:24:29.835292 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/407ae7f9-9fcc-4b3d-be4d-9bd254e95034-operator-scripts\") pod \"407ae7f9-9fcc-4b3d-be4d-9bd254e95034\" (UID: \"407ae7f9-9fcc-4b3d-be4d-9bd254e95034\") "
Jan 28 17:24:29 crc kubenswrapper[4811]: I0128 17:24:29.835414 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5xvd9\" (UniqueName: \"kubernetes.io/projected/407ae7f9-9fcc-4b3d-be4d-9bd254e95034-kube-api-access-5xvd9\") pod \"407ae7f9-9fcc-4b3d-be4d-9bd254e95034\" (UID: \"407ae7f9-9fcc-4b3d-be4d-9bd254e95034\") "
Jan 28 17:24:29 crc kubenswrapper[4811]: I0128 17:24:29.836110 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/407ae7f9-9fcc-4b3d-be4d-9bd254e95034-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "407ae7f9-9fcc-4b3d-be4d-9bd254e95034" (UID: "407ae7f9-9fcc-4b3d-be4d-9bd254e95034"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 17:24:29 crc kubenswrapper[4811]: I0128 17:24:29.839698 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/407ae7f9-9fcc-4b3d-be4d-9bd254e95034-kube-api-access-5xvd9" (OuterVolumeSpecName: "kube-api-access-5xvd9") pod "407ae7f9-9fcc-4b3d-be4d-9bd254e95034" (UID: "407ae7f9-9fcc-4b3d-be4d-9bd254e95034"). InnerVolumeSpecName "kube-api-access-5xvd9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 17:24:29 crc kubenswrapper[4811]: I0128 17:24:29.938713 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/407ae7f9-9fcc-4b3d-be4d-9bd254e95034-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 28 17:24:29 crc kubenswrapper[4811]: I0128 17:24:29.938755 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5xvd9\" (UniqueName: \"kubernetes.io/projected/407ae7f9-9fcc-4b3d-be4d-9bd254e95034-kube-api-access-5xvd9\") on node \"crc\" DevicePath \"\""
Jan 28 17:24:30 crc kubenswrapper[4811]: I0128 17:24:30.363141 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-5e0a-account-create-update-jd6sd" event={"ID":"407ae7f9-9fcc-4b3d-be4d-9bd254e95034","Type":"ContainerDied","Data":"fdf80ae2e52231fa6c4c3fee4a552f1e6ccd8320666436931f87a9457fb6ce9e"}
Jan 28 17:24:30 crc kubenswrapper[4811]: I0128 17:24:30.363409 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fdf80ae2e52231fa6c4c3fee4a552f1e6ccd8320666436931f87a9457fb6ce9e"
Jan 28 17:24:30 crc kubenswrapper[4811]: I0128 17:24:30.363488 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-5e0a-account-create-update-jd6sd"
Jan 28 17:24:31 crc kubenswrapper[4811]: I0128 17:24:31.498106 4811 scope.go:117] "RemoveContainer" containerID="77ff0980bc0f00ba861486350bd2b1ed7ad23137d7b7f6e986656c052ea6fbad"
Jan 28 17:24:31 crc kubenswrapper[4811]: I0128 17:24:31.532960 4811 scope.go:117] "RemoveContainer" containerID="2e8d987cd6f83c706156d803de1081f5bfc5ec2df9fc7be106eded1131d5e417"
Jan 28 17:24:31 crc kubenswrapper[4811]: I0128 17:24:31.566150 4811 scope.go:117] "RemoveContainer" containerID="cbe5f9bd21aecc43d58b2f68c963413df95f64c23f5ced8b8b29b403501cd12a"
Jan 28 17:24:31 crc kubenswrapper[4811]: I0128 17:24:31.633240 4811 scope.go:117] "RemoveContainer" containerID="624824b7668483f016b76d55e52975342301626cacae5e5277d127d1ef1dc302"
Jan 28 17:24:32 crc kubenswrapper[4811]: I0128 17:24:32.122541 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-persistence-db-create-zgmpv"]
Jan 28 17:24:32 crc kubenswrapper[4811]: E0128 17:24:32.123061 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="407ae7f9-9fcc-4b3d-be4d-9bd254e95034" containerName="mariadb-account-create-update"
Jan 28 17:24:32 crc kubenswrapper[4811]: I0128 17:24:32.123088 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="407ae7f9-9fcc-4b3d-be4d-9bd254e95034" containerName="mariadb-account-create-update"
Jan 28 17:24:32 crc kubenswrapper[4811]: E0128 17:24:32.123137 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42475e6d-089d-42b4-9036-b625afeeb9fa" containerName="mariadb-database-create"
Jan 28 17:24:32 crc kubenswrapper[4811]: I0128 17:24:32.123146 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="42475e6d-089d-42b4-9036-b625afeeb9fa" containerName="mariadb-database-create"
Jan 28 17:24:32 crc kubenswrapper[4811]: I0128 17:24:32.123365 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="42475e6d-089d-42b4-9036-b625afeeb9fa" containerName="mariadb-database-create"
Jan 28 17:24:32 crc kubenswrapper[4811]: I0128 17:24:32.123397 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="407ae7f9-9fcc-4b3d-be4d-9bd254e95034" containerName="mariadb-account-create-update"
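[editor's annotation] The volume entries in this capture trace a full lifecycle per volume: VerifyControllerAttachedVolume, MountVolume started, MountVolume.SetUp succeeded, then UnmountVolume started, TearDown succeeded, and finally "Volume detached". A sketch (Python, stdlib only) that pairs each SetUp success with its later detach, keyed on the UniqueName so that repeated volume names like "operator-scripts" across different pods do not collide. The klog header ("I0128 17:24:28.708360") carries no year, so the computed deltas are only meaningful within a single run (an assumption):

import re
from datetime import datetime

TS_RE = re.compile(r'[IWE](\d{4} \d{2}:\d{2}:\d{2}\.\d{6})')
# Values inside kubelet structured logs appear as \"...\" in this capture,
# hence the literal backslashes in the patterns below.
MOUNT_RE = re.compile(r'MountVolume\.SetUp succeeded for volume .*?UniqueName: \\"([^"\\]+)\\"')
DETACH_RE = re.compile(r'Volume detached for volume .*?UniqueName: \\"([^"\\]+)\\"')

def ts(line):
    m = TS_RE.search(line)
    return datetime.strptime(m.group(1), "%m%d %H:%M:%S.%f") if m else None

def mount_lifetimes(lines):
    mounted, lifetimes = {}, {}
    for line in lines:
        m = MOUNT_RE.search(line)
        if m:
            mounted[m.group(1)] = ts(line)
            continue
        m = DETACH_RE.search(line)
        if m and m.group(1) in mounted:
            lifetimes[m.group(1)] = ts(line) - mounted.pop(m.group(1))
    return lifetimes  # UniqueName -> timedelta the volume stayed mounted

For the octavia-db-create-bpbqs job above this yields roughly three seconds per volume (mounted at 17:24:25, detached at 17:24:28), consistent with a short-lived job pod.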
Jan 28 17:24:32 crc kubenswrapper[4811]: I0128 17:24:32.124202 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-persistence-db-create-zgmpv"
Jan 28 17:24:32 crc kubenswrapper[4811]: I0128 17:24:32.142966 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-persistence-db-create-zgmpv"]
Jan 28 17:24:32 crc kubenswrapper[4811]: I0128 17:24:32.183879 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4be526f-8d55-4d2c-bed8-08ac750c4df1-operator-scripts\") pod \"octavia-persistence-db-create-zgmpv\" (UID: \"c4be526f-8d55-4d2c-bed8-08ac750c4df1\") " pod="openstack/octavia-persistence-db-create-zgmpv"
Jan 28 17:24:32 crc kubenswrapper[4811]: I0128 17:24:32.184294 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdcjx\" (UniqueName: \"kubernetes.io/projected/c4be526f-8d55-4d2c-bed8-08ac750c4df1-kube-api-access-mdcjx\") pod \"octavia-persistence-db-create-zgmpv\" (UID: \"c4be526f-8d55-4d2c-bed8-08ac750c4df1\") " pod="openstack/octavia-persistence-db-create-zgmpv"
Jan 28 17:24:32 crc kubenswrapper[4811]: I0128 17:24:32.286328 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdcjx\" (UniqueName: \"kubernetes.io/projected/c4be526f-8d55-4d2c-bed8-08ac750c4df1-kube-api-access-mdcjx\") pod \"octavia-persistence-db-create-zgmpv\" (UID: \"c4be526f-8d55-4d2c-bed8-08ac750c4df1\") " pod="openstack/octavia-persistence-db-create-zgmpv"
Jan 28 17:24:32 crc kubenswrapper[4811]: I0128 17:24:32.286827 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4be526f-8d55-4d2c-bed8-08ac750c4df1-operator-scripts\") pod \"octavia-persistence-db-create-zgmpv\" (UID: \"c4be526f-8d55-4d2c-bed8-08ac750c4df1\") " pod="openstack/octavia-persistence-db-create-zgmpv"
Jan 28 17:24:32 crc kubenswrapper[4811]: I0128 17:24:32.287574 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4be526f-8d55-4d2c-bed8-08ac750c4df1-operator-scripts\") pod \"octavia-persistence-db-create-zgmpv\" (UID: \"c4be526f-8d55-4d2c-bed8-08ac750c4df1\") " pod="openstack/octavia-persistence-db-create-zgmpv"
Jan 28 17:24:32 crc kubenswrapper[4811]: I0128 17:24:32.311316 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdcjx\" (UniqueName: \"kubernetes.io/projected/c4be526f-8d55-4d2c-bed8-08ac750c4df1-kube-api-access-mdcjx\") pod \"octavia-persistence-db-create-zgmpv\" (UID: \"c4be526f-8d55-4d2c-bed8-08ac750c4df1\") " pod="openstack/octavia-persistence-db-create-zgmpv"
Jan 28 17:24:32 crc kubenswrapper[4811]: I0128 17:24:32.449482 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-persistence-db-create-zgmpv"
Jan 28 17:24:32 crc kubenswrapper[4811]: I0128 17:24:32.613573 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-625d-account-create-update-px8pw"]
Jan 28 17:24:32 crc kubenswrapper[4811]: I0128 17:24:32.615346 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-625d-account-create-update-px8pw"
Jan 28 17:24:32 crc kubenswrapper[4811]: I0128 17:24:32.618401 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-persistence-db-secret"
Jan 28 17:24:32 crc kubenswrapper[4811]: I0128 17:24:32.624607 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-625d-account-create-update-px8pw"]
Jan 28 17:24:32 crc kubenswrapper[4811]: I0128 17:24:32.697374 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc230ae4-d6f3-4bd8-99e1-9107b216450d-operator-scripts\") pod \"octavia-625d-account-create-update-px8pw\" (UID: \"cc230ae4-d6f3-4bd8-99e1-9107b216450d\") " pod="openstack/octavia-625d-account-create-update-px8pw"
Jan 28 17:24:32 crc kubenswrapper[4811]: I0128 17:24:32.697774 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xhlw\" (UniqueName: \"kubernetes.io/projected/cc230ae4-d6f3-4bd8-99e1-9107b216450d-kube-api-access-5xhlw\") pod \"octavia-625d-account-create-update-px8pw\" (UID: \"cc230ae4-d6f3-4bd8-99e1-9107b216450d\") " pod="openstack/octavia-625d-account-create-update-px8pw"
Jan 28 17:24:32 crc kubenswrapper[4811]: I0128 17:24:32.799793 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xhlw\" (UniqueName: \"kubernetes.io/projected/cc230ae4-d6f3-4bd8-99e1-9107b216450d-kube-api-access-5xhlw\") pod \"octavia-625d-account-create-update-px8pw\" (UID: \"cc230ae4-d6f3-4bd8-99e1-9107b216450d\") " pod="openstack/octavia-625d-account-create-update-px8pw"
Jan 28 17:24:32 crc kubenswrapper[4811]: I0128 17:24:32.799899 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc230ae4-d6f3-4bd8-99e1-9107b216450d-operator-scripts\") pod \"octavia-625d-account-create-update-px8pw\" (UID: \"cc230ae4-d6f3-4bd8-99e1-9107b216450d\") " pod="openstack/octavia-625d-account-create-update-px8pw"
Jan 28 17:24:32 crc kubenswrapper[4811]: I0128 17:24:32.801092 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc230ae4-d6f3-4bd8-99e1-9107b216450d-operator-scripts\") pod \"octavia-625d-account-create-update-px8pw\" (UID: \"cc230ae4-d6f3-4bd8-99e1-9107b216450d\") " pod="openstack/octavia-625d-account-create-update-px8pw"
Jan 28 17:24:32 crc kubenswrapper[4811]: I0128 17:24:32.819789 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xhlw\" (UniqueName: \"kubernetes.io/projected/cc230ae4-d6f3-4bd8-99e1-9107b216450d-kube-api-access-5xhlw\") pod \"octavia-625d-account-create-update-px8pw\" (UID: \"cc230ae4-d6f3-4bd8-99e1-9107b216450d\") " pod="openstack/octavia-625d-account-create-update-px8pw"
Jan 28 17:24:32 crc kubenswrapper[4811]: I0128 17:24:32.941704 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-625d-account-create-update-px8pw"
Jan 28 17:24:32 crc kubenswrapper[4811]: I0128 17:24:32.965883 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-persistence-db-create-zgmpv"]
Jan 28 17:24:33 crc kubenswrapper[4811]: I0128 17:24:33.444591 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-zgmpv" event={"ID":"c4be526f-8d55-4d2c-bed8-08ac750c4df1","Type":"ContainerStarted","Data":"d0d71533591f8c9b33e17be37c791ff186731024c0e172743e5a8dd360d3ff1a"}
Jan 28 17:24:33 crc kubenswrapper[4811]: I0128 17:24:33.444845 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-zgmpv" event={"ID":"c4be526f-8d55-4d2c-bed8-08ac750c4df1","Type":"ContainerStarted","Data":"5d93b007287885da382f4a638870a5a6f5e47ea664797290ad9c24f1f8cbedcc"}
Jan 28 17:24:33 crc kubenswrapper[4811]: I0128 17:24:33.491977 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-625d-account-create-update-px8pw"]
Jan 28 17:24:33 crc kubenswrapper[4811]: I0128 17:24:33.493991 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-persistence-db-create-zgmpv" podStartSLOduration=1.493971337 podStartE2EDuration="1.493971337s" podCreationTimestamp="2026-01-28 17:24:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:24:33.488698364 +0000 UTC m=+5966.243061947" watchObservedRunningTime="2026-01-28 17:24:33.493971337 +0000 UTC m=+5966.248334930"
Jan 28 17:24:34 crc kubenswrapper[4811]: I0128 17:24:34.454328 4811 generic.go:334] "Generic (PLEG): container finished" podID="cc230ae4-d6f3-4bd8-99e1-9107b216450d" containerID="336d9036bbade2da460be404e5af8997e17f8a9c55b035942ef38930391755a1" exitCode=0
Jan 28 17:24:34 crc kubenswrapper[4811]: I0128 17:24:34.454730 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-625d-account-create-update-px8pw" event={"ID":"cc230ae4-d6f3-4bd8-99e1-9107b216450d","Type":"ContainerDied","Data":"336d9036bbade2da460be404e5af8997e17f8a9c55b035942ef38930391755a1"}
Jan 28 17:24:34 crc kubenswrapper[4811]: I0128 17:24:34.454765 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-625d-account-create-update-px8pw" event={"ID":"cc230ae4-d6f3-4bd8-99e1-9107b216450d","Type":"ContainerStarted","Data":"50928a57b130d13a08b7445e226c3caf300894c0ef05b6b35fca871d266ecf73"}
Jan 28 17:24:34 crc kubenswrapper[4811]: I0128 17:24:34.458054 4811 generic.go:334] "Generic (PLEG): container finished" podID="c4be526f-8d55-4d2c-bed8-08ac750c4df1" containerID="d0d71533591f8c9b33e17be37c791ff186731024c0e172743e5a8dd360d3ff1a" exitCode=0
Jan 28 17:24:34 crc kubenswrapper[4811]: I0128 17:24:34.458111 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-zgmpv" event={"ID":"c4be526f-8d55-4d2c-bed8-08ac750c4df1","Type":"ContainerDied","Data":"d0d71533591f8c9b33e17be37c791ff186731024c0e172743e5a8dd360d3ff1a"}
Jan 28 17:24:34 crc kubenswrapper[4811]: I0128 17:24:34.953286 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-6zndg" podUID="7bf5f5e6-489a-4e4a-9974-e0d4c029534e" containerName="ovn-controller" probeResult="failure" output=<
Jan 28 17:24:34 crc kubenswrapper[4811]: 	ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Jan 28 17:24:34 crc kubenswrapper[4811]: >
Jan 28 17:24:34 crc kubenswrapper[4811]: I0128 17:24:34.967860 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-4w45h"
Jan 28 17:24:34 crc kubenswrapper[4811]: I0128 17:24:34.970581 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-4w45h"
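[editor's annotation] The readiness failure above is a multi-line entry: kubelet brackets the probe's output between "output=<" and a lone ">" on a continuation line that repeats the journal prefix. A sketch (Python, stdlib only, assuming this capture's prefix layout) that collects such failures together with their bracketed output:

import re

FAIL_RE = re.compile(
    r'"Probe failed" probeType="(?P<type>\w+)" pod="(?P<pod>[^"]+)".*'
    r'containerName="(?P<ctr>[^"]+)".*output=<'
)

def probe_failures(lines):
    failures, current = [], None
    for line in lines:
        if current is not None:
            # continuation lines repeat the "<host> kubenswrapper[<pid>]: " prefix
            body = line.split("]: ", 1)[-1]
            if body.strip() == ">":
                failures.append(current)
                current = None
            else:
                current["output"].append(body.strip())
            continue
        m = FAIL_RE.search(line)
        if m:
            current = {"pod": m.group("pod"), "container": m.group("ctr"),
                       "probe": m.group("type"), "output": []}
    return failures

Here this would recover one failure for openstack/ovn-controller-6zndg with the output line "ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status", which fits the surrounding events: the ovn-controller-6zndg-config job that reconfigures the controller only runs at 17:24:35, just after this probe.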
pod="openstack/ovn-controller-6zndg-config-nwxsw" Jan 28 17:24:35 crc kubenswrapper[4811]: I0128 17:24:35.245859 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/34119f42-3f46-45d8-83eb-07dc71233757-var-run\") pod \"ovn-controller-6zndg-config-nwxsw\" (UID: \"34119f42-3f46-45d8-83eb-07dc71233757\") " pod="openstack/ovn-controller-6zndg-config-nwxsw" Jan 28 17:24:35 crc kubenswrapper[4811]: I0128 17:24:35.245952 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/34119f42-3f46-45d8-83eb-07dc71233757-additional-scripts\") pod \"ovn-controller-6zndg-config-nwxsw\" (UID: \"34119f42-3f46-45d8-83eb-07dc71233757\") " pod="openstack/ovn-controller-6zndg-config-nwxsw" Jan 28 17:24:35 crc kubenswrapper[4811]: I0128 17:24:35.246155 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/34119f42-3f46-45d8-83eb-07dc71233757-var-run\") pod \"ovn-controller-6zndg-config-nwxsw\" (UID: \"34119f42-3f46-45d8-83eb-07dc71233757\") " pod="openstack/ovn-controller-6zndg-config-nwxsw" Jan 28 17:24:35 crc kubenswrapper[4811]: I0128 17:24:35.246840 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/34119f42-3f46-45d8-83eb-07dc71233757-additional-scripts\") pod \"ovn-controller-6zndg-config-nwxsw\" (UID: \"34119f42-3f46-45d8-83eb-07dc71233757\") " pod="openstack/ovn-controller-6zndg-config-nwxsw" Jan 28 17:24:35 crc kubenswrapper[4811]: I0128 17:24:35.246403 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/34119f42-3f46-45d8-83eb-07dc71233757-scripts\") pod \"ovn-controller-6zndg-config-nwxsw\" (UID: \"34119f42-3f46-45d8-83eb-07dc71233757\") " pod="openstack/ovn-controller-6zndg-config-nwxsw" Jan 28 17:24:35 crc kubenswrapper[4811]: I0128 17:24:35.247647 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-msgfj\" (UniqueName: \"kubernetes.io/projected/34119f42-3f46-45d8-83eb-07dc71233757-kube-api-access-msgfj\") pod \"ovn-controller-6zndg-config-nwxsw\" (UID: \"34119f42-3f46-45d8-83eb-07dc71233757\") " pod="openstack/ovn-controller-6zndg-config-nwxsw" Jan 28 17:24:35 crc kubenswrapper[4811]: I0128 17:24:35.247795 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/34119f42-3f46-45d8-83eb-07dc71233757-var-run-ovn\") pod \"ovn-controller-6zndg-config-nwxsw\" (UID: \"34119f42-3f46-45d8-83eb-07dc71233757\") " pod="openstack/ovn-controller-6zndg-config-nwxsw" Jan 28 17:24:35 crc kubenswrapper[4811]: I0128 17:24:35.247861 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/34119f42-3f46-45d8-83eb-07dc71233757-var-run-ovn\") pod \"ovn-controller-6zndg-config-nwxsw\" (UID: \"34119f42-3f46-45d8-83eb-07dc71233757\") " pod="openstack/ovn-controller-6zndg-config-nwxsw" Jan 28 17:24:35 crc kubenswrapper[4811]: I0128 17:24:35.247983 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/34119f42-3f46-45d8-83eb-07dc71233757-var-log-ovn\") pod \"ovn-controller-6zndg-config-nwxsw\" (UID: \"34119f42-3f46-45d8-83eb-07dc71233757\") " 
pod="openstack/ovn-controller-6zndg-config-nwxsw" Jan 28 17:24:35 crc kubenswrapper[4811]: I0128 17:24:35.248148 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/34119f42-3f46-45d8-83eb-07dc71233757-var-log-ovn\") pod \"ovn-controller-6zndg-config-nwxsw\" (UID: \"34119f42-3f46-45d8-83eb-07dc71233757\") " pod="openstack/ovn-controller-6zndg-config-nwxsw" Jan 28 17:24:35 crc kubenswrapper[4811]: I0128 17:24:35.248545 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/34119f42-3f46-45d8-83eb-07dc71233757-scripts\") pod \"ovn-controller-6zndg-config-nwxsw\" (UID: \"34119f42-3f46-45d8-83eb-07dc71233757\") " pod="openstack/ovn-controller-6zndg-config-nwxsw" Jan 28 17:24:35 crc kubenswrapper[4811]: I0128 17:24:35.271335 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-msgfj\" (UniqueName: \"kubernetes.io/projected/34119f42-3f46-45d8-83eb-07dc71233757-kube-api-access-msgfj\") pod \"ovn-controller-6zndg-config-nwxsw\" (UID: \"34119f42-3f46-45d8-83eb-07dc71233757\") " pod="openstack/ovn-controller-6zndg-config-nwxsw" Jan 28 17:24:35 crc kubenswrapper[4811]: I0128 17:24:35.400337 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-6zndg-config-nwxsw" Jan 28 17:24:35 crc kubenswrapper[4811]: I0128 17:24:35.894354 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-6zndg-config-nwxsw"] Jan 28 17:24:35 crc kubenswrapper[4811]: W0128 17:24:35.915338 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod34119f42_3f46_45d8_83eb_07dc71233757.slice/crio-dc4f2b796192cf37c91c71a835b1219b058f8db979cd0311c2af02305f851c1e WatchSource:0}: Error finding container dc4f2b796192cf37c91c71a835b1219b058f8db979cd0311c2af02305f851c1e: Status 404 returned error can't find the container with id dc4f2b796192cf37c91c71a835b1219b058f8db979cd0311c2af02305f851c1e Jan 28 17:24:35 crc kubenswrapper[4811]: I0128 17:24:35.971578 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-persistence-db-create-zgmpv" Jan 28 17:24:35 crc kubenswrapper[4811]: I0128 17:24:35.982919 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-625d-account-create-update-px8pw" Jan 28 17:24:36 crc kubenswrapper[4811]: I0128 17:24:36.063954 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4be526f-8d55-4d2c-bed8-08ac750c4df1-operator-scripts\") pod \"c4be526f-8d55-4d2c-bed8-08ac750c4df1\" (UID: \"c4be526f-8d55-4d2c-bed8-08ac750c4df1\") " Jan 28 17:24:36 crc kubenswrapper[4811]: I0128 17:24:36.064095 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc230ae4-d6f3-4bd8-99e1-9107b216450d-operator-scripts\") pod \"cc230ae4-d6f3-4bd8-99e1-9107b216450d\" (UID: \"cc230ae4-d6f3-4bd8-99e1-9107b216450d\") " Jan 28 17:24:36 crc kubenswrapper[4811]: I0128 17:24:36.064119 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5xhlw\" (UniqueName: \"kubernetes.io/projected/cc230ae4-d6f3-4bd8-99e1-9107b216450d-kube-api-access-5xhlw\") pod \"cc230ae4-d6f3-4bd8-99e1-9107b216450d\" (UID: \"cc230ae4-d6f3-4bd8-99e1-9107b216450d\") " Jan 28 17:24:36 crc kubenswrapper[4811]: I0128 17:24:36.064150 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mdcjx\" (UniqueName: \"kubernetes.io/projected/c4be526f-8d55-4d2c-bed8-08ac750c4df1-kube-api-access-mdcjx\") pod \"c4be526f-8d55-4d2c-bed8-08ac750c4df1\" (UID: \"c4be526f-8d55-4d2c-bed8-08ac750c4df1\") " Jan 28 17:24:36 crc kubenswrapper[4811]: I0128 17:24:36.064826 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4be526f-8d55-4d2c-bed8-08ac750c4df1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c4be526f-8d55-4d2c-bed8-08ac750c4df1" (UID: "c4be526f-8d55-4d2c-bed8-08ac750c4df1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:24:36 crc kubenswrapper[4811]: I0128 17:24:36.065549 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc230ae4-d6f3-4bd8-99e1-9107b216450d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "cc230ae4-d6f3-4bd8-99e1-9107b216450d" (UID: "cc230ae4-d6f3-4bd8-99e1-9107b216450d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:24:36 crc kubenswrapper[4811]: I0128 17:24:36.065798 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc230ae4-d6f3-4bd8-99e1-9107b216450d-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:24:36 crc kubenswrapper[4811]: I0128 17:24:36.065838 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4be526f-8d55-4d2c-bed8-08ac750c4df1-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:24:36 crc kubenswrapper[4811]: I0128 17:24:36.072706 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc230ae4-d6f3-4bd8-99e1-9107b216450d-kube-api-access-5xhlw" (OuterVolumeSpecName: "kube-api-access-5xhlw") pod "cc230ae4-d6f3-4bd8-99e1-9107b216450d" (UID: "cc230ae4-d6f3-4bd8-99e1-9107b216450d"). InnerVolumeSpecName "kube-api-access-5xhlw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:24:36 crc kubenswrapper[4811]: I0128 17:24:36.078777 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4be526f-8d55-4d2c-bed8-08ac750c4df1-kube-api-access-mdcjx" (OuterVolumeSpecName: "kube-api-access-mdcjx") pod "c4be526f-8d55-4d2c-bed8-08ac750c4df1" (UID: "c4be526f-8d55-4d2c-bed8-08ac750c4df1"). InnerVolumeSpecName "kube-api-access-mdcjx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:24:36 crc kubenswrapper[4811]: I0128 17:24:36.168661 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5xhlw\" (UniqueName: \"kubernetes.io/projected/cc230ae4-d6f3-4bd8-99e1-9107b216450d-kube-api-access-5xhlw\") on node \"crc\" DevicePath \"\"" Jan 28 17:24:36 crc kubenswrapper[4811]: I0128 17:24:36.168701 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mdcjx\" (UniqueName: \"kubernetes.io/projected/c4be526f-8d55-4d2c-bed8-08ac750c4df1-kube-api-access-mdcjx\") on node \"crc\" DevicePath \"\"" Jan 28 17:24:36 crc kubenswrapper[4811]: I0128 17:24:36.478237 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-625d-account-create-update-px8pw" Jan 28 17:24:36 crc kubenswrapper[4811]: I0128 17:24:36.478232 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-625d-account-create-update-px8pw" event={"ID":"cc230ae4-d6f3-4bd8-99e1-9107b216450d","Type":"ContainerDied","Data":"50928a57b130d13a08b7445e226c3caf300894c0ef05b6b35fca871d266ecf73"} Jan 28 17:24:36 crc kubenswrapper[4811]: I0128 17:24:36.478370 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="50928a57b130d13a08b7445e226c3caf300894c0ef05b6b35fca871d266ecf73" Jan 28 17:24:36 crc kubenswrapper[4811]: I0128 17:24:36.484009 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-zgmpv" event={"ID":"c4be526f-8d55-4d2c-bed8-08ac750c4df1","Type":"ContainerDied","Data":"5d93b007287885da382f4a638870a5a6f5e47ea664797290ad9c24f1f8cbedcc"} Jan 28 17:24:36 crc kubenswrapper[4811]: I0128 17:24:36.484027 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-persistence-db-create-zgmpv" Jan 28 17:24:36 crc kubenswrapper[4811]: I0128 17:24:36.484038 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5d93b007287885da382f4a638870a5a6f5e47ea664797290ad9c24f1f8cbedcc" Jan 28 17:24:36 crc kubenswrapper[4811]: I0128 17:24:36.486109 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-6zndg-config-nwxsw" event={"ID":"34119f42-3f46-45d8-83eb-07dc71233757","Type":"ContainerStarted","Data":"f7b0f301f999bd32dd4cb871a158852f7a0d128b505f16a0edbd22dfa1209757"} Jan 28 17:24:36 crc kubenswrapper[4811]: I0128 17:24:36.486148 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-6zndg-config-nwxsw" event={"ID":"34119f42-3f46-45d8-83eb-07dc71233757","Type":"ContainerStarted","Data":"dc4f2b796192cf37c91c71a835b1219b058f8db979cd0311c2af02305f851c1e"} Jan 28 17:24:36 crc kubenswrapper[4811]: I0128 17:24:36.505283 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-6zndg-config-nwxsw" podStartSLOduration=1.505264129 podStartE2EDuration="1.505264129s" podCreationTimestamp="2026-01-28 17:24:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:24:36.505138756 +0000 UTC m=+5969.259502339" watchObservedRunningTime="2026-01-28 17:24:36.505264129 +0000 UTC m=+5969.259627712" Jan 28 17:24:37 crc kubenswrapper[4811]: I0128 17:24:37.498839 4811 generic.go:334] "Generic (PLEG): container finished" podID="34119f42-3f46-45d8-83eb-07dc71233757" containerID="f7b0f301f999bd32dd4cb871a158852f7a0d128b505f16a0edbd22dfa1209757" exitCode=0 Jan 28 17:24:37 crc kubenswrapper[4811]: I0128 17:24:37.498901 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-6zndg-config-nwxsw" event={"ID":"34119f42-3f46-45d8-83eb-07dc71233757","Type":"ContainerDied","Data":"f7b0f301f999bd32dd4cb871a158852f7a0d128b505f16a0edbd22dfa1209757"} Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.503167 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-api-98d866f5b-p8btt"] Jan 28 17:24:38 crc kubenswrapper[4811]: E0128 17:24:38.503936 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4be526f-8d55-4d2c-bed8-08ac750c4df1" containerName="mariadb-database-create" Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.503954 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4be526f-8d55-4d2c-bed8-08ac750c4df1" containerName="mariadb-database-create" Jan 28 17:24:38 crc kubenswrapper[4811]: E0128 17:24:38.503978 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc230ae4-d6f3-4bd8-99e1-9107b216450d" containerName="mariadb-account-create-update" Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.503987 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc230ae4-d6f3-4bd8-99e1-9107b216450d" containerName="mariadb-account-create-update" Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.504185 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc230ae4-d6f3-4bd8-99e1-9107b216450d" containerName="mariadb-account-create-update" Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.504199 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4be526f-8d55-4d2c-bed8-08ac750c4df1" containerName="mariadb-database-create" Jan 28 17:24:38 crc kubenswrapper[4811]: 
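[editor's annotation] The W-level "Failed to process watch event ... Status 404" entries recur throughout this capture (for containers eed22678..., 12337e93..., fdf80ae2..., dc4f2b79...). Each one is followed shortly by a successful ContainerStarted for the same ID, which suggests a transient race between cAdvisor's cgroup watcher and CRI-O container registration rather than a real failure, though that reading is an inference from this log only. A sketch (Python, stdlib only) to tally them per cgroup slice and confirm they are one-shot:

import re
from collections import Counter

WATCH_404_RE = re.compile(
    r'Failed to process watch event .*Name:(?P<cgroup>\S+) WatchSource:0\}.*Status 404'
)

def watch_404_counts(lines):
    # One hit per warning; a count above 1 for the same slice would point
    # at a persistent problem instead of a startup race.
    return Counter(m.group("cgroup")
                   for line in lines
                   for m in WATCH_404_RE.finditer(line))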
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.506242 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-api-98d866f5b-p8btt"
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.513271 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-api-scripts"
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.513721 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-api-config-data"
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.519980 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-octavia-dockercfg-qfpfx"
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.533733 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-api-98d866f5b-p8btt"]
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.619859 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4634aca9-e4c6-4fbb-aba2-ab011d320ca0-combined-ca-bundle\") pod \"octavia-api-98d866f5b-p8btt\" (UID: \"4634aca9-e4c6-4fbb-aba2-ab011d320ca0\") " pod="openstack/octavia-api-98d866f5b-p8btt"
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.619949 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/4634aca9-e4c6-4fbb-aba2-ab011d320ca0-config-data-merged\") pod \"octavia-api-98d866f5b-p8btt\" (UID: \"4634aca9-e4c6-4fbb-aba2-ab011d320ca0\") " pod="openstack/octavia-api-98d866f5b-p8btt"
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.619986 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/4634aca9-e4c6-4fbb-aba2-ab011d320ca0-octavia-run\") pod \"octavia-api-98d866f5b-p8btt\" (UID: \"4634aca9-e4c6-4fbb-aba2-ab011d320ca0\") " pod="openstack/octavia-api-98d866f5b-p8btt"
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.620024 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4634aca9-e4c6-4fbb-aba2-ab011d320ca0-scripts\") pod \"octavia-api-98d866f5b-p8btt\" (UID: \"4634aca9-e4c6-4fbb-aba2-ab011d320ca0\") " pod="openstack/octavia-api-98d866f5b-p8btt"
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.620077 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4634aca9-e4c6-4fbb-aba2-ab011d320ca0-config-data\") pod \"octavia-api-98d866f5b-p8btt\" (UID: \"4634aca9-e4c6-4fbb-aba2-ab011d320ca0\") " pod="openstack/octavia-api-98d866f5b-p8btt"
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.722358 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4634aca9-e4c6-4fbb-aba2-ab011d320ca0-scripts\") pod \"octavia-api-98d866f5b-p8btt\" (UID: \"4634aca9-e4c6-4fbb-aba2-ab011d320ca0\") " pod="openstack/octavia-api-98d866f5b-p8btt"
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.722466 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4634aca9-e4c6-4fbb-aba2-ab011d320ca0-config-data\") pod \"octavia-api-98d866f5b-p8btt\" (UID: \"4634aca9-e4c6-4fbb-aba2-ab011d320ca0\") " pod="openstack/octavia-api-98d866f5b-p8btt"
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.722528 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4634aca9-e4c6-4fbb-aba2-ab011d320ca0-combined-ca-bundle\") pod \"octavia-api-98d866f5b-p8btt\" (UID: \"4634aca9-e4c6-4fbb-aba2-ab011d320ca0\") " pod="openstack/octavia-api-98d866f5b-p8btt"
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.722610 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/4634aca9-e4c6-4fbb-aba2-ab011d320ca0-config-data-merged\") pod \"octavia-api-98d866f5b-p8btt\" (UID: \"4634aca9-e4c6-4fbb-aba2-ab011d320ca0\") " pod="openstack/octavia-api-98d866f5b-p8btt"
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.722648 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/4634aca9-e4c6-4fbb-aba2-ab011d320ca0-octavia-run\") pod \"octavia-api-98d866f5b-p8btt\" (UID: \"4634aca9-e4c6-4fbb-aba2-ab011d320ca0\") " pod="openstack/octavia-api-98d866f5b-p8btt"
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.723085 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/4634aca9-e4c6-4fbb-aba2-ab011d320ca0-octavia-run\") pod \"octavia-api-98d866f5b-p8btt\" (UID: \"4634aca9-e4c6-4fbb-aba2-ab011d320ca0\") " pod="openstack/octavia-api-98d866f5b-p8btt"
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.725917 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/4634aca9-e4c6-4fbb-aba2-ab011d320ca0-config-data-merged\") pod \"octavia-api-98d866f5b-p8btt\" (UID: \"4634aca9-e4c6-4fbb-aba2-ab011d320ca0\") " pod="openstack/octavia-api-98d866f5b-p8btt"
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.736219 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4634aca9-e4c6-4fbb-aba2-ab011d320ca0-scripts\") pod \"octavia-api-98d866f5b-p8btt\" (UID: \"4634aca9-e4c6-4fbb-aba2-ab011d320ca0\") " pod="openstack/octavia-api-98d866f5b-p8btt"
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.736980 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4634aca9-e4c6-4fbb-aba2-ab011d320ca0-config-data\") pod \"octavia-api-98d866f5b-p8btt\" (UID: \"4634aca9-e4c6-4fbb-aba2-ab011d320ca0\") " pod="openstack/octavia-api-98d866f5b-p8btt"
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.737748 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4634aca9-e4c6-4fbb-aba2-ab011d320ca0-combined-ca-bundle\") pod \"octavia-api-98d866f5b-p8btt\" (UID: \"4634aca9-e4c6-4fbb-aba2-ab011d320ca0\") " pod="openstack/octavia-api-98d866f5b-p8btt"
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.819476 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-6zndg-config-nwxsw"
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.831490 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-api-98d866f5b-p8btt"
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.925567 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/34119f42-3f46-45d8-83eb-07dc71233757-additional-scripts\") pod \"34119f42-3f46-45d8-83eb-07dc71233757\" (UID: \"34119f42-3f46-45d8-83eb-07dc71233757\") "
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.925625 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/34119f42-3f46-45d8-83eb-07dc71233757-scripts\") pod \"34119f42-3f46-45d8-83eb-07dc71233757\" (UID: \"34119f42-3f46-45d8-83eb-07dc71233757\") "
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.925696 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-msgfj\" (UniqueName: \"kubernetes.io/projected/34119f42-3f46-45d8-83eb-07dc71233757-kube-api-access-msgfj\") pod \"34119f42-3f46-45d8-83eb-07dc71233757\" (UID: \"34119f42-3f46-45d8-83eb-07dc71233757\") "
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.925724 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/34119f42-3f46-45d8-83eb-07dc71233757-var-run-ovn\") pod \"34119f42-3f46-45d8-83eb-07dc71233757\" (UID: \"34119f42-3f46-45d8-83eb-07dc71233757\") "
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.925782 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/34119f42-3f46-45d8-83eb-07dc71233757-var-run\") pod \"34119f42-3f46-45d8-83eb-07dc71233757\" (UID: \"34119f42-3f46-45d8-83eb-07dc71233757\") "
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.925903 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/34119f42-3f46-45d8-83eb-07dc71233757-var-log-ovn\") pod \"34119f42-3f46-45d8-83eb-07dc71233757\" (UID: \"34119f42-3f46-45d8-83eb-07dc71233757\") "
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.926319 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/34119f42-3f46-45d8-83eb-07dc71233757-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "34119f42-3f46-45d8-83eb-07dc71233757" (UID: "34119f42-3f46-45d8-83eb-07dc71233757"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.927249 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34119f42-3f46-45d8-83eb-07dc71233757-scripts" (OuterVolumeSpecName: "scripts") pod "34119f42-3f46-45d8-83eb-07dc71233757" (UID: "34119f42-3f46-45d8-83eb-07dc71233757"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.927297 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/34119f42-3f46-45d8-83eb-07dc71233757-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "34119f42-3f46-45d8-83eb-07dc71233757" (UID: "34119f42-3f46-45d8-83eb-07dc71233757"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.927321 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/34119f42-3f46-45d8-83eb-07dc71233757-var-run" (OuterVolumeSpecName: "var-run") pod "34119f42-3f46-45d8-83eb-07dc71233757" (UID: "34119f42-3f46-45d8-83eb-07dc71233757"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.927400 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34119f42-3f46-45d8-83eb-07dc71233757-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "34119f42-3f46-45d8-83eb-07dc71233757" (UID: "34119f42-3f46-45d8-83eb-07dc71233757"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 17:24:38 crc kubenswrapper[4811]: I0128 17:24:38.933585 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34119f42-3f46-45d8-83eb-07dc71233757-kube-api-access-msgfj" (OuterVolumeSpecName: "kube-api-access-msgfj") pod "34119f42-3f46-45d8-83eb-07dc71233757" (UID: "34119f42-3f46-45d8-83eb-07dc71233757"). InnerVolumeSpecName "kube-api-access-msgfj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 17:24:39 crc kubenswrapper[4811]: I0128 17:24:39.030667 4811 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/34119f42-3f46-45d8-83eb-07dc71233757-var-log-ovn\") on node \"crc\" DevicePath \"\""
Jan 28 17:24:39 crc kubenswrapper[4811]: I0128 17:24:39.030893 4811 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/34119f42-3f46-45d8-83eb-07dc71233757-additional-scripts\") on node \"crc\" DevicePath \"\""
Jan 28 17:24:39 crc kubenswrapper[4811]: I0128 17:24:39.030904 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/34119f42-3f46-45d8-83eb-07dc71233757-scripts\") on node \"crc\" DevicePath \"\""
Jan 28 17:24:39 crc kubenswrapper[4811]: I0128 17:24:39.030912 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-msgfj\" (UniqueName: \"kubernetes.io/projected/34119f42-3f46-45d8-83eb-07dc71233757-kube-api-access-msgfj\") on node \"crc\" DevicePath \"\""
Jan 28 17:24:39 crc kubenswrapper[4811]: I0128 17:24:39.030921 4811 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/34119f42-3f46-45d8-83eb-07dc71233757-var-run-ovn\") on node \"crc\" DevicePath \"\""
Jan 28 17:24:39 crc kubenswrapper[4811]: I0128 17:24:39.030929 4811 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/34119f42-3f46-45d8-83eb-07dc71233757-var-run\") on node \"crc\" DevicePath \"\""
Jan 28 17:24:39 crc kubenswrapper[4811]: I0128 17:24:39.380599 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-api-98d866f5b-p8btt"]
Jan 28 17:24:39 crc kubenswrapper[4811]: W0128 17:24:39.381253 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4634aca9_e4c6_4fbb_aba2_ab011d320ca0.slice/crio-0ba42bcde6f9aa3dd50482b058fddddb65770e01d7f6f58095d1f136a92998a5 WatchSource:0}: Error finding container
0ba42bcde6f9aa3dd50482b058fddddb65770e01d7f6f58095d1f136a92998a5: Status 404 returned error can't find the container with id 0ba42bcde6f9aa3dd50482b058fddddb65770e01d7f6f58095d1f136a92998a5 Jan 28 17:24:39 crc kubenswrapper[4811]: I0128 17:24:39.384033 4811 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 28 17:24:39 crc kubenswrapper[4811]: I0128 17:24:39.517768 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-98d866f5b-p8btt" event={"ID":"4634aca9-e4c6-4fbb-aba2-ab011d320ca0","Type":"ContainerStarted","Data":"0ba42bcde6f9aa3dd50482b058fddddb65770e01d7f6f58095d1f136a92998a5"} Jan 28 17:24:39 crc kubenswrapper[4811]: I0128 17:24:39.520792 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-6zndg-config-nwxsw" event={"ID":"34119f42-3f46-45d8-83eb-07dc71233757","Type":"ContainerDied","Data":"dc4f2b796192cf37c91c71a835b1219b058f8db979cd0311c2af02305f851c1e"} Jan 28 17:24:39 crc kubenswrapper[4811]: I0128 17:24:39.520833 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dc4f2b796192cf37c91c71a835b1219b058f8db979cd0311c2af02305f851c1e" Jan 28 17:24:39 crc kubenswrapper[4811]: I0128 17:24:39.520894 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-6zndg-config-nwxsw" Jan 28 17:24:39 crc kubenswrapper[4811]: I0128 17:24:39.611316 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-6zndg-config-nwxsw"] Jan 28 17:24:39 crc kubenswrapper[4811]: I0128 17:24:39.619036 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-6zndg-config-nwxsw"] Jan 28 17:24:39 crc kubenswrapper[4811]: I0128 17:24:39.967058 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-6zndg" Jan 28 17:24:40 crc kubenswrapper[4811]: I0128 17:24:40.355888 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34119f42-3f46-45d8-83eb-07dc71233757" path="/var/lib/kubelet/pods/34119f42-3f46-45d8-83eb-07dc71233757/volumes" Jan 28 17:24:50 crc kubenswrapper[4811]: I0128 17:24:50.640899 4811 generic.go:334] "Generic (PLEG): container finished" podID="4634aca9-e4c6-4fbb-aba2-ab011d320ca0" containerID="18fbc580d8a3c121950598c2410b90f1712c37ff59723f7f6b322ca56b51a178" exitCode=0 Jan 28 17:24:50 crc kubenswrapper[4811]: I0128 17:24:50.640937 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-98d866f5b-p8btt" event={"ID":"4634aca9-e4c6-4fbb-aba2-ab011d320ca0","Type":"ContainerDied","Data":"18fbc580d8a3c121950598c2410b90f1712c37ff59723f7f6b322ca56b51a178"} Jan 28 17:24:51 crc kubenswrapper[4811]: I0128 17:24:51.651367 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-98d866f5b-p8btt" event={"ID":"4634aca9-e4c6-4fbb-aba2-ab011d320ca0","Type":"ContainerStarted","Data":"e9490754cc3da0d0d4d2676f38d1f60dc1dc5c65e9b0ebcdab32d3ff15fa7063"} Jan 28 17:24:51 crc kubenswrapper[4811]: I0128 17:24:51.651740 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-api-98d866f5b-p8btt" Jan 28 17:24:51 crc kubenswrapper[4811]: I0128 17:24:51.651756 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-98d866f5b-p8btt" event={"ID":"4634aca9-e4c6-4fbb-aba2-ab011d320ca0","Type":"ContainerStarted","Data":"40c23c2f7ac97d97990f9cf373d949495844ff258a5a89d89ccef9aa26018559"} Jan 28 17:24:51 crc 
kubenswrapper[4811]: I0128 17:24:51.651770 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-api-98d866f5b-p8btt" Jan 28 17:24:51 crc kubenswrapper[4811]: I0128 17:24:51.676564 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-api-98d866f5b-p8btt" podStartSLOduration=3.429538282 podStartE2EDuration="13.67654706s" podCreationTimestamp="2026-01-28 17:24:38 +0000 UTC" firstStartedPulling="2026-01-28 17:24:39.383777556 +0000 UTC m=+5972.138141139" lastFinishedPulling="2026-01-28 17:24:49.630786334 +0000 UTC m=+5982.385149917" observedRunningTime="2026-01-28 17:24:51.668600183 +0000 UTC m=+5984.422963766" watchObservedRunningTime="2026-01-28 17:24:51.67654706 +0000 UTC m=+5984.430910643" Jan 28 17:24:59 crc kubenswrapper[4811]: I0128 17:24:59.643863 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-rsyslog-nc627"] Jan 28 17:24:59 crc kubenswrapper[4811]: E0128 17:24:59.644796 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34119f42-3f46-45d8-83eb-07dc71233757" containerName="ovn-config" Jan 28 17:24:59 crc kubenswrapper[4811]: I0128 17:24:59.644810 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="34119f42-3f46-45d8-83eb-07dc71233757" containerName="ovn-config" Jan 28 17:24:59 crc kubenswrapper[4811]: I0128 17:24:59.645005 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="34119f42-3f46-45d8-83eb-07dc71233757" containerName="ovn-config" Jan 28 17:24:59 crc kubenswrapper[4811]: I0128 17:24:59.646004 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-rsyslog-nc627" Jan 28 17:24:59 crc kubenswrapper[4811]: I0128 17:24:59.651735 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-rsyslog-config-data" Jan 28 17:24:59 crc kubenswrapper[4811]: I0128 17:24:59.651806 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"octavia-hmport-map" Jan 28 17:24:59 crc kubenswrapper[4811]: I0128 17:24:59.652186 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-rsyslog-scripts" Jan 28 17:24:59 crc kubenswrapper[4811]: I0128 17:24:59.659975 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-rsyslog-nc627"] Jan 28 17:24:59 crc kubenswrapper[4811]: I0128 17:24:59.743497 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/88cbdf9d-4e65-4269-bf33-ad4a6842812a-config-data-merged\") pod \"octavia-rsyslog-nc627\" (UID: \"88cbdf9d-4e65-4269-bf33-ad4a6842812a\") " pod="openstack/octavia-rsyslog-nc627" Jan 28 17:24:59 crc kubenswrapper[4811]: I0128 17:24:59.743587 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/88cbdf9d-4e65-4269-bf33-ad4a6842812a-hm-ports\") pod \"octavia-rsyslog-nc627\" (UID: \"88cbdf9d-4e65-4269-bf33-ad4a6842812a\") " pod="openstack/octavia-rsyslog-nc627" Jan 28 17:24:59 crc kubenswrapper[4811]: I0128 17:24:59.743696 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88cbdf9d-4e65-4269-bf33-ad4a6842812a-config-data\") pod \"octavia-rsyslog-nc627\" (UID: \"88cbdf9d-4e65-4269-bf33-ad4a6842812a\") " pod="openstack/octavia-rsyslog-nc627" Jan 28 17:24:59 crc 
kubenswrapper[4811]: I0128 17:24:59.743738 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/88cbdf9d-4e65-4269-bf33-ad4a6842812a-scripts\") pod \"octavia-rsyslog-nc627\" (UID: \"88cbdf9d-4e65-4269-bf33-ad4a6842812a\") " pod="openstack/octavia-rsyslog-nc627" Jan 28 17:24:59 crc kubenswrapper[4811]: I0128 17:24:59.845839 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/88cbdf9d-4e65-4269-bf33-ad4a6842812a-config-data-merged\") pod \"octavia-rsyslog-nc627\" (UID: \"88cbdf9d-4e65-4269-bf33-ad4a6842812a\") " pod="openstack/octavia-rsyslog-nc627" Jan 28 17:24:59 crc kubenswrapper[4811]: I0128 17:24:59.845929 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/88cbdf9d-4e65-4269-bf33-ad4a6842812a-hm-ports\") pod \"octavia-rsyslog-nc627\" (UID: \"88cbdf9d-4e65-4269-bf33-ad4a6842812a\") " pod="openstack/octavia-rsyslog-nc627" Jan 28 17:24:59 crc kubenswrapper[4811]: I0128 17:24:59.846115 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88cbdf9d-4e65-4269-bf33-ad4a6842812a-config-data\") pod \"octavia-rsyslog-nc627\" (UID: \"88cbdf9d-4e65-4269-bf33-ad4a6842812a\") " pod="openstack/octavia-rsyslog-nc627" Jan 28 17:24:59 crc kubenswrapper[4811]: I0128 17:24:59.846174 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/88cbdf9d-4e65-4269-bf33-ad4a6842812a-scripts\") pod \"octavia-rsyslog-nc627\" (UID: \"88cbdf9d-4e65-4269-bf33-ad4a6842812a\") " pod="openstack/octavia-rsyslog-nc627" Jan 28 17:24:59 crc kubenswrapper[4811]: I0128 17:24:59.846885 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/88cbdf9d-4e65-4269-bf33-ad4a6842812a-hm-ports\") pod \"octavia-rsyslog-nc627\" (UID: \"88cbdf9d-4e65-4269-bf33-ad4a6842812a\") " pod="openstack/octavia-rsyslog-nc627" Jan 28 17:24:59 crc kubenswrapper[4811]: I0128 17:24:59.847169 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/88cbdf9d-4e65-4269-bf33-ad4a6842812a-config-data-merged\") pod \"octavia-rsyslog-nc627\" (UID: \"88cbdf9d-4e65-4269-bf33-ad4a6842812a\") " pod="openstack/octavia-rsyslog-nc627" Jan 28 17:24:59 crc kubenswrapper[4811]: I0128 17:24:59.852995 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88cbdf9d-4e65-4269-bf33-ad4a6842812a-config-data\") pod \"octavia-rsyslog-nc627\" (UID: \"88cbdf9d-4e65-4269-bf33-ad4a6842812a\") " pod="openstack/octavia-rsyslog-nc627" Jan 28 17:24:59 crc kubenswrapper[4811]: I0128 17:24:59.853265 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/88cbdf9d-4e65-4269-bf33-ad4a6842812a-scripts\") pod \"octavia-rsyslog-nc627\" (UID: \"88cbdf9d-4e65-4269-bf33-ad4a6842812a\") " pod="openstack/octavia-rsyslog-nc627" Jan 28 17:24:59 crc kubenswrapper[4811]: I0128 17:24:59.967003 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-rsyslog-nc627" Jan 28 17:25:00 crc kubenswrapper[4811]: I0128 17:25:00.193721 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-image-upload-65dd99cb46-rfm6m"] Jan 28 17:25:00 crc kubenswrapper[4811]: I0128 17:25:00.195602 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-65dd99cb46-rfm6m" Jan 28 17:25:00 crc kubenswrapper[4811]: I0128 17:25:00.199552 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-config-data" Jan 28 17:25:00 crc kubenswrapper[4811]: I0128 17:25:00.210375 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-65dd99cb46-rfm6m"] Jan 28 17:25:00 crc kubenswrapper[4811]: I0128 17:25:00.255177 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/8ec0f7d1-06e9-4518-ab30-82ab6807056d-httpd-config\") pod \"octavia-image-upload-65dd99cb46-rfm6m\" (UID: \"8ec0f7d1-06e9-4518-ab30-82ab6807056d\") " pod="openstack/octavia-image-upload-65dd99cb46-rfm6m" Jan 28 17:25:00 crc kubenswrapper[4811]: I0128 17:25:00.255236 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/8ec0f7d1-06e9-4518-ab30-82ab6807056d-amphora-image\") pod \"octavia-image-upload-65dd99cb46-rfm6m\" (UID: \"8ec0f7d1-06e9-4518-ab30-82ab6807056d\") " pod="openstack/octavia-image-upload-65dd99cb46-rfm6m" Jan 28 17:25:00 crc kubenswrapper[4811]: I0128 17:25:00.356830 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/8ec0f7d1-06e9-4518-ab30-82ab6807056d-httpd-config\") pod \"octavia-image-upload-65dd99cb46-rfm6m\" (UID: \"8ec0f7d1-06e9-4518-ab30-82ab6807056d\") " pod="openstack/octavia-image-upload-65dd99cb46-rfm6m" Jan 28 17:25:00 crc kubenswrapper[4811]: I0128 17:25:00.356883 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/8ec0f7d1-06e9-4518-ab30-82ab6807056d-amphora-image\") pod \"octavia-image-upload-65dd99cb46-rfm6m\" (UID: \"8ec0f7d1-06e9-4518-ab30-82ab6807056d\") " pod="openstack/octavia-image-upload-65dd99cb46-rfm6m" Jan 28 17:25:00 crc kubenswrapper[4811]: I0128 17:25:00.357478 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/8ec0f7d1-06e9-4518-ab30-82ab6807056d-amphora-image\") pod \"octavia-image-upload-65dd99cb46-rfm6m\" (UID: \"8ec0f7d1-06e9-4518-ab30-82ab6807056d\") " pod="openstack/octavia-image-upload-65dd99cb46-rfm6m" Jan 28 17:25:00 crc kubenswrapper[4811]: I0128 17:25:00.381975 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/8ec0f7d1-06e9-4518-ab30-82ab6807056d-httpd-config\") pod \"octavia-image-upload-65dd99cb46-rfm6m\" (UID: \"8ec0f7d1-06e9-4518-ab30-82ab6807056d\") " pod="openstack/octavia-image-upload-65dd99cb46-rfm6m" Jan 28 17:25:00 crc kubenswrapper[4811]: I0128 17:25:00.515653 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-image-upload-65dd99cb46-rfm6m" Jan 28 17:25:00 crc kubenswrapper[4811]: I0128 17:25:00.530365 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-rsyslog-nc627"] Jan 28 17:25:00 crc kubenswrapper[4811]: I0128 17:25:00.701806 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-rsyslog-nc627"] Jan 28 17:25:00 crc kubenswrapper[4811]: I0128 17:25:00.732952 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-nc627" event={"ID":"88cbdf9d-4e65-4269-bf33-ad4a6842812a","Type":"ContainerStarted","Data":"1f1c2eab5987603a6b3d722878c6087833729b00e482a5c21c7bbc7b196bb02d"} Jan 28 17:25:01 crc kubenswrapper[4811]: I0128 17:25:01.042360 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-65dd99cb46-rfm6m"] Jan 28 17:25:01 crc kubenswrapper[4811]: I0128 17:25:01.747023 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-65dd99cb46-rfm6m" event={"ID":"8ec0f7d1-06e9-4518-ab30-82ab6807056d","Type":"ContainerStarted","Data":"16af27b664ebb41eb8a0351aff6d6027d80d170fdec87e96dac5b5bd22401f78"} Jan 28 17:25:04 crc kubenswrapper[4811]: I0128 17:25:04.550260 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-db-sync-24c7x"] Jan 28 17:25:04 crc kubenswrapper[4811]: I0128 17:25:04.552167 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-sync-24c7x" Jan 28 17:25:04 crc kubenswrapper[4811]: I0128 17:25:04.554524 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-scripts" Jan 28 17:25:04 crc kubenswrapper[4811]: I0128 17:25:04.563188 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-sync-24c7x"] Jan 28 17:25:04 crc kubenswrapper[4811]: I0128 17:25:04.652900 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2dbe153f-0524-407e-8a1a-61cce4f22eeb-scripts\") pod \"octavia-db-sync-24c7x\" (UID: \"2dbe153f-0524-407e-8a1a-61cce4f22eeb\") " pod="openstack/octavia-db-sync-24c7x" Jan 28 17:25:04 crc kubenswrapper[4811]: I0128 17:25:04.652967 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2dbe153f-0524-407e-8a1a-61cce4f22eeb-combined-ca-bundle\") pod \"octavia-db-sync-24c7x\" (UID: \"2dbe153f-0524-407e-8a1a-61cce4f22eeb\") " pod="openstack/octavia-db-sync-24c7x" Jan 28 17:25:04 crc kubenswrapper[4811]: I0128 17:25:04.653156 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2dbe153f-0524-407e-8a1a-61cce4f22eeb-config-data\") pod \"octavia-db-sync-24c7x\" (UID: \"2dbe153f-0524-407e-8a1a-61cce4f22eeb\") " pod="openstack/octavia-db-sync-24c7x" Jan 28 17:25:04 crc kubenswrapper[4811]: I0128 17:25:04.653476 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/2dbe153f-0524-407e-8a1a-61cce4f22eeb-config-data-merged\") pod \"octavia-db-sync-24c7x\" (UID: \"2dbe153f-0524-407e-8a1a-61cce4f22eeb\") " pod="openstack/octavia-db-sync-24c7x" Jan 28 17:25:04 crc kubenswrapper[4811]: I0128 17:25:04.755627 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/2dbe153f-0524-407e-8a1a-61cce4f22eeb-scripts\") pod \"octavia-db-sync-24c7x\" (UID: \"2dbe153f-0524-407e-8a1a-61cce4f22eeb\") " pod="openstack/octavia-db-sync-24c7x" Jan 28 17:25:04 crc kubenswrapper[4811]: I0128 17:25:04.755715 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2dbe153f-0524-407e-8a1a-61cce4f22eeb-combined-ca-bundle\") pod \"octavia-db-sync-24c7x\" (UID: \"2dbe153f-0524-407e-8a1a-61cce4f22eeb\") " pod="openstack/octavia-db-sync-24c7x" Jan 28 17:25:04 crc kubenswrapper[4811]: I0128 17:25:04.756647 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2dbe153f-0524-407e-8a1a-61cce4f22eeb-config-data\") pod \"octavia-db-sync-24c7x\" (UID: \"2dbe153f-0524-407e-8a1a-61cce4f22eeb\") " pod="openstack/octavia-db-sync-24c7x" Jan 28 17:25:04 crc kubenswrapper[4811]: I0128 17:25:04.756746 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/2dbe153f-0524-407e-8a1a-61cce4f22eeb-config-data-merged\") pod \"octavia-db-sync-24c7x\" (UID: \"2dbe153f-0524-407e-8a1a-61cce4f22eeb\") " pod="openstack/octavia-db-sync-24c7x" Jan 28 17:25:04 crc kubenswrapper[4811]: I0128 17:25:04.757372 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/2dbe153f-0524-407e-8a1a-61cce4f22eeb-config-data-merged\") pod \"octavia-db-sync-24c7x\" (UID: \"2dbe153f-0524-407e-8a1a-61cce4f22eeb\") " pod="openstack/octavia-db-sync-24c7x" Jan 28 17:25:04 crc kubenswrapper[4811]: I0128 17:25:04.762269 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2dbe153f-0524-407e-8a1a-61cce4f22eeb-combined-ca-bundle\") pod \"octavia-db-sync-24c7x\" (UID: \"2dbe153f-0524-407e-8a1a-61cce4f22eeb\") " pod="openstack/octavia-db-sync-24c7x" Jan 28 17:25:04 crc kubenswrapper[4811]: I0128 17:25:04.768316 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2dbe153f-0524-407e-8a1a-61cce4f22eeb-scripts\") pod \"octavia-db-sync-24c7x\" (UID: \"2dbe153f-0524-407e-8a1a-61cce4f22eeb\") " pod="openstack/octavia-db-sync-24c7x" Jan 28 17:25:04 crc kubenswrapper[4811]: I0128 17:25:04.773375 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2dbe153f-0524-407e-8a1a-61cce4f22eeb-config-data\") pod \"octavia-db-sync-24c7x\" (UID: \"2dbe153f-0524-407e-8a1a-61cce4f22eeb\") " pod="openstack/octavia-db-sync-24c7x" Jan 28 17:25:04 crc kubenswrapper[4811]: I0128 17:25:04.939295 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-sync-24c7x" Jan 28 17:25:05 crc kubenswrapper[4811]: I0128 17:25:05.474939 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-sync-24c7x"] Jan 28 17:25:05 crc kubenswrapper[4811]: W0128 17:25:05.481589 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2dbe153f_0524_407e_8a1a_61cce4f22eeb.slice/crio-aa09269d6fc992917e509d73c27079458db9d508095924af2e9defcae7496823 WatchSource:0}: Error finding container aa09269d6fc992917e509d73c27079458db9d508095924af2e9defcae7496823: Status 404 returned error can't find the container with id aa09269d6fc992917e509d73c27079458db9d508095924af2e9defcae7496823 Jan 28 17:25:05 crc kubenswrapper[4811]: I0128 17:25:05.798749 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-24c7x" event={"ID":"2dbe153f-0524-407e-8a1a-61cce4f22eeb","Type":"ContainerStarted","Data":"aa09269d6fc992917e509d73c27079458db9d508095924af2e9defcae7496823"} Jan 28 17:25:05 crc kubenswrapper[4811]: I0128 17:25:05.801015 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-nc627" event={"ID":"88cbdf9d-4e65-4269-bf33-ad4a6842812a","Type":"ContainerStarted","Data":"00a3532cd344c8413540f3129b227a13a512a9ac1bb358ce6e02f10236d9e991"} Jan 28 17:25:09 crc kubenswrapper[4811]: I0128 17:25:09.835940 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-24c7x" event={"ID":"2dbe153f-0524-407e-8a1a-61cce4f22eeb","Type":"ContainerStarted","Data":"ada503a25046c67f1294b9915a959cd74bc9f8e6728e6813efbe18172c232f7c"} Jan 28 17:25:10 crc kubenswrapper[4811]: I0128 17:25:10.852877 4811 generic.go:334] "Generic (PLEG): container finished" podID="2dbe153f-0524-407e-8a1a-61cce4f22eeb" containerID="ada503a25046c67f1294b9915a959cd74bc9f8e6728e6813efbe18172c232f7c" exitCode=0 Jan 28 17:25:10 crc kubenswrapper[4811]: I0128 17:25:10.852954 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-24c7x" event={"ID":"2dbe153f-0524-407e-8a1a-61cce4f22eeb","Type":"ContainerDied","Data":"ada503a25046c67f1294b9915a959cd74bc9f8e6728e6813efbe18172c232f7c"} Jan 28 17:25:12 crc kubenswrapper[4811]: I0128 17:25:12.876980 4811 generic.go:334] "Generic (PLEG): container finished" podID="88cbdf9d-4e65-4269-bf33-ad4a6842812a" containerID="00a3532cd344c8413540f3129b227a13a512a9ac1bb358ce6e02f10236d9e991" exitCode=0 Jan 28 17:25:12 crc kubenswrapper[4811]: I0128 17:25:12.877079 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-nc627" event={"ID":"88cbdf9d-4e65-4269-bf33-ad4a6842812a","Type":"ContainerDied","Data":"00a3532cd344c8413540f3129b227a13a512a9ac1bb358ce6e02f10236d9e991"} Jan 28 17:25:15 crc kubenswrapper[4811]: I0128 17:25:15.881603 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-api-98d866f5b-p8btt" Jan 28 17:25:15 crc kubenswrapper[4811]: I0128 17:25:15.892823 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-api-98d866f5b-p8btt" Jan 28 17:25:16 crc kubenswrapper[4811]: E0128 17:25:16.940038 4811 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/gthiemonge/octavia-amphora-image:latest" Jan 28 17:25:16 crc kubenswrapper[4811]: E0128 17:25:16.940682 4811 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:init,Image:quay.io/gthiemonge/octavia-amphora-image,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:DEST_DIR,Value:/usr/local/apache2/htdocs,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:amphora-image,ReadOnly:false,MountPath:/usr/local/apache2/htdocs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-image-upload-65dd99cb46-rfm6m_openstack(8ec0f7d1-06e9-4518-ab30-82ab6807056d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 28 17:25:16 crc kubenswrapper[4811]: E0128 17:25:16.942509 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/octavia-image-upload-65dd99cb46-rfm6m" podUID="8ec0f7d1-06e9-4518-ab30-82ab6807056d" Jan 28 17:25:17 crc kubenswrapper[4811]: I0128 17:25:17.937545 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-24c7x" event={"ID":"2dbe153f-0524-407e-8a1a-61cce4f22eeb","Type":"ContainerStarted","Data":"e27135a2724e31bd3b95d4467b3d0608c863bd2f663c90259f22139577819d5c"} Jan 28 17:25:17 crc kubenswrapper[4811]: I0128 17:25:17.941986 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-nc627" event={"ID":"88cbdf9d-4e65-4269-bf33-ad4a6842812a","Type":"ContainerStarted","Data":"f8ba8659fe7e6bd259429545eb9b990ca0cd2617f0fa95baad4d830b0ac31355"} Jan 28 17:25:17 crc kubenswrapper[4811]: I0128 17:25:17.942456 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-rsyslog-nc627" Jan 28 17:25:17 crc kubenswrapper[4811]: E0128 17:25:17.943517 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/gthiemonge/octavia-amphora-image\\\"\"" pod="openstack/octavia-image-upload-65dd99cb46-rfm6m" podUID="8ec0f7d1-06e9-4518-ab30-82ab6807056d" Jan 28 17:25:17 crc kubenswrapper[4811]: I0128 17:25:17.959345 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-db-sync-24c7x" podStartSLOduration=13.959329043 podStartE2EDuration="13.959329043s" podCreationTimestamp="2026-01-28 17:25:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:25:17.953084833 +0000 UTC m=+6010.707448416" watchObservedRunningTime="2026-01-28 17:25:17.959329043 +0000 UTC m=+6010.713692626" Jan 28 17:25:18 crc kubenswrapper[4811]: I0128 17:25:18.001246 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/octavia-rsyslog-nc627" podStartSLOduration=2.35491289 podStartE2EDuration="19.001222044s" podCreationTimestamp="2026-01-28 17:24:59 +0000 UTC" firstStartedPulling="2026-01-28 17:25:00.568737259 +0000 UTC m=+5993.323100832" lastFinishedPulling="2026-01-28 17:25:17.215046403 +0000 UTC m=+6009.969409986" observedRunningTime="2026-01-28 17:25:17.99853714 +0000 UTC m=+6010.752900733" watchObservedRunningTime="2026-01-28 17:25:18.001222044 +0000 UTC m=+6010.755585627" Jan 28 17:25:19 crc kubenswrapper[4811]: I0128 17:25:19.971166 4811 generic.go:334] "Generic (PLEG): container finished" podID="2dbe153f-0524-407e-8a1a-61cce4f22eeb" containerID="e27135a2724e31bd3b95d4467b3d0608c863bd2f663c90259f22139577819d5c" exitCode=0 Jan 28 17:25:19 crc kubenswrapper[4811]: I0128 17:25:19.971238 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-24c7x" event={"ID":"2dbe153f-0524-407e-8a1a-61cce4f22eeb","Type":"ContainerDied","Data":"e27135a2724e31bd3b95d4467b3d0608c863bd2f663c90259f22139577819d5c"} Jan 28 17:25:21 crc kubenswrapper[4811]: I0128 17:25:21.344465 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-sync-24c7x" Jan 28 17:25:21 crc kubenswrapper[4811]: I0128 17:25:21.507303 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2dbe153f-0524-407e-8a1a-61cce4f22eeb-scripts\") pod \"2dbe153f-0524-407e-8a1a-61cce4f22eeb\" (UID: \"2dbe153f-0524-407e-8a1a-61cce4f22eeb\") " Jan 28 17:25:21 crc kubenswrapper[4811]: I0128 17:25:21.507376 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/2dbe153f-0524-407e-8a1a-61cce4f22eeb-config-data-merged\") pod \"2dbe153f-0524-407e-8a1a-61cce4f22eeb\" (UID: \"2dbe153f-0524-407e-8a1a-61cce4f22eeb\") " Jan 28 17:25:21 crc kubenswrapper[4811]: I0128 17:25:21.507518 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2dbe153f-0524-407e-8a1a-61cce4f22eeb-combined-ca-bundle\") pod \"2dbe153f-0524-407e-8a1a-61cce4f22eeb\" (UID: \"2dbe153f-0524-407e-8a1a-61cce4f22eeb\") " Jan 28 17:25:21 crc kubenswrapper[4811]: I0128 17:25:21.507636 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2dbe153f-0524-407e-8a1a-61cce4f22eeb-config-data\") pod \"2dbe153f-0524-407e-8a1a-61cce4f22eeb\" (UID: \"2dbe153f-0524-407e-8a1a-61cce4f22eeb\") " Jan 28 17:25:21 crc kubenswrapper[4811]: I0128 17:25:21.513569 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2dbe153f-0524-407e-8a1a-61cce4f22eeb-config-data" (OuterVolumeSpecName: "config-data") pod "2dbe153f-0524-407e-8a1a-61cce4f22eeb" (UID: "2dbe153f-0524-407e-8a1a-61cce4f22eeb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:25:21 crc kubenswrapper[4811]: I0128 17:25:21.531565 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2dbe153f-0524-407e-8a1a-61cce4f22eeb-scripts" (OuterVolumeSpecName: "scripts") pod "2dbe153f-0524-407e-8a1a-61cce4f22eeb" (UID: "2dbe153f-0524-407e-8a1a-61cce4f22eeb"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:25:21 crc kubenswrapper[4811]: I0128 17:25:21.540896 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2dbe153f-0524-407e-8a1a-61cce4f22eeb-config-data-merged" (OuterVolumeSpecName: "config-data-merged") pod "2dbe153f-0524-407e-8a1a-61cce4f22eeb" (UID: "2dbe153f-0524-407e-8a1a-61cce4f22eeb"). InnerVolumeSpecName "config-data-merged". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:25:21 crc kubenswrapper[4811]: I0128 17:25:21.544262 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2dbe153f-0524-407e-8a1a-61cce4f22eeb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2dbe153f-0524-407e-8a1a-61cce4f22eeb" (UID: "2dbe153f-0524-407e-8a1a-61cce4f22eeb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:25:21 crc kubenswrapper[4811]: I0128 17:25:21.610409 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2dbe153f-0524-407e-8a1a-61cce4f22eeb-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 17:25:21 crc kubenswrapper[4811]: I0128 17:25:21.610480 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2dbe153f-0524-407e-8a1a-61cce4f22eeb-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:25:21 crc kubenswrapper[4811]: I0128 17:25:21.610493 4811 reconciler_common.go:293] "Volume detached for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/2dbe153f-0524-407e-8a1a-61cce4f22eeb-config-data-merged\") on node \"crc\" DevicePath \"\"" Jan 28 17:25:21 crc kubenswrapper[4811]: I0128 17:25:21.610507 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2dbe153f-0524-407e-8a1a-61cce4f22eeb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 17:25:21 crc kubenswrapper[4811]: I0128 17:25:21.993564 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-24c7x" event={"ID":"2dbe153f-0524-407e-8a1a-61cce4f22eeb","Type":"ContainerDied","Data":"aa09269d6fc992917e509d73c27079458db9d508095924af2e9defcae7496823"} Jan 28 17:25:21 crc kubenswrapper[4811]: I0128 17:25:21.993604 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aa09269d6fc992917e509d73c27079458db9d508095924af2e9defcae7496823" Jan 28 17:25:21 crc kubenswrapper[4811]: I0128 17:25:21.993664 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-sync-24c7x" Jan 28 17:25:30 crc kubenswrapper[4811]: I0128 17:25:30.002770 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-rsyslog-nc627" Jan 28 17:25:38 crc kubenswrapper[4811]: I0128 17:25:38.315493 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rgdmv"] Jan 28 17:25:38 crc kubenswrapper[4811]: E0128 17:25:38.316698 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2dbe153f-0524-407e-8a1a-61cce4f22eeb" containerName="octavia-db-sync" Jan 28 17:25:38 crc kubenswrapper[4811]: I0128 17:25:38.316719 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="2dbe153f-0524-407e-8a1a-61cce4f22eeb" containerName="octavia-db-sync" Jan 28 17:25:38 crc kubenswrapper[4811]: E0128 17:25:38.316760 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2dbe153f-0524-407e-8a1a-61cce4f22eeb" containerName="init" Jan 28 17:25:38 crc kubenswrapper[4811]: I0128 17:25:38.316772 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="2dbe153f-0524-407e-8a1a-61cce4f22eeb" containerName="init" Jan 28 17:25:38 crc kubenswrapper[4811]: I0128 17:25:38.317097 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="2dbe153f-0524-407e-8a1a-61cce4f22eeb" containerName="octavia-db-sync" Jan 28 17:25:38 crc kubenswrapper[4811]: I0128 17:25:38.321368 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rgdmv" Jan 28 17:25:38 crc kubenswrapper[4811]: I0128 17:25:38.325904 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rgdmv"] Jan 28 17:25:38 crc kubenswrapper[4811]: I0128 17:25:38.433935 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2717f028-9fd4-4280-9f59-fb917b8ad100-utilities\") pod \"certified-operators-rgdmv\" (UID: \"2717f028-9fd4-4280-9f59-fb917b8ad100\") " pod="openshift-marketplace/certified-operators-rgdmv" Jan 28 17:25:38 crc kubenswrapper[4811]: I0128 17:25:38.434320 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2717f028-9fd4-4280-9f59-fb917b8ad100-catalog-content\") pod \"certified-operators-rgdmv\" (UID: \"2717f028-9fd4-4280-9f59-fb917b8ad100\") " pod="openshift-marketplace/certified-operators-rgdmv" Jan 28 17:25:38 crc kubenswrapper[4811]: I0128 17:25:38.434628 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5tf98\" (UniqueName: \"kubernetes.io/projected/2717f028-9fd4-4280-9f59-fb917b8ad100-kube-api-access-5tf98\") pod \"certified-operators-rgdmv\" (UID: \"2717f028-9fd4-4280-9f59-fb917b8ad100\") " pod="openshift-marketplace/certified-operators-rgdmv" Jan 28 17:25:38 crc kubenswrapper[4811]: I0128 17:25:38.535987 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5tf98\" (UniqueName: \"kubernetes.io/projected/2717f028-9fd4-4280-9f59-fb917b8ad100-kube-api-access-5tf98\") pod \"certified-operators-rgdmv\" (UID: \"2717f028-9fd4-4280-9f59-fb917b8ad100\") " pod="openshift-marketplace/certified-operators-rgdmv" Jan 28 17:25:38 crc kubenswrapper[4811]: I0128 17:25:38.536390 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2717f028-9fd4-4280-9f59-fb917b8ad100-utilities\") pod \"certified-operators-rgdmv\" (UID: \"2717f028-9fd4-4280-9f59-fb917b8ad100\") " pod="openshift-marketplace/certified-operators-rgdmv" Jan 28 17:25:38 crc kubenswrapper[4811]: I0128 17:25:38.536537 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2717f028-9fd4-4280-9f59-fb917b8ad100-catalog-content\") pod \"certified-operators-rgdmv\" (UID: \"2717f028-9fd4-4280-9f59-fb917b8ad100\") " pod="openshift-marketplace/certified-operators-rgdmv" Jan 28 17:25:38 crc kubenswrapper[4811]: I0128 17:25:38.537027 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2717f028-9fd4-4280-9f59-fb917b8ad100-utilities\") pod \"certified-operators-rgdmv\" (UID: \"2717f028-9fd4-4280-9f59-fb917b8ad100\") " pod="openshift-marketplace/certified-operators-rgdmv" Jan 28 17:25:38 crc kubenswrapper[4811]: I0128 17:25:38.537243 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2717f028-9fd4-4280-9f59-fb917b8ad100-catalog-content\") pod \"certified-operators-rgdmv\" (UID: \"2717f028-9fd4-4280-9f59-fb917b8ad100\") " pod="openshift-marketplace/certified-operators-rgdmv" Jan 28 17:25:38 crc kubenswrapper[4811]: I0128 17:25:38.555849 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5tf98\" (UniqueName: \"kubernetes.io/projected/2717f028-9fd4-4280-9f59-fb917b8ad100-kube-api-access-5tf98\") pod \"certified-operators-rgdmv\" (UID: \"2717f028-9fd4-4280-9f59-fb917b8ad100\") " pod="openshift-marketplace/certified-operators-rgdmv" Jan 28 17:25:38 crc kubenswrapper[4811]: I0128 17:25:38.656624 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rgdmv" Jan 28 17:25:41 crc kubenswrapper[4811]: I0128 17:25:41.832321 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rgdmv"] Jan 28 17:25:41 crc kubenswrapper[4811]: W0128 17:25:41.924993 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2717f028_9fd4_4280_9f59_fb917b8ad100.slice/crio-ffbcf626934c6a4d7c4700b412d6565da1aa03f0cf30d24900a955609d0a905c WatchSource:0}: Error finding container ffbcf626934c6a4d7c4700b412d6565da1aa03f0cf30d24900a955609d0a905c: Status 404 returned error can't find the container with id ffbcf626934c6a4d7c4700b412d6565da1aa03f0cf30d24900a955609d0a905c Jan 28 17:25:42 crc kubenswrapper[4811]: I0128 17:25:42.179312 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rgdmv" event={"ID":"2717f028-9fd4-4280-9f59-fb917b8ad100","Type":"ContainerStarted","Data":"ffbcf626934c6a4d7c4700b412d6565da1aa03f0cf30d24900a955609d0a905c"} Jan 28 17:25:43 crc kubenswrapper[4811]: I0128 17:25:43.189539 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-65dd99cb46-rfm6m" event={"ID":"8ec0f7d1-06e9-4518-ab30-82ab6807056d","Type":"ContainerStarted","Data":"e0a3f5451edc2a2b71a31cdff46b3d32fba0615e90bf994af2eef1c2971964be"} Jan 28 17:25:43 crc kubenswrapper[4811]: I0128 17:25:43.191174 4811 generic.go:334] "Generic (PLEG): container finished" podID="2717f028-9fd4-4280-9f59-fb917b8ad100" containerID="86e07c04e25dd039135224ed33d3572a4568e35c828fb7fb4451fdd8841c6554" exitCode=0 Jan 28 17:25:43 crc kubenswrapper[4811]: I0128 17:25:43.191219 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rgdmv" event={"ID":"2717f028-9fd4-4280-9f59-fb917b8ad100","Type":"ContainerDied","Data":"86e07c04e25dd039135224ed33d3572a4568e35c828fb7fb4451fdd8841c6554"} Jan 28 17:25:46 crc kubenswrapper[4811]: I0128 17:25:46.218218 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rgdmv" event={"ID":"2717f028-9fd4-4280-9f59-fb917b8ad100","Type":"ContainerStarted","Data":"b85e4ab8bde0fcd0c79fb6328c155e3ad809e2ac40298d91ec64e49d04679611"} Jan 28 17:25:48 crc kubenswrapper[4811]: I0128 17:25:48.280585 4811 generic.go:334] "Generic (PLEG): container finished" podID="2717f028-9fd4-4280-9f59-fb917b8ad100" containerID="b85e4ab8bde0fcd0c79fb6328c155e3ad809e2ac40298d91ec64e49d04679611" exitCode=0 Jan 28 17:25:48 crc kubenswrapper[4811]: I0128 17:25:48.281228 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rgdmv" event={"ID":"2717f028-9fd4-4280-9f59-fb917b8ad100","Type":"ContainerDied","Data":"b85e4ab8bde0fcd0c79fb6328c155e3ad809e2ac40298d91ec64e49d04679611"} Jan 28 17:25:53 crc kubenswrapper[4811]: I0128 17:25:53.331965 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rgdmv" event={"ID":"2717f028-9fd4-4280-9f59-fb917b8ad100","Type":"ContainerStarted","Data":"86415975e7bf2874c9546bc9697c8e1948c15c3c75d8bc6c024bf04c97e8d9d5"} Jan 28 17:25:53 crc kubenswrapper[4811]: I0128 17:25:53.365580 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rgdmv" podStartSLOduration=5.981124357 podStartE2EDuration="15.365556233s" podCreationTimestamp="2026-01-28 17:25:38 +0000 UTC" 
firstStartedPulling="2026-01-28 17:25:43.193126725 +0000 UTC m=+6035.947490308" lastFinishedPulling="2026-01-28 17:25:52.577558601 +0000 UTC m=+6045.331922184" observedRunningTime="2026-01-28 17:25:53.355829648 +0000 UTC m=+6046.110193231" watchObservedRunningTime="2026-01-28 17:25:53.365556233 +0000 UTC m=+6046.119919816" Jan 28 17:25:56 crc kubenswrapper[4811]: I0128 17:25:56.357143 4811 generic.go:334] "Generic (PLEG): container finished" podID="8ec0f7d1-06e9-4518-ab30-82ab6807056d" containerID="e0a3f5451edc2a2b71a31cdff46b3d32fba0615e90bf994af2eef1c2971964be" exitCode=0 Jan 28 17:25:56 crc kubenswrapper[4811]: I0128 17:25:56.357245 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-65dd99cb46-rfm6m" event={"ID":"8ec0f7d1-06e9-4518-ab30-82ab6807056d","Type":"ContainerDied","Data":"e0a3f5451edc2a2b71a31cdff46b3d32fba0615e90bf994af2eef1c2971964be"} Jan 28 17:25:57 crc kubenswrapper[4811]: I0128 17:25:57.368526 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-65dd99cb46-rfm6m" event={"ID":"8ec0f7d1-06e9-4518-ab30-82ab6807056d","Type":"ContainerStarted","Data":"70990a02718a354c5cfb80ac85d38564b7e9e3551e15d3c8cdc957e063013142"} Jan 28 17:25:57 crc kubenswrapper[4811]: I0128 17:25:57.400785 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-image-upload-65dd99cb46-rfm6m" podStartSLOduration=17.060652938 podStartE2EDuration="57.400765262s" podCreationTimestamp="2026-01-28 17:25:00 +0000 UTC" firstStartedPulling="2026-01-28 17:25:01.047640112 +0000 UTC m=+5993.802003695" lastFinishedPulling="2026-01-28 17:25:41.387752436 +0000 UTC m=+6034.142116019" observedRunningTime="2026-01-28 17:25:57.392466436 +0000 UTC m=+6050.146830029" watchObservedRunningTime="2026-01-28 17:25:57.400765262 +0000 UTC m=+6050.155128845" Jan 28 17:25:58 crc kubenswrapper[4811]: I0128 17:25:58.656732 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rgdmv" Jan 28 17:25:58 crc kubenswrapper[4811]: I0128 17:25:58.656805 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rgdmv" Jan 28 17:25:59 crc kubenswrapper[4811]: I0128 17:25:59.703864 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-rgdmv" podUID="2717f028-9fd4-4280-9f59-fb917b8ad100" containerName="registry-server" probeResult="failure" output=< Jan 28 17:25:59 crc kubenswrapper[4811]: timeout: failed to connect service ":50051" within 1s Jan 28 17:25:59 crc kubenswrapper[4811]: > Jan 28 17:26:04 crc kubenswrapper[4811]: I0128 17:26:04.941747 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-healthmanager-g7hvz"] Jan 28 17:26:04 crc kubenswrapper[4811]: I0128 17:26:04.944382 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-healthmanager-g7hvz"
Jan 28 17:26:04 crc kubenswrapper[4811]: I0128 17:26:04.948101 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-healthmanager-config-data"
Jan 28 17:26:04 crc kubenswrapper[4811]: I0128 17:26:04.948184 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-certs-secret"
Jan 28 17:26:04 crc kubenswrapper[4811]: I0128 17:26:04.953389 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-healthmanager-scripts"
Jan 28 17:26:04 crc kubenswrapper[4811]: I0128 17:26:04.958474 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-healthmanager-g7hvz"]
Jan 28 17:26:05 crc kubenswrapper[4811]: I0128 17:26:05.017967 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/78c1e707-1148-4a30-b900-7bf0ffc947d7-config-data-merged\") pod \"octavia-healthmanager-g7hvz\" (UID: \"78c1e707-1148-4a30-b900-7bf0ffc947d7\") " pod="openstack/octavia-healthmanager-g7hvz"
Jan 28 17:26:05 crc kubenswrapper[4811]: I0128 17:26:05.018299 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78c1e707-1148-4a30-b900-7bf0ffc947d7-combined-ca-bundle\") pod \"octavia-healthmanager-g7hvz\" (UID: \"78c1e707-1148-4a30-b900-7bf0ffc947d7\") " pod="openstack/octavia-healthmanager-g7hvz"
Jan 28 17:26:05 crc kubenswrapper[4811]: I0128 17:26:05.018420 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/78c1e707-1148-4a30-b900-7bf0ffc947d7-hm-ports\") pod \"octavia-healthmanager-g7hvz\" (UID: \"78c1e707-1148-4a30-b900-7bf0ffc947d7\") " pod="openstack/octavia-healthmanager-g7hvz"
Jan 28 17:26:05 crc kubenswrapper[4811]: I0128 17:26:05.018550 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78c1e707-1148-4a30-b900-7bf0ffc947d7-config-data\") pod \"octavia-healthmanager-g7hvz\" (UID: \"78c1e707-1148-4a30-b900-7bf0ffc947d7\") " pod="openstack/octavia-healthmanager-g7hvz"
Jan 28 17:26:05 crc kubenswrapper[4811]: I0128 17:26:05.018810 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78c1e707-1148-4a30-b900-7bf0ffc947d7-scripts\") pod \"octavia-healthmanager-g7hvz\" (UID: \"78c1e707-1148-4a30-b900-7bf0ffc947d7\") " pod="openstack/octavia-healthmanager-g7hvz"
Jan 28 17:26:05 crc kubenswrapper[4811]: I0128 17:26:05.018863 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/78c1e707-1148-4a30-b900-7bf0ffc947d7-amphora-certs\") pod \"octavia-healthmanager-g7hvz\" (UID: \"78c1e707-1148-4a30-b900-7bf0ffc947d7\") " pod="openstack/octavia-healthmanager-g7hvz"
Jan 28 17:26:05 crc kubenswrapper[4811]: I0128 17:26:05.120268 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78c1e707-1148-4a30-b900-7bf0ffc947d7-scripts\") pod \"octavia-healthmanager-g7hvz\" (UID: \"78c1e707-1148-4a30-b900-7bf0ffc947d7\") " pod="openstack/octavia-healthmanager-g7hvz"
Jan 28 17:26:05 crc kubenswrapper[4811]: I0128 17:26:05.120323 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/78c1e707-1148-4a30-b900-7bf0ffc947d7-amphora-certs\") pod \"octavia-healthmanager-g7hvz\" (UID: \"78c1e707-1148-4a30-b900-7bf0ffc947d7\") " pod="openstack/octavia-healthmanager-g7hvz"
Jan 28 17:26:05 crc kubenswrapper[4811]: I0128 17:26:05.120369 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/78c1e707-1148-4a30-b900-7bf0ffc947d7-config-data-merged\") pod \"octavia-healthmanager-g7hvz\" (UID: \"78c1e707-1148-4a30-b900-7bf0ffc947d7\") " pod="openstack/octavia-healthmanager-g7hvz"
Jan 28 17:26:05 crc kubenswrapper[4811]: I0128 17:26:05.120426 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78c1e707-1148-4a30-b900-7bf0ffc947d7-combined-ca-bundle\") pod \"octavia-healthmanager-g7hvz\" (UID: \"78c1e707-1148-4a30-b900-7bf0ffc947d7\") " pod="openstack/octavia-healthmanager-g7hvz"
Jan 28 17:26:05 crc kubenswrapper[4811]: I0128 17:26:05.120480 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/78c1e707-1148-4a30-b900-7bf0ffc947d7-hm-ports\") pod \"octavia-healthmanager-g7hvz\" (UID: \"78c1e707-1148-4a30-b900-7bf0ffc947d7\") " pod="openstack/octavia-healthmanager-g7hvz"
Jan 28 17:26:05 crc kubenswrapper[4811]: I0128 17:26:05.120516 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78c1e707-1148-4a30-b900-7bf0ffc947d7-config-data\") pod \"octavia-healthmanager-g7hvz\" (UID: \"78c1e707-1148-4a30-b900-7bf0ffc947d7\") " pod="openstack/octavia-healthmanager-g7hvz"
Jan 28 17:26:05 crc kubenswrapper[4811]: I0128 17:26:05.121308 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/78c1e707-1148-4a30-b900-7bf0ffc947d7-config-data-merged\") pod \"octavia-healthmanager-g7hvz\" (UID: \"78c1e707-1148-4a30-b900-7bf0ffc947d7\") " pod="openstack/octavia-healthmanager-g7hvz"
Jan 28 17:26:05 crc kubenswrapper[4811]: I0128 17:26:05.121740 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/78c1e707-1148-4a30-b900-7bf0ffc947d7-hm-ports\") pod \"octavia-healthmanager-g7hvz\" (UID: \"78c1e707-1148-4a30-b900-7bf0ffc947d7\") " pod="openstack/octavia-healthmanager-g7hvz"
Jan 28 17:26:05 crc kubenswrapper[4811]: I0128 17:26:05.128545 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78c1e707-1148-4a30-b900-7bf0ffc947d7-scripts\") pod \"octavia-healthmanager-g7hvz\" (UID: \"78c1e707-1148-4a30-b900-7bf0ffc947d7\") " pod="openstack/octavia-healthmanager-g7hvz"
Jan 28 17:26:05 crc kubenswrapper[4811]: I0128 17:26:05.128752 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78c1e707-1148-4a30-b900-7bf0ffc947d7-combined-ca-bundle\") pod \"octavia-healthmanager-g7hvz\" (UID: \"78c1e707-1148-4a30-b900-7bf0ffc947d7\") " pod="openstack/octavia-healthmanager-g7hvz"
Jan 28 17:26:05 crc kubenswrapper[4811]: I0128 17:26:05.140733 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/78c1e707-1148-4a30-b900-7bf0ffc947d7-amphora-certs\") pod \"octavia-healthmanager-g7hvz\" (UID: \"78c1e707-1148-4a30-b900-7bf0ffc947d7\") " pod="openstack/octavia-healthmanager-g7hvz"
Jan 28 17:26:05 crc kubenswrapper[4811]: I0128 17:26:05.140761 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78c1e707-1148-4a30-b900-7bf0ffc947d7-config-data\") pod \"octavia-healthmanager-g7hvz\" (UID: \"78c1e707-1148-4a30-b900-7bf0ffc947d7\") " pod="openstack/octavia-healthmanager-g7hvz"
Jan 28 17:26:05 crc kubenswrapper[4811]: I0128 17:26:05.271099 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-healthmanager-g7hvz"
Jan 28 17:26:05 crc kubenswrapper[4811]: I0128 17:26:05.884101 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-healthmanager-g7hvz"]
Jan 28 17:26:06 crc kubenswrapper[4811]: I0128 17:26:06.452023 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-g7hvz" event={"ID":"78c1e707-1148-4a30-b900-7bf0ffc947d7","Type":"ContainerStarted","Data":"4aabfb343e34c167ad9410fe289f3d0578b4a3e3496d676029ab3db0bb5fda68"}
Jan 28 17:26:06 crc kubenswrapper[4811]: I0128 17:26:06.452063 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-g7hvz" event={"ID":"78c1e707-1148-4a30-b900-7bf0ffc947d7","Type":"ContainerStarted","Data":"9f9c66d360ffc76efc9fcc3fcdcaa149dcc9b99e6e7af7be84c7d4b5f38ee0eb"}
Jan 28 17:26:06 crc kubenswrapper[4811]: I0128 17:26:06.527770 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-housekeeping-9lwmc"]
Jan 28 17:26:06 crc kubenswrapper[4811]: I0128 17:26:06.533632 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-housekeeping-9lwmc"
Jan 28 17:26:06 crc kubenswrapper[4811]: I0128 17:26:06.538047 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-housekeeping-config-data"
Jan 28 17:26:06 crc kubenswrapper[4811]: I0128 17:26:06.541468 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-housekeeping-scripts"
Jan 28 17:26:06 crc kubenswrapper[4811]: I0128 17:26:06.555166 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-housekeeping-9lwmc"]
Jan 28 17:26:06 crc kubenswrapper[4811]: I0128 17:26:06.652510 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/77459561-f220-4376-b309-8cb4e7b06f43-config-data-merged\") pod \"octavia-housekeeping-9lwmc\" (UID: \"77459561-f220-4376-b309-8cb4e7b06f43\") " pod="openstack/octavia-housekeeping-9lwmc"
Jan 28 17:26:06 crc kubenswrapper[4811]: I0128 17:26:06.652579 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/77459561-f220-4376-b309-8cb4e7b06f43-amphora-certs\") pod \"octavia-housekeeping-9lwmc\" (UID: \"77459561-f220-4376-b309-8cb4e7b06f43\") " pod="openstack/octavia-housekeeping-9lwmc"
Jan 28 17:26:06 crc kubenswrapper[4811]: I0128 17:26:06.652609 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/77459561-f220-4376-b309-8cb4e7b06f43-hm-ports\") pod \"octavia-housekeeping-9lwmc\" (UID: \"77459561-f220-4376-b309-8cb4e7b06f43\") " pod="openstack/octavia-housekeeping-9lwmc"
Jan 28 17:26:06 crc kubenswrapper[4811]: I0128 17:26:06.652654 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77459561-f220-4376-b309-8cb4e7b06f43-scripts\") pod \"octavia-housekeeping-9lwmc\" (UID: \"77459561-f220-4376-b309-8cb4e7b06f43\") " pod="openstack/octavia-housekeeping-9lwmc"
Jan 28 17:26:06 crc kubenswrapper[4811]: I0128 17:26:06.652873 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77459561-f220-4376-b309-8cb4e7b06f43-combined-ca-bundle\") pod \"octavia-housekeeping-9lwmc\" (UID: \"77459561-f220-4376-b309-8cb4e7b06f43\") " pod="openstack/octavia-housekeeping-9lwmc"
Jan 28 17:26:06 crc kubenswrapper[4811]: I0128 17:26:06.653331 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77459561-f220-4376-b309-8cb4e7b06f43-config-data\") pod \"octavia-housekeeping-9lwmc\" (UID: \"77459561-f220-4376-b309-8cb4e7b06f43\") " pod="openstack/octavia-housekeeping-9lwmc"
Jan 28 17:26:06 crc kubenswrapper[4811]: I0128 17:26:06.755172 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77459561-f220-4376-b309-8cb4e7b06f43-config-data\") pod \"octavia-housekeeping-9lwmc\" (UID: \"77459561-f220-4376-b309-8cb4e7b06f43\") " pod="openstack/octavia-housekeeping-9lwmc"
Jan 28 17:26:06 crc kubenswrapper[4811]: I0128 17:26:06.755264 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/77459561-f220-4376-b309-8cb4e7b06f43-config-data-merged\") pod \"octavia-housekeeping-9lwmc\" (UID: \"77459561-f220-4376-b309-8cb4e7b06f43\") " pod="openstack/octavia-housekeeping-9lwmc"
Jan 28 17:26:06 crc kubenswrapper[4811]: I0128 17:26:06.755294 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/77459561-f220-4376-b309-8cb4e7b06f43-amphora-certs\") pod \"octavia-housekeeping-9lwmc\" (UID: \"77459561-f220-4376-b309-8cb4e7b06f43\") " pod="openstack/octavia-housekeeping-9lwmc"
Jan 28 17:26:06 crc kubenswrapper[4811]: I0128 17:26:06.755320 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/77459561-f220-4376-b309-8cb4e7b06f43-hm-ports\") pod \"octavia-housekeeping-9lwmc\" (UID: \"77459561-f220-4376-b309-8cb4e7b06f43\") " pod="openstack/octavia-housekeeping-9lwmc"
Jan 28 17:26:06 crc kubenswrapper[4811]: I0128 17:26:06.755356 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77459561-f220-4376-b309-8cb4e7b06f43-scripts\") pod \"octavia-housekeeping-9lwmc\" (UID: \"77459561-f220-4376-b309-8cb4e7b06f43\") " pod="openstack/octavia-housekeeping-9lwmc"
Jan 28 17:26:06 crc kubenswrapper[4811]: I0128 17:26:06.755478 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77459561-f220-4376-b309-8cb4e7b06f43-combined-ca-bundle\") pod \"octavia-housekeeping-9lwmc\" (UID: \"77459561-f220-4376-b309-8cb4e7b06f43\") " pod="openstack/octavia-housekeeping-9lwmc"
Jan 28 17:26:06 crc kubenswrapper[4811]: I0128 17:26:06.755781 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/77459561-f220-4376-b309-8cb4e7b06f43-config-data-merged\") pod \"octavia-housekeeping-9lwmc\" (UID: \"77459561-f220-4376-b309-8cb4e7b06f43\") " pod="openstack/octavia-housekeeping-9lwmc"
Jan 28 17:26:06 crc kubenswrapper[4811]: I0128 17:26:06.756628 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/77459561-f220-4376-b309-8cb4e7b06f43-hm-ports\") pod \"octavia-housekeeping-9lwmc\" (UID: \"77459561-f220-4376-b309-8cb4e7b06f43\") " pod="openstack/octavia-housekeeping-9lwmc"
Jan 28 17:26:06 crc kubenswrapper[4811]: I0128 17:26:06.762398 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77459561-f220-4376-b309-8cb4e7b06f43-config-data\") pod \"octavia-housekeeping-9lwmc\" (UID: \"77459561-f220-4376-b309-8cb4e7b06f43\") " pod="openstack/octavia-housekeeping-9lwmc"
Jan 28 17:26:06 crc kubenswrapper[4811]: I0128 17:26:06.762485 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77459561-f220-4376-b309-8cb4e7b06f43-scripts\") pod \"octavia-housekeeping-9lwmc\" (UID: \"77459561-f220-4376-b309-8cb4e7b06f43\") " pod="openstack/octavia-housekeeping-9lwmc"
Jan 28 17:26:06 crc kubenswrapper[4811]: I0128 17:26:06.762561 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77459561-f220-4376-b309-8cb4e7b06f43-combined-ca-bundle\") pod \"octavia-housekeeping-9lwmc\" (UID: \"77459561-f220-4376-b309-8cb4e7b06f43\") " pod="openstack/octavia-housekeeping-9lwmc"
Jan 28 17:26:06 crc kubenswrapper[4811]: I0128 17:26:06.764063 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/77459561-f220-4376-b309-8cb4e7b06f43-amphora-certs\") pod \"octavia-housekeeping-9lwmc\" (UID: \"77459561-f220-4376-b309-8cb4e7b06f43\") " pod="openstack/octavia-housekeeping-9lwmc"
Jan 28 17:26:06 crc kubenswrapper[4811]: I0128 17:26:06.863559 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-housekeeping-9lwmc"
Jan 28 17:26:07 crc kubenswrapper[4811]: I0128 17:26:07.456364 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-housekeeping-9lwmc"]
Jan 28 17:26:07 crc kubenswrapper[4811]: I0128 17:26:07.653492 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-worker-sbqvf"]
Jan 28 17:26:07 crc kubenswrapper[4811]: I0128 17:26:07.655682 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-worker-sbqvf"
Jan 28 17:26:07 crc kubenswrapper[4811]: I0128 17:26:07.659152 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-worker-scripts"
Jan 28 17:26:07 crc kubenswrapper[4811]: I0128 17:26:07.659421 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-worker-config-data"
Jan 28 17:26:07 crc kubenswrapper[4811]: I0128 17:26:07.668780 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-worker-sbqvf"]
Jan 28 17:26:07 crc kubenswrapper[4811]: I0128 17:26:07.781968 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/3d120144-e100-4514-9e1f-7e6fb95c26dc-hm-ports\") pod \"octavia-worker-sbqvf\" (UID: \"3d120144-e100-4514-9e1f-7e6fb95c26dc\") " pod="openstack/octavia-worker-sbqvf"
Jan 28 17:26:07 crc kubenswrapper[4811]: I0128 17:26:07.782372 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/3d120144-e100-4514-9e1f-7e6fb95c26dc-config-data-merged\") pod \"octavia-worker-sbqvf\" (UID: \"3d120144-e100-4514-9e1f-7e6fb95c26dc\") " pod="openstack/octavia-worker-sbqvf"
Jan 28 17:26:07 crc kubenswrapper[4811]: I0128 17:26:07.782490 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d120144-e100-4514-9e1f-7e6fb95c26dc-config-data\") pod \"octavia-worker-sbqvf\" (UID: \"3d120144-e100-4514-9e1f-7e6fb95c26dc\") " pod="openstack/octavia-worker-sbqvf"
Jan 28 17:26:07 crc kubenswrapper[4811]: I0128 17:26:07.782587 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/3d120144-e100-4514-9e1f-7e6fb95c26dc-amphora-certs\") pod \"octavia-worker-sbqvf\" (UID: \"3d120144-e100-4514-9e1f-7e6fb95c26dc\") " pod="openstack/octavia-worker-sbqvf"
Jan 28 17:26:07 crc kubenswrapper[4811]: I0128 17:26:07.782717 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d120144-e100-4514-9e1f-7e6fb95c26dc-scripts\") pod \"octavia-worker-sbqvf\" (UID: \"3d120144-e100-4514-9e1f-7e6fb95c26dc\") " pod="openstack/octavia-worker-sbqvf"
Jan 28 17:26:07 crc kubenswrapper[4811]: I0128 17:26:07.782804 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d120144-e100-4514-9e1f-7e6fb95c26dc-combined-ca-bundle\") pod \"octavia-worker-sbqvf\" (UID: \"3d120144-e100-4514-9e1f-7e6fb95c26dc\") " pod="openstack/octavia-worker-sbqvf"
Jan 28 17:26:07 crc kubenswrapper[4811]: I0128 17:26:07.885322 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d120144-e100-4514-9e1f-7e6fb95c26dc-config-data\") pod \"octavia-worker-sbqvf\" (UID: \"3d120144-e100-4514-9e1f-7e6fb95c26dc\") " pod="openstack/octavia-worker-sbqvf"
Jan 28 17:26:07 crc kubenswrapper[4811]: I0128 17:26:07.885463 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/3d120144-e100-4514-9e1f-7e6fb95c26dc-amphora-certs\") pod \"octavia-worker-sbqvf\" (UID: \"3d120144-e100-4514-9e1f-7e6fb95c26dc\") " pod="openstack/octavia-worker-sbqvf"
Jan 28 17:26:07 crc kubenswrapper[4811]: I0128 17:26:07.885564 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d120144-e100-4514-9e1f-7e6fb95c26dc-scripts\") pod \"octavia-worker-sbqvf\" (UID: \"3d120144-e100-4514-9e1f-7e6fb95c26dc\") " pod="openstack/octavia-worker-sbqvf"
Jan 28 17:26:07 crc kubenswrapper[4811]: I0128 17:26:07.885620 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d120144-e100-4514-9e1f-7e6fb95c26dc-combined-ca-bundle\") pod \"octavia-worker-sbqvf\" (UID: \"3d120144-e100-4514-9e1f-7e6fb95c26dc\") " pod="openstack/octavia-worker-sbqvf"
Jan 28 17:26:07 crc kubenswrapper[4811]: I0128 17:26:07.885661 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/3d120144-e100-4514-9e1f-7e6fb95c26dc-hm-ports\") pod \"octavia-worker-sbqvf\" (UID: \"3d120144-e100-4514-9e1f-7e6fb95c26dc\") " pod="openstack/octavia-worker-sbqvf"
Jan 28 17:26:07 crc kubenswrapper[4811]: I0128 17:26:07.885691 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/3d120144-e100-4514-9e1f-7e6fb95c26dc-config-data-merged\") pod \"octavia-worker-sbqvf\" (UID: \"3d120144-e100-4514-9e1f-7e6fb95c26dc\") " pod="openstack/octavia-worker-sbqvf"
Jan 28 17:26:07 crc kubenswrapper[4811]: I0128 17:26:07.886217 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/3d120144-e100-4514-9e1f-7e6fb95c26dc-config-data-merged\") pod \"octavia-worker-sbqvf\" (UID: \"3d120144-e100-4514-9e1f-7e6fb95c26dc\") " pod="openstack/octavia-worker-sbqvf"
Jan 28 17:26:07 crc kubenswrapper[4811]: I0128 17:26:07.888198 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/3d120144-e100-4514-9e1f-7e6fb95c26dc-hm-ports\") pod \"octavia-worker-sbqvf\" (UID: \"3d120144-e100-4514-9e1f-7e6fb95c26dc\") " pod="openstack/octavia-worker-sbqvf"
Jan 28 17:26:07 crc kubenswrapper[4811]: I0128 17:26:07.892552 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/3d120144-e100-4514-9e1f-7e6fb95c26dc-amphora-certs\") pod \"octavia-worker-sbqvf\" (UID: \"3d120144-e100-4514-9e1f-7e6fb95c26dc\") " pod="openstack/octavia-worker-sbqvf"
Jan 28 17:26:07 crc kubenswrapper[4811]: I0128 17:26:07.892848 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d120144-e100-4514-9e1f-7e6fb95c26dc-config-data\") pod \"octavia-worker-sbqvf\" (UID: \"3d120144-e100-4514-9e1f-7e6fb95c26dc\") " pod="openstack/octavia-worker-sbqvf"
Jan 28 17:26:07 crc kubenswrapper[4811]: I0128 17:26:07.893051 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d120144-e100-4514-9e1f-7e6fb95c26dc-combined-ca-bundle\") pod \"octavia-worker-sbqvf\" (UID: \"3d120144-e100-4514-9e1f-7e6fb95c26dc\") " pod="openstack/octavia-worker-sbqvf"
Jan 28 17:26:07 crc kubenswrapper[4811]: I0128 17:26:07.898879 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d120144-e100-4514-9e1f-7e6fb95c26dc-scripts\") pod \"octavia-worker-sbqvf\" (UID: \"3d120144-e100-4514-9e1f-7e6fb95c26dc\") " pod="openstack/octavia-worker-sbqvf"
Jan 28 17:26:07 crc kubenswrapper[4811]: I0128 17:26:07.985035 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-worker-sbqvf"
Jan 28 17:26:08 crc kubenswrapper[4811]: I0128 17:26:08.486806 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-9lwmc" event={"ID":"77459561-f220-4376-b309-8cb4e7b06f43","Type":"ContainerStarted","Data":"ac5cf2046c69b63eb116464c2787ac6b3d69faa956b8da6c278502c6873d68e9"}
Jan 28 17:26:08 crc kubenswrapper[4811]: I0128 17:26:08.591650 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-worker-sbqvf"]
Jan 28 17:26:09 crc kubenswrapper[4811]: I0128 17:26:09.497695 4811 generic.go:334] "Generic (PLEG): container finished" podID="78c1e707-1148-4a30-b900-7bf0ffc947d7" containerID="4aabfb343e34c167ad9410fe289f3d0578b4a3e3496d676029ab3db0bb5fda68" exitCode=0
Jan 28 17:26:09 crc kubenswrapper[4811]: I0128 17:26:09.497792 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-g7hvz" event={"ID":"78c1e707-1148-4a30-b900-7bf0ffc947d7","Type":"ContainerDied","Data":"4aabfb343e34c167ad9410fe289f3d0578b4a3e3496d676029ab3db0bb5fda68"}
Jan 28 17:26:09 crc kubenswrapper[4811]: I0128 17:26:09.499528 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-sbqvf" event={"ID":"3d120144-e100-4514-9e1f-7e6fb95c26dc","Type":"ContainerStarted","Data":"bf0233045c3cdda96891b8c20f2b56dde7f26b976a6d03887bd0e7f5c73793ea"}
Jan 28 17:26:09 crc kubenswrapper[4811]: I0128 17:26:09.717959 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-rgdmv" podUID="2717f028-9fd4-4280-9f59-fb917b8ad100" containerName="registry-server" probeResult="failure" output=<
Jan 28 17:26:09 crc kubenswrapper[4811]: 	timeout: failed to connect service ":50051" within 1s
Jan 28 17:26:09 crc kubenswrapper[4811]: >
Jan 28 17:26:09 crc kubenswrapper[4811]: I0128 17:26:09.775953 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-healthmanager-g7hvz"]
event={"ID":"78c1e707-1148-4a30-b900-7bf0ffc947d7","Type":"ContainerStarted","Data":"42347e51c75d3904f52d42ab0dcb56ed7b23b1d94c8cb9e3239a03190e461886"} Jan 28 17:26:10 crc kubenswrapper[4811]: I0128 17:26:10.513735 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-healthmanager-g7hvz" Jan 28 17:26:10 crc kubenswrapper[4811]: I0128 17:26:10.516047 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-9lwmc" event={"ID":"77459561-f220-4376-b309-8cb4e7b06f43","Type":"ContainerStarted","Data":"6581421a2209a680f9ed2cf0d020397b877bd53d8bbdb677ec38c1683a448ad6"} Jan 28 17:26:10 crc kubenswrapper[4811]: I0128 17:26:10.540798 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-healthmanager-g7hvz" podStartSLOduration=6.54077386 podStartE2EDuration="6.54077386s" podCreationTimestamp="2026-01-28 17:26:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:26:10.532854685 +0000 UTC m=+6063.287218288" watchObservedRunningTime="2026-01-28 17:26:10.54077386 +0000 UTC m=+6063.295137443" Jan 28 17:26:11 crc kubenswrapper[4811]: I0128 17:26:11.533577 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-sbqvf" event={"ID":"3d120144-e100-4514-9e1f-7e6fb95c26dc","Type":"ContainerStarted","Data":"255e74a7e7150a079272fdffd1423f4fd114c08b6686f70069bc3b1688d78034"} Jan 28 17:26:11 crc kubenswrapper[4811]: I0128 17:26:11.537621 4811 generic.go:334] "Generic (PLEG): container finished" podID="77459561-f220-4376-b309-8cb4e7b06f43" containerID="6581421a2209a680f9ed2cf0d020397b877bd53d8bbdb677ec38c1683a448ad6" exitCode=0 Jan 28 17:26:11 crc kubenswrapper[4811]: I0128 17:26:11.537658 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-9lwmc" event={"ID":"77459561-f220-4376-b309-8cb4e7b06f43","Type":"ContainerDied","Data":"6581421a2209a680f9ed2cf0d020397b877bd53d8bbdb677ec38c1683a448ad6"} Jan 28 17:26:12 crc kubenswrapper[4811]: I0128 17:26:12.551075 4811 generic.go:334] "Generic (PLEG): container finished" podID="3d120144-e100-4514-9e1f-7e6fb95c26dc" containerID="255e74a7e7150a079272fdffd1423f4fd114c08b6686f70069bc3b1688d78034" exitCode=0 Jan 28 17:26:12 crc kubenswrapper[4811]: I0128 17:26:12.551418 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-sbqvf" event={"ID":"3d120144-e100-4514-9e1f-7e6fb95c26dc","Type":"ContainerDied","Data":"255e74a7e7150a079272fdffd1423f4fd114c08b6686f70069bc3b1688d78034"} Jan 28 17:26:12 crc kubenswrapper[4811]: I0128 17:26:12.556300 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-9lwmc" event={"ID":"77459561-f220-4376-b309-8cb4e7b06f43","Type":"ContainerStarted","Data":"353a73948e164e7059a5ac2e70e4c219c02ec36d88280f1a4434bf721d730813"} Jan 28 17:26:12 crc kubenswrapper[4811]: I0128 17:26:12.557522 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-housekeeping-9lwmc" Jan 28 17:26:12 crc kubenswrapper[4811]: I0128 17:26:12.614919 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-housekeeping-9lwmc" podStartSLOduration=4.978892373 podStartE2EDuration="6.614897589s" podCreationTimestamp="2026-01-28 17:26:06 +0000 UTC" firstStartedPulling="2026-01-28 17:26:07.486351283 +0000 UTC m=+6060.240714876" lastFinishedPulling="2026-01-28 
17:26:09.122356509 +0000 UTC m=+6061.876720092" observedRunningTime="2026-01-28 17:26:12.608871644 +0000 UTC m=+6065.363235247" watchObservedRunningTime="2026-01-28 17:26:12.614897589 +0000 UTC m=+6065.369261172" Jan 28 17:26:13 crc kubenswrapper[4811]: I0128 17:26:13.584686 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-sbqvf" event={"ID":"3d120144-e100-4514-9e1f-7e6fb95c26dc","Type":"ContainerStarted","Data":"a431cb005706665e66936e17a6d00da5e1fc4c2db5173cda51b643169486973d"} Jan 28 17:26:13 crc kubenswrapper[4811]: I0128 17:26:13.586271 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-worker-sbqvf" Jan 28 17:26:13 crc kubenswrapper[4811]: I0128 17:26:13.612189 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-worker-sbqvf" podStartSLOduration=4.651468081 podStartE2EDuration="6.61216404s" podCreationTimestamp="2026-01-28 17:26:07 +0000 UTC" firstStartedPulling="2026-01-28 17:26:08.59657826 +0000 UTC m=+6061.350941843" lastFinishedPulling="2026-01-28 17:26:10.557274219 +0000 UTC m=+6063.311637802" observedRunningTime="2026-01-28 17:26:13.607027089 +0000 UTC m=+6066.361390672" watchObservedRunningTime="2026-01-28 17:26:13.61216404 +0000 UTC m=+6066.366527623" Jan 28 17:26:18 crc kubenswrapper[4811]: I0128 17:26:18.708391 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rgdmv" Jan 28 17:26:18 crc kubenswrapper[4811]: I0128 17:26:18.764731 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rgdmv" Jan 28 17:26:18 crc kubenswrapper[4811]: I0128 17:26:18.947205 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rgdmv"] Jan 28 17:26:20 crc kubenswrapper[4811]: I0128 17:26:20.297424 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-healthmanager-g7hvz" Jan 28 17:26:20 crc kubenswrapper[4811]: I0128 17:26:20.645952 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rgdmv" podUID="2717f028-9fd4-4280-9f59-fb917b8ad100" containerName="registry-server" containerID="cri-o://86415975e7bf2874c9546bc9697c8e1948c15c3c75d8bc6c024bf04c97e8d9d5" gracePeriod=2 Jan 28 17:26:21 crc kubenswrapper[4811]: I0128 17:26:21.155932 4811 util.go:48] "No ready sandbox for pod can be found. 
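[editor's note] "Killing container with a grace period ... gracePeriod=2" above is the runtime's two-step termination contract: signal the process politely, wait out the grace period, then force-kill. A minimal sketch of that sequence applied to a local child process; this illustrates the contract, not kubelet's actual CRI code path, and the sleep command is a stand-in:

// Sketch: SIGTERM -> grace period -> SIGKILL, with gracePeriod=2 as in the log.
package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

func main() {
	cmd := exec.Command("sleep", "60") // stand-in for a container's main process
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	_ = cmd.Process.Signal(syscall.SIGTERM) // polite request to exit

	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()

	select {
	case <-done:
		fmt.Println("exited within grace period")
	case <-time.After(2 * time.Second): // the logged gracePeriod
		_ = cmd.Process.Kill() // escalate to SIGKILL
		<-done
		fmt.Println("grace period expired; killed")
	}
}

This is also why the glance containers near the end of this section exit with code 143: 128 + 15 (SIGTERM) is the conventional encoding for a process that terminated on the signal rather than being force-killed.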
Jan 28 17:26:21 crc kubenswrapper[4811]: I0128 17:26:21.155932 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rgdmv"
Jan 28 17:26:21 crc kubenswrapper[4811]: I0128 17:26:21.269711 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2717f028-9fd4-4280-9f59-fb917b8ad100-utilities\") pod \"2717f028-9fd4-4280-9f59-fb917b8ad100\" (UID: \"2717f028-9fd4-4280-9f59-fb917b8ad100\") "
Jan 28 17:26:21 crc kubenswrapper[4811]: I0128 17:26:21.269891 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5tf98\" (UniqueName: \"kubernetes.io/projected/2717f028-9fd4-4280-9f59-fb917b8ad100-kube-api-access-5tf98\") pod \"2717f028-9fd4-4280-9f59-fb917b8ad100\" (UID: \"2717f028-9fd4-4280-9f59-fb917b8ad100\") "
Jan 28 17:26:21 crc kubenswrapper[4811]: I0128 17:26:21.269961 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2717f028-9fd4-4280-9f59-fb917b8ad100-catalog-content\") pod \"2717f028-9fd4-4280-9f59-fb917b8ad100\" (UID: \"2717f028-9fd4-4280-9f59-fb917b8ad100\") "
Jan 28 17:26:21 crc kubenswrapper[4811]: I0128 17:26:21.270769 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2717f028-9fd4-4280-9f59-fb917b8ad100-utilities" (OuterVolumeSpecName: "utilities") pod "2717f028-9fd4-4280-9f59-fb917b8ad100" (UID: "2717f028-9fd4-4280-9f59-fb917b8ad100"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 17:26:21 crc kubenswrapper[4811]: I0128 17:26:21.276375 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2717f028-9fd4-4280-9f59-fb917b8ad100-kube-api-access-5tf98" (OuterVolumeSpecName: "kube-api-access-5tf98") pod "2717f028-9fd4-4280-9f59-fb917b8ad100" (UID: "2717f028-9fd4-4280-9f59-fb917b8ad100"). InnerVolumeSpecName "kube-api-access-5tf98". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 17:26:21 crc kubenswrapper[4811]: I0128 17:26:21.319057 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2717f028-9fd4-4280-9f59-fb917b8ad100-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2717f028-9fd4-4280-9f59-fb917b8ad100" (UID: "2717f028-9fd4-4280-9f59-fb917b8ad100"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 17:26:21 crc kubenswrapper[4811]: I0128 17:26:21.373370 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2717f028-9fd4-4280-9f59-fb917b8ad100-utilities\") on node \"crc\" DevicePath \"\""
Jan 28 17:26:21 crc kubenswrapper[4811]: I0128 17:26:21.373417 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5tf98\" (UniqueName: \"kubernetes.io/projected/2717f028-9fd4-4280-9f59-fb917b8ad100-kube-api-access-5tf98\") on node \"crc\" DevicePath \"\""
Jan 28 17:26:21 crc kubenswrapper[4811]: I0128 17:26:21.373447 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2717f028-9fd4-4280-9f59-fb917b8ad100-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 28 17:26:21 crc kubenswrapper[4811]: I0128 17:26:21.658186 4811 generic.go:334] "Generic (PLEG): container finished" podID="2717f028-9fd4-4280-9f59-fb917b8ad100" containerID="86415975e7bf2874c9546bc9697c8e1948c15c3c75d8bc6c024bf04c97e8d9d5" exitCode=0
Jan 28 17:26:21 crc kubenswrapper[4811]: I0128 17:26:21.658233 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rgdmv" event={"ID":"2717f028-9fd4-4280-9f59-fb917b8ad100","Type":"ContainerDied","Data":"86415975e7bf2874c9546bc9697c8e1948c15c3c75d8bc6c024bf04c97e8d9d5"}
Jan 28 17:26:21 crc kubenswrapper[4811]: I0128 17:26:21.658275 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rgdmv"
Jan 28 17:26:21 crc kubenswrapper[4811]: I0128 17:26:21.658288 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rgdmv" event={"ID":"2717f028-9fd4-4280-9f59-fb917b8ad100","Type":"ContainerDied","Data":"ffbcf626934c6a4d7c4700b412d6565da1aa03f0cf30d24900a955609d0a905c"}
Jan 28 17:26:21 crc kubenswrapper[4811]: I0128 17:26:21.658312 4811 scope.go:117] "RemoveContainer" containerID="86415975e7bf2874c9546bc9697c8e1948c15c3c75d8bc6c024bf04c97e8d9d5"
Jan 28 17:26:21 crc kubenswrapper[4811]: I0128 17:26:21.692274 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rgdmv"]
Jan 28 17:26:21 crc kubenswrapper[4811]: I0128 17:26:21.700404 4811 scope.go:117] "RemoveContainer" containerID="b85e4ab8bde0fcd0c79fb6328c155e3ad809e2ac40298d91ec64e49d04679611"
Jan 28 17:26:21 crc kubenswrapper[4811]: I0128 17:26:21.704380 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rgdmv"]
Jan 28 17:26:21 crc kubenswrapper[4811]: I0128 17:26:21.728465 4811 scope.go:117] "RemoveContainer" containerID="86e07c04e25dd039135224ed33d3572a4568e35c828fb7fb4451fdd8841c6554"
Jan 28 17:26:21 crc kubenswrapper[4811]: I0128 17:26:21.791285 4811 scope.go:117] "RemoveContainer" containerID="86415975e7bf2874c9546bc9697c8e1948c15c3c75d8bc6c024bf04c97e8d9d5"
Jan 28 17:26:21 crc kubenswrapper[4811]: E0128 17:26:21.791990 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"86415975e7bf2874c9546bc9697c8e1948c15c3c75d8bc6c024bf04c97e8d9d5\": container with ID starting with 86415975e7bf2874c9546bc9697c8e1948c15c3c75d8bc6c024bf04c97e8d9d5 not found: ID does not exist" containerID="86415975e7bf2874c9546bc9697c8e1948c15c3c75d8bc6c024bf04c97e8d9d5"
Jan 28 17:26:21 crc kubenswrapper[4811]: I0128 17:26:21.792046 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86415975e7bf2874c9546bc9697c8e1948c15c3c75d8bc6c024bf04c97e8d9d5"} err="failed to get container status \"86415975e7bf2874c9546bc9697c8e1948c15c3c75d8bc6c024bf04c97e8d9d5\": rpc error: code = NotFound desc = could not find container \"86415975e7bf2874c9546bc9697c8e1948c15c3c75d8bc6c024bf04c97e8d9d5\": container with ID starting with 86415975e7bf2874c9546bc9697c8e1948c15c3c75d8bc6c024bf04c97e8d9d5 not found: ID does not exist"
Jan 28 17:26:21 crc kubenswrapper[4811]: I0128 17:26:21.792080 4811 scope.go:117] "RemoveContainer" containerID="b85e4ab8bde0fcd0c79fb6328c155e3ad809e2ac40298d91ec64e49d04679611"
Jan 28 17:26:21 crc kubenswrapper[4811]: E0128 17:26:21.792394 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b85e4ab8bde0fcd0c79fb6328c155e3ad809e2ac40298d91ec64e49d04679611\": container with ID starting with b85e4ab8bde0fcd0c79fb6328c155e3ad809e2ac40298d91ec64e49d04679611 not found: ID does not exist" containerID="b85e4ab8bde0fcd0c79fb6328c155e3ad809e2ac40298d91ec64e49d04679611"
Jan 28 17:26:21 crc kubenswrapper[4811]: I0128 17:26:21.792423 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b85e4ab8bde0fcd0c79fb6328c155e3ad809e2ac40298d91ec64e49d04679611"} err="failed to get container status \"b85e4ab8bde0fcd0c79fb6328c155e3ad809e2ac40298d91ec64e49d04679611\": rpc error: code = NotFound desc = could not find container \"b85e4ab8bde0fcd0c79fb6328c155e3ad809e2ac40298d91ec64e49d04679611\": container with ID starting with b85e4ab8bde0fcd0c79fb6328c155e3ad809e2ac40298d91ec64e49d04679611 not found: ID does not exist"
Jan 28 17:26:21 crc kubenswrapper[4811]: I0128 17:26:21.792464 4811 scope.go:117] "RemoveContainer" containerID="86e07c04e25dd039135224ed33d3572a4568e35c828fb7fb4451fdd8841c6554"
Jan 28 17:26:21 crc kubenswrapper[4811]: E0128 17:26:21.792665 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"86e07c04e25dd039135224ed33d3572a4568e35c828fb7fb4451fdd8841c6554\": container with ID starting with 86e07c04e25dd039135224ed33d3572a4568e35c828fb7fb4451fdd8841c6554 not found: ID does not exist" containerID="86e07c04e25dd039135224ed33d3572a4568e35c828fb7fb4451fdd8841c6554"
Jan 28 17:26:21 crc kubenswrapper[4811]: I0128 17:26:21.792690 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86e07c04e25dd039135224ed33d3572a4568e35c828fb7fb4451fdd8841c6554"} err="failed to get container status \"86e07c04e25dd039135224ed33d3572a4568e35c828fb7fb4451fdd8841c6554\": rpc error: code = NotFound desc = could not find container \"86e07c04e25dd039135224ed33d3572a4568e35c828fb7fb4451fdd8841c6554\": container with ID starting with 86e07c04e25dd039135224ed33d3572a4568e35c828fb7fb4451fdd8841c6554 not found: ID does not exist"
Jan 28 17:26:21 crc kubenswrapper[4811]: I0128 17:26:21.894854 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-housekeeping-9lwmc"
Jan 28 17:26:22 crc kubenswrapper[4811]: I0128 17:26:22.351656 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2717f028-9fd4-4280-9f59-fb917b8ad100" path="/var/lib/kubelet/pods/2717f028-9fd4-4280-9f59-fb917b8ad100/volumes"
Jan 28 17:26:23 crc kubenswrapper[4811]: I0128 17:26:23.018519 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-worker-sbqvf"
Jan 28 17:26:24 crc kubenswrapper[4811]: I0128 17:26:24.511255 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-image-upload-65dd99cb46-rfm6m"]
Jan 28 17:26:24 crc kubenswrapper[4811]: I0128 17:26:24.511839 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/octavia-image-upload-65dd99cb46-rfm6m" podUID="8ec0f7d1-06e9-4518-ab30-82ab6807056d" containerName="octavia-amphora-httpd" containerID="cri-o://70990a02718a354c5cfb80ac85d38564b7e9e3551e15d3c8cdc957e063013142" gracePeriod=30
Jan 28 17:26:25 crc kubenswrapper[4811]: I0128 17:26:25.121770 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-65dd99cb46-rfm6m"
Jan 28 17:26:25 crc kubenswrapper[4811]: I0128 17:26:25.250857 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/8ec0f7d1-06e9-4518-ab30-82ab6807056d-httpd-config\") pod \"8ec0f7d1-06e9-4518-ab30-82ab6807056d\" (UID: \"8ec0f7d1-06e9-4518-ab30-82ab6807056d\") "
Jan 28 17:26:25 crc kubenswrapper[4811]: I0128 17:26:25.250964 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/8ec0f7d1-06e9-4518-ab30-82ab6807056d-amphora-image\") pod \"8ec0f7d1-06e9-4518-ab30-82ab6807056d\" (UID: \"8ec0f7d1-06e9-4518-ab30-82ab6807056d\") "
Jan 28 17:26:25 crc kubenswrapper[4811]: I0128 17:26:25.280406 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ec0f7d1-06e9-4518-ab30-82ab6807056d-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "8ec0f7d1-06e9-4518-ab30-82ab6807056d" (UID: "8ec0f7d1-06e9-4518-ab30-82ab6807056d"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:26:25 crc kubenswrapper[4811]: I0128 17:26:25.353207 4811 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/8ec0f7d1-06e9-4518-ab30-82ab6807056d-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 28 17:26:25 crc kubenswrapper[4811]: I0128 17:26:25.353239 4811 reconciler_common.go:293] "Volume detached for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/8ec0f7d1-06e9-4518-ab30-82ab6807056d-amphora-image\") on node \"crc\" DevicePath \"\"" Jan 28 17:26:25 crc kubenswrapper[4811]: I0128 17:26:25.696314 4811 generic.go:334] "Generic (PLEG): container finished" podID="8ec0f7d1-06e9-4518-ab30-82ab6807056d" containerID="70990a02718a354c5cfb80ac85d38564b7e9e3551e15d3c8cdc957e063013142" exitCode=0 Jan 28 17:26:25 crc kubenswrapper[4811]: I0128 17:26:25.696354 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-65dd99cb46-rfm6m" event={"ID":"8ec0f7d1-06e9-4518-ab30-82ab6807056d","Type":"ContainerDied","Data":"70990a02718a354c5cfb80ac85d38564b7e9e3551e15d3c8cdc957e063013142"} Jan 28 17:26:25 crc kubenswrapper[4811]: I0128 17:26:25.696378 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-65dd99cb46-rfm6m" event={"ID":"8ec0f7d1-06e9-4518-ab30-82ab6807056d","Type":"ContainerDied","Data":"16af27b664ebb41eb8a0351aff6d6027d80d170fdec87e96dac5b5bd22401f78"} Jan 28 17:26:25 crc kubenswrapper[4811]: I0128 17:26:25.696393 4811 scope.go:117] "RemoveContainer" containerID="70990a02718a354c5cfb80ac85d38564b7e9e3551e15d3c8cdc957e063013142" Jan 28 17:26:25 crc kubenswrapper[4811]: I0128 17:26:25.696524 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-image-upload-65dd99cb46-rfm6m" Jan 28 17:26:25 crc kubenswrapper[4811]: I0128 17:26:25.736877 4811 scope.go:117] "RemoveContainer" containerID="e0a3f5451edc2a2b71a31cdff46b3d32fba0615e90bf994af2eef1c2971964be" Jan 28 17:26:25 crc kubenswrapper[4811]: I0128 17:26:25.761935 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-image-upload-65dd99cb46-rfm6m"] Jan 28 17:26:25 crc kubenswrapper[4811]: I0128 17:26:25.770376 4811 scope.go:117] "RemoveContainer" containerID="70990a02718a354c5cfb80ac85d38564b7e9e3551e15d3c8cdc957e063013142" Jan 28 17:26:25 crc kubenswrapper[4811]: E0128 17:26:25.770842 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70990a02718a354c5cfb80ac85d38564b7e9e3551e15d3c8cdc957e063013142\": container with ID starting with 70990a02718a354c5cfb80ac85d38564b7e9e3551e15d3c8cdc957e063013142 not found: ID does not exist" containerID="70990a02718a354c5cfb80ac85d38564b7e9e3551e15d3c8cdc957e063013142" Jan 28 17:26:25 crc kubenswrapper[4811]: I0128 17:26:25.770870 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70990a02718a354c5cfb80ac85d38564b7e9e3551e15d3c8cdc957e063013142"} err="failed to get container status \"70990a02718a354c5cfb80ac85d38564b7e9e3551e15d3c8cdc957e063013142\": rpc error: code = NotFound desc = could not find container \"70990a02718a354c5cfb80ac85d38564b7e9e3551e15d3c8cdc957e063013142\": container with ID starting with 70990a02718a354c5cfb80ac85d38564b7e9e3551e15d3c8cdc957e063013142 not found: ID does not exist" Jan 28 17:26:25 crc kubenswrapper[4811]: I0128 17:26:25.770889 4811 scope.go:117] "RemoveContainer" containerID="e0a3f5451edc2a2b71a31cdff46b3d32fba0615e90bf994af2eef1c2971964be" Jan 28 17:26:25 crc kubenswrapper[4811]: E0128 17:26:25.771199 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0a3f5451edc2a2b71a31cdff46b3d32fba0615e90bf994af2eef1c2971964be\": container with ID starting with e0a3f5451edc2a2b71a31cdff46b3d32fba0615e90bf994af2eef1c2971964be not found: ID does not exist" containerID="e0a3f5451edc2a2b71a31cdff46b3d32fba0615e90bf994af2eef1c2971964be" Jan 28 17:26:25 crc kubenswrapper[4811]: I0128 17:26:25.771223 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0a3f5451edc2a2b71a31cdff46b3d32fba0615e90bf994af2eef1c2971964be"} err="failed to get container status \"e0a3f5451edc2a2b71a31cdff46b3d32fba0615e90bf994af2eef1c2971964be\": rpc error: code = NotFound desc = could not find container \"e0a3f5451edc2a2b71a31cdff46b3d32fba0615e90bf994af2eef1c2971964be\": container with ID starting with e0a3f5451edc2a2b71a31cdff46b3d32fba0615e90bf994af2eef1c2971964be not found: ID does not exist" Jan 28 17:26:25 crc kubenswrapper[4811]: I0128 17:26:25.771949 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-image-upload-65dd99cb46-rfm6m"] Jan 28 17:26:26 crc kubenswrapper[4811]: I0128 17:26:26.349881 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ec0f7d1-06e9-4518-ab30-82ab6807056d" path="/var/lib/kubelet/pods/8ec0f7d1-06e9-4518-ab30-82ab6807056d/volumes" Jan 28 17:26:28 crc kubenswrapper[4811]: I0128 17:26:28.683330 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-image-upload-65dd99cb46-4zxm4"] Jan 28 17:26:28 crc kubenswrapper[4811]: E0128 
17:26:28.684378 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ec0f7d1-06e9-4518-ab30-82ab6807056d" containerName="init" Jan 28 17:26:28 crc kubenswrapper[4811]: I0128 17:26:28.684390 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ec0f7d1-06e9-4518-ab30-82ab6807056d" containerName="init" Jan 28 17:26:28 crc kubenswrapper[4811]: E0128 17:26:28.684403 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2717f028-9fd4-4280-9f59-fb917b8ad100" containerName="extract-utilities" Jan 28 17:26:28 crc kubenswrapper[4811]: I0128 17:26:28.684410 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="2717f028-9fd4-4280-9f59-fb917b8ad100" containerName="extract-utilities" Jan 28 17:26:28 crc kubenswrapper[4811]: E0128 17:26:28.684425 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2717f028-9fd4-4280-9f59-fb917b8ad100" containerName="extract-content" Jan 28 17:26:28 crc kubenswrapper[4811]: I0128 17:26:28.684449 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="2717f028-9fd4-4280-9f59-fb917b8ad100" containerName="extract-content" Jan 28 17:26:28 crc kubenswrapper[4811]: E0128 17:26:28.684464 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2717f028-9fd4-4280-9f59-fb917b8ad100" containerName="registry-server" Jan 28 17:26:28 crc kubenswrapper[4811]: I0128 17:26:28.684470 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="2717f028-9fd4-4280-9f59-fb917b8ad100" containerName="registry-server" Jan 28 17:26:28 crc kubenswrapper[4811]: E0128 17:26:28.684481 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ec0f7d1-06e9-4518-ab30-82ab6807056d" containerName="octavia-amphora-httpd" Jan 28 17:26:28 crc kubenswrapper[4811]: I0128 17:26:28.684487 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ec0f7d1-06e9-4518-ab30-82ab6807056d" containerName="octavia-amphora-httpd" Jan 28 17:26:28 crc kubenswrapper[4811]: I0128 17:26:28.684652 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ec0f7d1-06e9-4518-ab30-82ab6807056d" containerName="octavia-amphora-httpd" Jan 28 17:26:28 crc kubenswrapper[4811]: I0128 17:26:28.684673 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="2717f028-9fd4-4280-9f59-fb917b8ad100" containerName="registry-server" Jan 28 17:26:28 crc kubenswrapper[4811]: I0128 17:26:28.686152 4811 util.go:30] "No sandbox for pod can be found. 
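[editor's note] The cpu_manager/state_mem/memory_manager burst above shows the resource managers dropping per-container assignments for pods whose UIDs were just deleted (the certified-operators and image-upload pods) before admitting the replacement pod. A minimal sketch of that bookkeeping pattern; the types and the active-pod set are assumptions, only the behaviour (purge entries whose pod UID is no longer active) mirrors the log:

// Sketch: RemoveStaleState-style cleanup of per-container assignments.
package main

import "fmt"

type key struct{ podUID, container string }

type state struct {
	assignments map[key]string // value: e.g. an assigned CPU set, as a string
}

func (s *state) removeStale(active map[string]bool) {
	for k := range s.assignments {
		if !active[k.podUID] {
			fmt.Printf("removing stale assignment podUID=%q containerName=%q\n", k.podUID, k.container)
			delete(s.assignments, k) // deleting while ranging over a map is safe in Go
		}
	}
}

func main() {
	s := &state{assignments: map[key]string{
		{"8ec0f7d1-06e9-4518-ab30-82ab6807056d", "octavia-amphora-httpd"}: "0-3",
		{"2717f028-9fd4-4280-9f59-fb917b8ad100", "registry-server"}:       "0-3",
	}}
	s.removeStale(map[string]bool{}) // neither pod UID is active any more
	fmt.Println("remaining assignments:", len(s.assignments))
}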
Jan 28 17:26:28 crc kubenswrapper[4811]: I0128 17:26:28.686152 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-65dd99cb46-4zxm4"
Jan 28 17:26:28 crc kubenswrapper[4811]: I0128 17:26:28.688452 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-config-data"
Jan 28 17:26:28 crc kubenswrapper[4811]: I0128 17:26:28.697884 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-65dd99cb46-4zxm4"]
Jan 28 17:26:28 crc kubenswrapper[4811]: I0128 17:26:28.722668 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/19dc43fa-6aa7-4c8b-a693-9ea0c00cb5eb-httpd-config\") pod \"octavia-image-upload-65dd99cb46-4zxm4\" (UID: \"19dc43fa-6aa7-4c8b-a693-9ea0c00cb5eb\") " pod="openstack/octavia-image-upload-65dd99cb46-4zxm4"
Jan 28 17:26:28 crc kubenswrapper[4811]: I0128 17:26:28.722770 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/19dc43fa-6aa7-4c8b-a693-9ea0c00cb5eb-amphora-image\") pod \"octavia-image-upload-65dd99cb46-4zxm4\" (UID: \"19dc43fa-6aa7-4c8b-a693-9ea0c00cb5eb\") " pod="openstack/octavia-image-upload-65dd99cb46-4zxm4"
Jan 28 17:26:28 crc kubenswrapper[4811]: I0128 17:26:28.829766 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/19dc43fa-6aa7-4c8b-a693-9ea0c00cb5eb-httpd-config\") pod \"octavia-image-upload-65dd99cb46-4zxm4\" (UID: \"19dc43fa-6aa7-4c8b-a693-9ea0c00cb5eb\") " pod="openstack/octavia-image-upload-65dd99cb46-4zxm4"
Jan 28 17:26:28 crc kubenswrapper[4811]: I0128 17:26:28.829872 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/19dc43fa-6aa7-4c8b-a693-9ea0c00cb5eb-amphora-image\") pod \"octavia-image-upload-65dd99cb46-4zxm4\" (UID: \"19dc43fa-6aa7-4c8b-a693-9ea0c00cb5eb\") " pod="openstack/octavia-image-upload-65dd99cb46-4zxm4"
Jan 28 17:26:28 crc kubenswrapper[4811]: I0128 17:26:28.830328 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/19dc43fa-6aa7-4c8b-a693-9ea0c00cb5eb-amphora-image\") pod \"octavia-image-upload-65dd99cb46-4zxm4\" (UID: \"19dc43fa-6aa7-4c8b-a693-9ea0c00cb5eb\") " pod="openstack/octavia-image-upload-65dd99cb46-4zxm4"
Jan 28 17:26:28 crc kubenswrapper[4811]: I0128 17:26:28.844410 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/19dc43fa-6aa7-4c8b-a693-9ea0c00cb5eb-httpd-config\") pod \"octavia-image-upload-65dd99cb46-4zxm4\" (UID: \"19dc43fa-6aa7-4c8b-a693-9ea0c00cb5eb\") " pod="openstack/octavia-image-upload-65dd99cb46-4zxm4"
Jan 28 17:26:29 crc kubenswrapper[4811]: I0128 17:26:29.005899 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-65dd99cb46-4zxm4"
Jan 28 17:26:29 crc kubenswrapper[4811]: I0128 17:26:29.483695 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-65dd99cb46-4zxm4"]
Jan 28 17:26:29 crc kubenswrapper[4811]: I0128 17:26:29.732605 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-65dd99cb46-4zxm4" event={"ID":"19dc43fa-6aa7-4c8b-a693-9ea0c00cb5eb","Type":"ContainerStarted","Data":"66acc5469a401fc9d977d9dc339ddcc71d3282e7eafe3d9b14a9ab12ce290155"}
Jan 28 17:26:30 crc kubenswrapper[4811]: I0128 17:26:30.740792 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-65dd99cb46-4zxm4" event={"ID":"19dc43fa-6aa7-4c8b-a693-9ea0c00cb5eb","Type":"ContainerStarted","Data":"b8cc7948decf04e9582a8bfb66a4abc9d93e5bdb7d8c8f9dcfb0b03e7dd305da"}
Jan 28 17:26:33 crc kubenswrapper[4811]: I0128 17:26:33.087572 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 17:26:33 crc kubenswrapper[4811]: I0128 17:26:33.087878 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 17:26:34 crc kubenswrapper[4811]: I0128 17:26:34.029373 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-m9pwb"]
Jan 28 17:26:34 crc kubenswrapper[4811]: I0128 17:26:34.039158 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-m9pwb"]
Jan 28 17:26:34 crc kubenswrapper[4811]: I0128 17:26:34.049198 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-9b88-account-create-update-8r9wc"]
Jan 28 17:26:34 crc kubenswrapper[4811]: I0128 17:26:34.057507 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-9b88-account-create-update-8r9wc"]
Jan 28 17:26:34 crc kubenswrapper[4811]: I0128 17:26:34.351555 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15d20415-1194-4077-9c8c-29ecf1c7e286" path="/var/lib/kubelet/pods/15d20415-1194-4077-9c8c-29ecf1c7e286/volumes"
Jan 28 17:26:34 crc kubenswrapper[4811]: I0128 17:26:34.354000 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="439a5db7-3e8b-4871-b500-3ba8c6039691" path="/var/lib/kubelet/pods/439a5db7-3e8b-4871-b500-3ba8c6039691/volumes"
Jan 28 17:26:34 crc kubenswrapper[4811]: I0128 17:26:34.787155 4811 generic.go:334] "Generic (PLEG): container finished" podID="19dc43fa-6aa7-4c8b-a693-9ea0c00cb5eb" containerID="b8cc7948decf04e9582a8bfb66a4abc9d93e5bdb7d8c8f9dcfb0b03e7dd305da" exitCode=0
Jan 28 17:26:34 crc kubenswrapper[4811]: I0128 17:26:34.787205 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-65dd99cb46-4zxm4" event={"ID":"19dc43fa-6aa7-4c8b-a693-9ea0c00cb5eb","Type":"ContainerDied","Data":"b8cc7948decf04e9582a8bfb66a4abc9d93e5bdb7d8c8f9dcfb0b03e7dd305da"}
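[editor's note] The machine-config-daemon liveness failure above is a plain HTTP GET against http://127.0.0.1:8798/health that got connection refused. A minimal sketch of such a check with a bounded client timeout; the URL comes from the log, while the 1s timeout is an assumed value (the actual probe timeout is configured in the pod spec, which is not shown here):

// Sketch: an HTTP liveness check like the one failing in the log.
package main

import (
	"fmt"
	"net/http"
	"time"
)

func live(url string) error {
	client := &http.Client{Timeout: time.Second} // bound the whole request
	resp, err := client.Get(url)
	if err != nil {
		return err // e.g. "connect: connection refused", as logged
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("unhealthy: %s", resp.Status)
	}
	return nil
}

func main() {
	if err := live("http://127.0.0.1:8798/health"); err != nil {
		fmt.Println("Probe failed:", err)
	}
}

A refused connection (no listener on the port) fails faster than a hung one; the kubelet records the failure and only restarts the container once the configured failure threshold is exceeded.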
pod="openstack/octavia-image-upload-65dd99cb46-4zxm4" event={"ID":"19dc43fa-6aa7-4c8b-a693-9ea0c00cb5eb","Type":"ContainerStarted","Data":"b43294191d3e983e05caba275a1db6bb5dae348e91987c79bd6e6e93a69a95a0"} Jan 28 17:26:35 crc kubenswrapper[4811]: I0128 17:26:35.815846 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-image-upload-65dd99cb46-4zxm4" podStartSLOduration=7.345192169 podStartE2EDuration="7.815822886s" podCreationTimestamp="2026-01-28 17:26:28 +0000 UTC" firstStartedPulling="2026-01-28 17:26:29.48588131 +0000 UTC m=+6082.240244893" lastFinishedPulling="2026-01-28 17:26:29.956512027 +0000 UTC m=+6082.710875610" observedRunningTime="2026-01-28 17:26:35.81339147 +0000 UTC m=+6088.567755053" watchObservedRunningTime="2026-01-28 17:26:35.815822886 +0000 UTC m=+6088.570186469" Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.023760 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6ff9875fc-rngj5"] Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.025924 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6ff9875fc-rngj5" Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.032971 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.033201 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-hpqjj" Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.041194 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.042898 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.058675 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6ff9875fc-rngj5"] Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.125757 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.126248 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="177ba4f4-b77d-49b6-b9af-37a5a5a872d7" containerName="glance-log" containerID="cri-o://d1876907051ab6bbe91a76928452e9ab313dcf35ea99e23a38a39782b1e8ccdd" gracePeriod=30 Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.126330 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="177ba4f4-b77d-49b6-b9af-37a5a5a872d7" containerName="glance-httpd" containerID="cri-o://0607aa5f56411773fa3b491b98ade32a2acf2e18273bcdc4177c03e51f447445" gracePeriod=30 Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.160704 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.160966 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="a1447823-7ddf-4d19-965e-d975ce17c540" containerName="glance-log" containerID="cri-o://5b051ae206aa0aa155efec89b12be9b53147d8c5310b0ee0757ddca087b8b7d2" gracePeriod=30 Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.162124 4811 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/glance-default-internal-api-0" podUID="a1447823-7ddf-4d19-965e-d975ce17c540" containerName="glance-httpd" containerID="cri-o://c8a168ad02dbb98b713cad79c536ca2d9670d0416b666f1dd5932b9145aa7cd5" gracePeriod=30 Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.181474 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbm7w\" (UniqueName: \"kubernetes.io/projected/651c91bb-dc5d-4416-bece-7647b1447487-kube-api-access-tbm7w\") pod \"horizon-6ff9875fc-rngj5\" (UID: \"651c91bb-dc5d-4416-bece-7647b1447487\") " pod="openstack/horizon-6ff9875fc-rngj5" Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.181521 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/651c91bb-dc5d-4416-bece-7647b1447487-horizon-secret-key\") pod \"horizon-6ff9875fc-rngj5\" (UID: \"651c91bb-dc5d-4416-bece-7647b1447487\") " pod="openstack/horizon-6ff9875fc-rngj5" Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.181579 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/651c91bb-dc5d-4416-bece-7647b1447487-logs\") pod \"horizon-6ff9875fc-rngj5\" (UID: \"651c91bb-dc5d-4416-bece-7647b1447487\") " pod="openstack/horizon-6ff9875fc-rngj5" Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.181600 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/651c91bb-dc5d-4416-bece-7647b1447487-scripts\") pod \"horizon-6ff9875fc-rngj5\" (UID: \"651c91bb-dc5d-4416-bece-7647b1447487\") " pod="openstack/horizon-6ff9875fc-rngj5" Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.181619 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/651c91bb-dc5d-4416-bece-7647b1447487-config-data\") pod \"horizon-6ff9875fc-rngj5\" (UID: \"651c91bb-dc5d-4416-bece-7647b1447487\") " pod="openstack/horizon-6ff9875fc-rngj5" Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.283047 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/651c91bb-dc5d-4416-bece-7647b1447487-scripts\") pod \"horizon-6ff9875fc-rngj5\" (UID: \"651c91bb-dc5d-4416-bece-7647b1447487\") " pod="openstack/horizon-6ff9875fc-rngj5" Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.284164 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/651c91bb-dc5d-4416-bece-7647b1447487-config-data\") pod \"horizon-6ff9875fc-rngj5\" (UID: \"651c91bb-dc5d-4416-bece-7647b1447487\") " pod="openstack/horizon-6ff9875fc-rngj5" Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.284117 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/651c91bb-dc5d-4416-bece-7647b1447487-scripts\") pod \"horizon-6ff9875fc-rngj5\" (UID: \"651c91bb-dc5d-4416-bece-7647b1447487\") " pod="openstack/horizon-6ff9875fc-rngj5" Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.285223 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/651c91bb-dc5d-4416-bece-7647b1447487-config-data\") pod 
\"horizon-6ff9875fc-rngj5\" (UID: \"651c91bb-dc5d-4416-bece-7647b1447487\") " pod="openstack/horizon-6ff9875fc-rngj5" Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.285471 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbm7w\" (UniqueName: \"kubernetes.io/projected/651c91bb-dc5d-4416-bece-7647b1447487-kube-api-access-tbm7w\") pod \"horizon-6ff9875fc-rngj5\" (UID: \"651c91bb-dc5d-4416-bece-7647b1447487\") " pod="openstack/horizon-6ff9875fc-rngj5" Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.285519 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/651c91bb-dc5d-4416-bece-7647b1447487-horizon-secret-key\") pod \"horizon-6ff9875fc-rngj5\" (UID: \"651c91bb-dc5d-4416-bece-7647b1447487\") " pod="openstack/horizon-6ff9875fc-rngj5" Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.286268 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/651c91bb-dc5d-4416-bece-7647b1447487-logs\") pod \"horizon-6ff9875fc-rngj5\" (UID: \"651c91bb-dc5d-4416-bece-7647b1447487\") " pod="openstack/horizon-6ff9875fc-rngj5" Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.286572 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/651c91bb-dc5d-4416-bece-7647b1447487-logs\") pod \"horizon-6ff9875fc-rngj5\" (UID: \"651c91bb-dc5d-4416-bece-7647b1447487\") " pod="openstack/horizon-6ff9875fc-rngj5" Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.291748 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/651c91bb-dc5d-4416-bece-7647b1447487-horizon-secret-key\") pod \"horizon-6ff9875fc-rngj5\" (UID: \"651c91bb-dc5d-4416-bece-7647b1447487\") " pod="openstack/horizon-6ff9875fc-rngj5" Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.301767 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbm7w\" (UniqueName: \"kubernetes.io/projected/651c91bb-dc5d-4416-bece-7647b1447487-kube-api-access-tbm7w\") pod \"horizon-6ff9875fc-rngj5\" (UID: \"651c91bb-dc5d-4416-bece-7647b1447487\") " pod="openstack/horizon-6ff9875fc-rngj5" Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.763403 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-967dbf479-zj5bt"] Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.765336 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-967dbf479-zj5bt" Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.783118 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-967dbf479-zj5bt"] Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.815534 4811 generic.go:334] "Generic (PLEG): container finished" podID="a1447823-7ddf-4d19-965e-d975ce17c540" containerID="5b051ae206aa0aa155efec89b12be9b53147d8c5310b0ee0757ddca087b8b7d2" exitCode=143 Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.815639 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a1447823-7ddf-4d19-965e-d975ce17c540","Type":"ContainerDied","Data":"5b051ae206aa0aa155efec89b12be9b53147d8c5310b0ee0757ddca087b8b7d2"} Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.824080 4811 generic.go:334] "Generic (PLEG): container finished" podID="177ba4f4-b77d-49b6-b9af-37a5a5a872d7" containerID="d1876907051ab6bbe91a76928452e9ab313dcf35ea99e23a38a39782b1e8ccdd" exitCode=143 Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.824127 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"177ba4f4-b77d-49b6-b9af-37a5a5a872d7","Type":"ContainerDied","Data":"d1876907051ab6bbe91a76928452e9ab313dcf35ea99e23a38a39782b1e8ccdd"} Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.828018 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6ff9875fc-rngj5" Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.897891 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e3c24a39-106a-4e13-aae3-7e8399c05eb0-horizon-secret-key\") pod \"horizon-967dbf479-zj5bt\" (UID: \"e3c24a39-106a-4e13-aae3-7e8399c05eb0\") " pod="openstack/horizon-967dbf479-zj5bt" Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.898308 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e3c24a39-106a-4e13-aae3-7e8399c05eb0-scripts\") pod \"horizon-967dbf479-zj5bt\" (UID: \"e3c24a39-106a-4e13-aae3-7e8399c05eb0\") " pod="openstack/horizon-967dbf479-zj5bt" Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.898337 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lb5s\" (UniqueName: \"kubernetes.io/projected/e3c24a39-106a-4e13-aae3-7e8399c05eb0-kube-api-access-6lb5s\") pod \"horizon-967dbf479-zj5bt\" (UID: \"e3c24a39-106a-4e13-aae3-7e8399c05eb0\") " pod="openstack/horizon-967dbf479-zj5bt" Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.898389 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3c24a39-106a-4e13-aae3-7e8399c05eb0-logs\") pod \"horizon-967dbf479-zj5bt\" (UID: \"e3c24a39-106a-4e13-aae3-7e8399c05eb0\") " pod="openstack/horizon-967dbf479-zj5bt" Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.898539 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e3c24a39-106a-4e13-aae3-7e8399c05eb0-config-data\") pod \"horizon-967dbf479-zj5bt\" (UID: \"e3c24a39-106a-4e13-aae3-7e8399c05eb0\") " pod="openstack/horizon-967dbf479-zj5bt" Jan 28 17:26:36 crc kubenswrapper[4811]: 
Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.927326 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5ccf78dbdf-rqvtn"]
Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.929361 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5ccf78dbdf-rqvtn"
Jan 28 17:26:36 crc kubenswrapper[4811]: I0128 17:26:36.954944 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5ccf78dbdf-rqvtn"]
Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.001381 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e3c24a39-106a-4e13-aae3-7e8399c05eb0-scripts\") pod \"horizon-967dbf479-zj5bt\" (UID: \"e3c24a39-106a-4e13-aae3-7e8399c05eb0\") " pod="openstack/horizon-967dbf479-zj5bt"
Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.001491 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lb5s\" (UniqueName: \"kubernetes.io/projected/e3c24a39-106a-4e13-aae3-7e8399c05eb0-kube-api-access-6lb5s\") pod \"horizon-967dbf479-zj5bt\" (UID: \"e3c24a39-106a-4e13-aae3-7e8399c05eb0\") " pod="openstack/horizon-967dbf479-zj5bt"
Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.001588 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3c24a39-106a-4e13-aae3-7e8399c05eb0-logs\") pod \"horizon-967dbf479-zj5bt\" (UID: \"e3c24a39-106a-4e13-aae3-7e8399c05eb0\") " pod="openstack/horizon-967dbf479-zj5bt"
Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.001733 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e3c24a39-106a-4e13-aae3-7e8399c05eb0-config-data\") pod \"horizon-967dbf479-zj5bt\" (UID: \"e3c24a39-106a-4e13-aae3-7e8399c05eb0\") " pod="openstack/horizon-967dbf479-zj5bt"
Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.001912 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e3c24a39-106a-4e13-aae3-7e8399c05eb0-horizon-secret-key\") pod \"horizon-967dbf479-zj5bt\" (UID: \"e3c24a39-106a-4e13-aae3-7e8399c05eb0\") " pod="openstack/horizon-967dbf479-zj5bt"
Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.003104 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3c24a39-106a-4e13-aae3-7e8399c05eb0-logs\") pod \"horizon-967dbf479-zj5bt\" (UID: \"e3c24a39-106a-4e13-aae3-7e8399c05eb0\") " pod="openstack/horizon-967dbf479-zj5bt"
Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.005754 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e3c24a39-106a-4e13-aae3-7e8399c05eb0-scripts\") pod \"horizon-967dbf479-zj5bt\" (UID: \"e3c24a39-106a-4e13-aae3-7e8399c05eb0\") " pod="openstack/horizon-967dbf479-zj5bt"
Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.006265 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e3c24a39-106a-4e13-aae3-7e8399c05eb0-config-data\") pod \"horizon-967dbf479-zj5bt\" (UID: \"e3c24a39-106a-4e13-aae3-7e8399c05eb0\") " pod="openstack/horizon-967dbf479-zj5bt"
pod="openstack/horizon-967dbf479-zj5bt" Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.008190 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e3c24a39-106a-4e13-aae3-7e8399c05eb0-horizon-secret-key\") pod \"horizon-967dbf479-zj5bt\" (UID: \"e3c24a39-106a-4e13-aae3-7e8399c05eb0\") " pod="openstack/horizon-967dbf479-zj5bt" Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.043070 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lb5s\" (UniqueName: \"kubernetes.io/projected/e3c24a39-106a-4e13-aae3-7e8399c05eb0-kube-api-access-6lb5s\") pod \"horizon-967dbf479-zj5bt\" (UID: \"e3c24a39-106a-4e13-aae3-7e8399c05eb0\") " pod="openstack/horizon-967dbf479-zj5bt" Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.104026 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4224m\" (UniqueName: \"kubernetes.io/projected/3c3541c8-9738-4b90-92cd-d8676444ef01-kube-api-access-4224m\") pod \"horizon-5ccf78dbdf-rqvtn\" (UID: \"3c3541c8-9738-4b90-92cd-d8676444ef01\") " pod="openstack/horizon-5ccf78dbdf-rqvtn" Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.104093 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3c3541c8-9738-4b90-92cd-d8676444ef01-horizon-secret-key\") pod \"horizon-5ccf78dbdf-rqvtn\" (UID: \"3c3541c8-9738-4b90-92cd-d8676444ef01\") " pod="openstack/horizon-5ccf78dbdf-rqvtn" Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.104122 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3c3541c8-9738-4b90-92cd-d8676444ef01-config-data\") pod \"horizon-5ccf78dbdf-rqvtn\" (UID: \"3c3541c8-9738-4b90-92cd-d8676444ef01\") " pod="openstack/horizon-5ccf78dbdf-rqvtn" Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.104187 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c3541c8-9738-4b90-92cd-d8676444ef01-logs\") pod \"horizon-5ccf78dbdf-rqvtn\" (UID: \"3c3541c8-9738-4b90-92cd-d8676444ef01\") " pod="openstack/horizon-5ccf78dbdf-rqvtn" Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.104254 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3c3541c8-9738-4b90-92cd-d8676444ef01-scripts\") pod \"horizon-5ccf78dbdf-rqvtn\" (UID: \"3c3541c8-9738-4b90-92cd-d8676444ef01\") " pod="openstack/horizon-5ccf78dbdf-rqvtn" Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.137418 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-967dbf479-zj5bt" Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.205977 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3c3541c8-9738-4b90-92cd-d8676444ef01-horizon-secret-key\") pod \"horizon-5ccf78dbdf-rqvtn\" (UID: \"3c3541c8-9738-4b90-92cd-d8676444ef01\") " pod="openstack/horizon-5ccf78dbdf-rqvtn" Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.206314 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3c3541c8-9738-4b90-92cd-d8676444ef01-config-data\") pod \"horizon-5ccf78dbdf-rqvtn\" (UID: \"3c3541c8-9738-4b90-92cd-d8676444ef01\") " pod="openstack/horizon-5ccf78dbdf-rqvtn" Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.206419 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c3541c8-9738-4b90-92cd-d8676444ef01-logs\") pod \"horizon-5ccf78dbdf-rqvtn\" (UID: \"3c3541c8-9738-4b90-92cd-d8676444ef01\") " pod="openstack/horizon-5ccf78dbdf-rqvtn" Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.206505 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3c3541c8-9738-4b90-92cd-d8676444ef01-scripts\") pod \"horizon-5ccf78dbdf-rqvtn\" (UID: \"3c3541c8-9738-4b90-92cd-d8676444ef01\") " pod="openstack/horizon-5ccf78dbdf-rqvtn" Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.206617 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4224m\" (UniqueName: \"kubernetes.io/projected/3c3541c8-9738-4b90-92cd-d8676444ef01-kube-api-access-4224m\") pod \"horizon-5ccf78dbdf-rqvtn\" (UID: \"3c3541c8-9738-4b90-92cd-d8676444ef01\") " pod="openstack/horizon-5ccf78dbdf-rqvtn" Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.207249 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c3541c8-9738-4b90-92cd-d8676444ef01-logs\") pod \"horizon-5ccf78dbdf-rqvtn\" (UID: \"3c3541c8-9738-4b90-92cd-d8676444ef01\") " pod="openstack/horizon-5ccf78dbdf-rqvtn" Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.207649 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3c3541c8-9738-4b90-92cd-d8676444ef01-scripts\") pod \"horizon-5ccf78dbdf-rqvtn\" (UID: \"3c3541c8-9738-4b90-92cd-d8676444ef01\") " pod="openstack/horizon-5ccf78dbdf-rqvtn" Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.210194 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3c3541c8-9738-4b90-92cd-d8676444ef01-config-data\") pod \"horizon-5ccf78dbdf-rqvtn\" (UID: \"3c3541c8-9738-4b90-92cd-d8676444ef01\") " pod="openstack/horizon-5ccf78dbdf-rqvtn" Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.212348 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3c3541c8-9738-4b90-92cd-d8676444ef01-horizon-secret-key\") pod \"horizon-5ccf78dbdf-rqvtn\" (UID: \"3c3541c8-9738-4b90-92cd-d8676444ef01\") " pod="openstack/horizon-5ccf78dbdf-rqvtn" Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.226571 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-4224m\" (UniqueName: \"kubernetes.io/projected/3c3541c8-9738-4b90-92cd-d8676444ef01-kube-api-access-4224m\") pod \"horizon-5ccf78dbdf-rqvtn\" (UID: \"3c3541c8-9738-4b90-92cd-d8676444ef01\") " pod="openstack/horizon-5ccf78dbdf-rqvtn" Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.282278 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5ccf78dbdf-rqvtn" Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.403453 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6ff9875fc-rngj5"] Jan 28 17:26:37 crc kubenswrapper[4811]: W0128 17:26:37.406594 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod651c91bb_dc5d_4416_bece_7647b1447487.slice/crio-89a336c08900baca27a5e544e3736cadb241af6df56847552bfdf2f3529ddcdb WatchSource:0}: Error finding container 89a336c08900baca27a5e544e3736cadb241af6df56847552bfdf2f3529ddcdb: Status 404 returned error can't find the container with id 89a336c08900baca27a5e544e3736cadb241af6df56847552bfdf2f3529ddcdb Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.621303 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-967dbf479-zj5bt"] Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.791881 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5ccf78dbdf-rqvtn"] Jan 28 17:26:37 crc kubenswrapper[4811]: W0128 17:26:37.791939 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3c3541c8_9738_4b90_92cd_d8676444ef01.slice/crio-35583025378a9e2bcfa17cfa2f7bcefd369c4fbf53acb841723ae0ceea9c8f7f WatchSource:0}: Error finding container 35583025378a9e2bcfa17cfa2f7bcefd369c4fbf53acb841723ae0ceea9c8f7f: Status 404 returned error can't find the container with id 35583025378a9e2bcfa17cfa2f7bcefd369c4fbf53acb841723ae0ceea9c8f7f Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.842550 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-967dbf479-zj5bt" event={"ID":"e3c24a39-106a-4e13-aae3-7e8399c05eb0","Type":"ContainerStarted","Data":"20bc8d1062f09e8d4c8bd783b053466717e410de5fb2d592d0edf655420f9f06"} Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.845022 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6ff9875fc-rngj5" event={"ID":"651c91bb-dc5d-4416-bece-7647b1447487","Type":"ContainerStarted","Data":"89a336c08900baca27a5e544e3736cadb241af6df56847552bfdf2f3529ddcdb"} Jan 28 17:26:37 crc kubenswrapper[4811]: I0128 17:26:37.847220 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5ccf78dbdf-rqvtn" event={"ID":"3c3541c8-9738-4b90-92cd-d8676444ef01","Type":"ContainerStarted","Data":"35583025378a9e2bcfa17cfa2f7bcefd369c4fbf53acb841723ae0ceea9c8f7f"} Jan 28 17:26:40 crc kubenswrapper[4811]: I0128 17:26:40.875798 4811 generic.go:334] "Generic (PLEG): container finished" podID="177ba4f4-b77d-49b6-b9af-37a5a5a872d7" containerID="0607aa5f56411773fa3b491b98ade32a2acf2e18273bcdc4177c03e51f447445" exitCode=0 Jan 28 17:26:40 crc kubenswrapper[4811]: I0128 17:26:40.875943 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"177ba4f4-b77d-49b6-b9af-37a5a5a872d7","Type":"ContainerDied","Data":"0607aa5f56411773fa3b491b98ade32a2acf2e18273bcdc4177c03e51f447445"} Jan 28 17:26:40 crc kubenswrapper[4811]: I0128 17:26:40.878446 
Jan 28 17:26:40 crc kubenswrapper[4811]: I0128 17:26:40.878475 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a1447823-7ddf-4d19-965e-d975ce17c540","Type":"ContainerDied","Data":"c8a168ad02dbb98b713cad79c536ca2d9670d0416b666f1dd5932b9145aa7cd5"}
Jan 28 17:26:41 crc kubenswrapper[4811]: I0128 17:26:41.031202 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-jjg5c"]
Jan 28 17:26:41 crc kubenswrapper[4811]: I0128 17:26:41.039117 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-jjg5c"]
Jan 28 17:26:42 crc kubenswrapper[4811]: I0128 17:26:42.354146 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1558ba0e-ed1b-48ab-9004-8f6caedf79fd" path="/var/lib/kubelet/pods/1558ba0e-ed1b-48ab-9004-8f6caedf79fd/volumes"
Jan 28 17:26:44 crc kubenswrapper[4811]: I0128 17:26:44.756934 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Jan 28 17:26:44 crc kubenswrapper[4811]: I0128 17:26:44.830803 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-combined-ca-bundle\") pod \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\" (UID: \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\") "
Jan 28 17:26:44 crc kubenswrapper[4811]: I0128 17:26:44.830869 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-httpd-run\") pod \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\" (UID: \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\") "
Jan 28 17:26:44 crc kubenswrapper[4811]: I0128 17:26:44.830942 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-logs\") pod \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\" (UID: \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\") "
Jan 28 17:26:44 crc kubenswrapper[4811]: I0128 17:26:44.830981 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-ceph\") pod \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\" (UID: \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\") "
Jan 28 17:26:44 crc kubenswrapper[4811]: I0128 17:26:44.831010 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-scripts\") pod \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\" (UID: \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\") "
Jan 28 17:26:44 crc kubenswrapper[4811]: I0128 17:26:44.831063 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-config-data\") pod \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\" (UID: \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\") "
Jan 28 17:26:44 crc kubenswrapper[4811]: I0128 17:26:44.831106 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tslgz\" (UniqueName: \"kubernetes.io/projected/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-kube-api-access-tslgz\") pod \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\" (UID: \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\") "
\"kubernetes.io/projected/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-kube-api-access-tslgz\") pod \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\" (UID: \"177ba4f4-b77d-49b6-b9af-37a5a5a872d7\") " Jan 28 17:26:44 crc kubenswrapper[4811]: I0128 17:26:44.832263 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-logs" (OuterVolumeSpecName: "logs") pod "177ba4f4-b77d-49b6-b9af-37a5a5a872d7" (UID: "177ba4f4-b77d-49b6-b9af-37a5a5a872d7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:26:44 crc kubenswrapper[4811]: I0128 17:26:44.832705 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "177ba4f4-b77d-49b6-b9af-37a5a5a872d7" (UID: "177ba4f4-b77d-49b6-b9af-37a5a5a872d7"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:26:44 crc kubenswrapper[4811]: I0128 17:26:44.835955 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-ceph" (OuterVolumeSpecName: "ceph") pod "177ba4f4-b77d-49b6-b9af-37a5a5a872d7" (UID: "177ba4f4-b77d-49b6-b9af-37a5a5a872d7"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:26:44 crc kubenswrapper[4811]: I0128 17:26:44.837573 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-scripts" (OuterVolumeSpecName: "scripts") pod "177ba4f4-b77d-49b6-b9af-37a5a5a872d7" (UID: "177ba4f4-b77d-49b6-b9af-37a5a5a872d7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:26:44 crc kubenswrapper[4811]: I0128 17:26:44.848598 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-kube-api-access-tslgz" (OuterVolumeSpecName: "kube-api-access-tslgz") pod "177ba4f4-b77d-49b6-b9af-37a5a5a872d7" (UID: "177ba4f4-b77d-49b6-b9af-37a5a5a872d7"). InnerVolumeSpecName "kube-api-access-tslgz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:26:44 crc kubenswrapper[4811]: I0128 17:26:44.933850 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tslgz\" (UniqueName: \"kubernetes.io/projected/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-kube-api-access-tslgz\") on node \"crc\" DevicePath \"\"" Jan 28 17:26:44 crc kubenswrapper[4811]: I0128 17:26:44.933902 4811 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 28 17:26:44 crc kubenswrapper[4811]: I0128 17:26:44.933917 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-logs\") on node \"crc\" DevicePath \"\"" Jan 28 17:26:44 crc kubenswrapper[4811]: I0128 17:26:44.933928 4811 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-ceph\") on node \"crc\" DevicePath \"\"" Jan 28 17:26:44 crc kubenswrapper[4811]: I0128 17:26:44.933940 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:26:44 crc kubenswrapper[4811]: I0128 17:26:44.939746 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6ff9875fc-rngj5" event={"ID":"651c91bb-dc5d-4416-bece-7647b1447487","Type":"ContainerStarted","Data":"c74b8adc383ce876d34feee9452fafdfbb43cdaaad075efa448839155d5776b9"} Jan 28 17:26:44 crc kubenswrapper[4811]: I0128 17:26:44.951670 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 28 17:26:44 crc kubenswrapper[4811]: I0128 17:26:44.951836 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"177ba4f4-b77d-49b6-b9af-37a5a5a872d7","Type":"ContainerDied","Data":"da52dda329d9ba877cab1e14273d7e9a307e19fea79fa38b74ded23f4c59c18c"} Jan 28 17:26:44 crc kubenswrapper[4811]: I0128 17:26:44.951894 4811 scope.go:117] "RemoveContainer" containerID="0607aa5f56411773fa3b491b98ade32a2acf2e18273bcdc4177c03e51f447445" Jan 28 17:26:44 crc kubenswrapper[4811]: I0128 17:26:44.957283 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5ccf78dbdf-rqvtn" event={"ID":"3c3541c8-9738-4b90-92cd-d8676444ef01","Type":"ContainerStarted","Data":"b3f2c6f6d78081092b8603383147e066d98fa0c30d74c32eab677f3241ecbd27"} Jan 28 17:26:44 crc kubenswrapper[4811]: I0128 17:26:44.959889 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "177ba4f4-b77d-49b6-b9af-37a5a5a872d7" (UID: "177ba4f4-b77d-49b6-b9af-37a5a5a872d7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:26:44 crc kubenswrapper[4811]: I0128 17:26:44.961296 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-967dbf479-zj5bt" event={"ID":"e3c24a39-106a-4e13-aae3-7e8399c05eb0","Type":"ContainerStarted","Data":"36424f4b376650d076b16b5351a70299f435ba81adfd8e6c03a834a38442cb37"} Jan 28 17:26:44 crc kubenswrapper[4811]: I0128 17:26:44.997635 4811 scope.go:117] "RemoveContainer" containerID="d1876907051ab6bbe91a76928452e9ab313dcf35ea99e23a38a39782b1e8ccdd" Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.037769 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.166167 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-config-data" (OuterVolumeSpecName: "config-data") pod "177ba4f4-b77d-49b6-b9af-37a5a5a872d7" (UID: "177ba4f4-b77d-49b6-b9af-37a5a5a872d7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.241415 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/177ba4f4-b77d-49b6-b9af-37a5a5a872d7-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.515715 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.539169 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.558161 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Jan 28 17:26:45 crc kubenswrapper[4811]: E0128 17:26:45.558729 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="177ba4f4-b77d-49b6-b9af-37a5a5a872d7" containerName="glance-log" Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.558747 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="177ba4f4-b77d-49b6-b9af-37a5a5a872d7" containerName="glance-log" Jan 28 17:26:45 crc kubenswrapper[4811]: E0128 17:26:45.558780 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="177ba4f4-b77d-49b6-b9af-37a5a5a872d7" containerName="glance-httpd" Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.558787 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="177ba4f4-b77d-49b6-b9af-37a5a5a872d7" containerName="glance-httpd" Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.559054 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="177ba4f4-b77d-49b6-b9af-37a5a5a872d7" containerName="glance-log" Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.559083 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="177ba4f4-b77d-49b6-b9af-37a5a5a872d7" containerName="glance-httpd" Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.560878 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.564941 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.605287 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.650134 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/6b46e8e8-aeb5-49f1-a79a-9dce013bddf0-ceph\") pod \"glance-default-external-api-0\" (UID: \"6b46e8e8-aeb5-49f1-a79a-9dce013bddf0\") " pod="openstack/glance-default-external-api-0" Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.650222 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6b46e8e8-aeb5-49f1-a79a-9dce013bddf0-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"6b46e8e8-aeb5-49f1-a79a-9dce013bddf0\") " pod="openstack/glance-default-external-api-0" Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.650280 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b46e8e8-aeb5-49f1-a79a-9dce013bddf0-scripts\") pod \"glance-default-external-api-0\" (UID: \"6b46e8e8-aeb5-49f1-a79a-9dce013bddf0\") " pod="openstack/glance-default-external-api-0" Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.650465 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b46e8e8-aeb5-49f1-a79a-9dce013bddf0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"6b46e8e8-aeb5-49f1-a79a-9dce013bddf0\") " pod="openstack/glance-default-external-api-0" Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.650617 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b46e8e8-aeb5-49f1-a79a-9dce013bddf0-config-data\") pod \"glance-default-external-api-0\" (UID: \"6b46e8e8-aeb5-49f1-a79a-9dce013bddf0\") " pod="openstack/glance-default-external-api-0" Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.650644 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b46e8e8-aeb5-49f1-a79a-9dce013bddf0-logs\") pod \"glance-default-external-api-0\" (UID: \"6b46e8e8-aeb5-49f1-a79a-9dce013bddf0\") " pod="openstack/glance-default-external-api-0" Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.650709 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nq5j9\" (UniqueName: \"kubernetes.io/projected/6b46e8e8-aeb5-49f1-a79a-9dce013bddf0-kube-api-access-nq5j9\") pod \"glance-default-external-api-0\" (UID: \"6b46e8e8-aeb5-49f1-a79a-9dce013bddf0\") " pod="openstack/glance-default-external-api-0" Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.751239 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6b46e8e8-aeb5-49f1-a79a-9dce013bddf0-httpd-run\") pod \"glance-default-external-api-0\" (UID: 
\"6b46e8e8-aeb5-49f1-a79a-9dce013bddf0\") " pod="openstack/glance-default-external-api-0" Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.751298 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b46e8e8-aeb5-49f1-a79a-9dce013bddf0-scripts\") pod \"glance-default-external-api-0\" (UID: \"6b46e8e8-aeb5-49f1-a79a-9dce013bddf0\") " pod="openstack/glance-default-external-api-0" Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.751351 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b46e8e8-aeb5-49f1-a79a-9dce013bddf0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"6b46e8e8-aeb5-49f1-a79a-9dce013bddf0\") " pod="openstack/glance-default-external-api-0" Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.751379 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b46e8e8-aeb5-49f1-a79a-9dce013bddf0-config-data\") pod \"glance-default-external-api-0\" (UID: \"6b46e8e8-aeb5-49f1-a79a-9dce013bddf0\") " pod="openstack/glance-default-external-api-0" Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.751399 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b46e8e8-aeb5-49f1-a79a-9dce013bddf0-logs\") pod \"glance-default-external-api-0\" (UID: \"6b46e8e8-aeb5-49f1-a79a-9dce013bddf0\") " pod="openstack/glance-default-external-api-0" Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.751524 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nq5j9\" (UniqueName: \"kubernetes.io/projected/6b46e8e8-aeb5-49f1-a79a-9dce013bddf0-kube-api-access-nq5j9\") pod \"glance-default-external-api-0\" (UID: \"6b46e8e8-aeb5-49f1-a79a-9dce013bddf0\") " pod="openstack/glance-default-external-api-0" Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.751578 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/6b46e8e8-aeb5-49f1-a79a-9dce013bddf0-ceph\") pod \"glance-default-external-api-0\" (UID: \"6b46e8e8-aeb5-49f1-a79a-9dce013bddf0\") " pod="openstack/glance-default-external-api-0" Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.751719 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6b46e8e8-aeb5-49f1-a79a-9dce013bddf0-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"6b46e8e8-aeb5-49f1-a79a-9dce013bddf0\") " pod="openstack/glance-default-external-api-0" Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.752423 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b46e8e8-aeb5-49f1-a79a-9dce013bddf0-logs\") pod \"glance-default-external-api-0\" (UID: \"6b46e8e8-aeb5-49f1-a79a-9dce013bddf0\") " pod="openstack/glance-default-external-api-0" Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.757531 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b46e8e8-aeb5-49f1-a79a-9dce013bddf0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"6b46e8e8-aeb5-49f1-a79a-9dce013bddf0\") " pod="openstack/glance-default-external-api-0" Jan 28 17:26:45 crc kubenswrapper[4811]: 
Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.758709 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b46e8e8-aeb5-49f1-a79a-9dce013bddf0-scripts\") pod \"glance-default-external-api-0\" (UID: \"6b46e8e8-aeb5-49f1-a79a-9dce013bddf0\") " pod="openstack/glance-default-external-api-0"
Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.761768 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/6b46e8e8-aeb5-49f1-a79a-9dce013bddf0-ceph\") pod \"glance-default-external-api-0\" (UID: \"6b46e8e8-aeb5-49f1-a79a-9dce013bddf0\") " pod="openstack/glance-default-external-api-0"
Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.769018 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nq5j9\" (UniqueName: \"kubernetes.io/projected/6b46e8e8-aeb5-49f1-a79a-9dce013bddf0-kube-api-access-nq5j9\") pod \"glance-default-external-api-0\" (UID: \"6b46e8e8-aeb5-49f1-a79a-9dce013bddf0\") " pod="openstack/glance-default-external-api-0"
Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.862787 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.894415 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.979789 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5ccf78dbdf-rqvtn" event={"ID":"3c3541c8-9738-4b90-92cd-d8676444ef01","Type":"ContainerStarted","Data":"d745390f451d5e572a29326b925407075300afd39e37d98e60d311db5c5c612d"}
Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.983719 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-967dbf479-zj5bt" event={"ID":"e3c24a39-106a-4e13-aae3-7e8399c05eb0","Type":"ContainerStarted","Data":"7384da6fec1e74e9ef1e16b0a6c26184c2ad03dc865677fff64514520e72926a"}
Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.992249 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a1447823-7ddf-4d19-965e-d975ce17c540","Type":"ContainerDied","Data":"b8b4d87081eecaa4b641e2cfb0c48bf528871add99225e6beff5fb6d91130e29"}
Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.992300 4811 scope.go:117] "RemoveContainer" containerID="c8a168ad02dbb98b713cad79c536ca2d9670d0416b666f1dd5932b9145aa7cd5"
Jan 28 17:26:45 crc kubenswrapper[4811]: I0128 17:26:45.992391 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.010622 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6ff9875fc-rngj5" event={"ID":"651c91bb-dc5d-4416-bece-7647b1447487","Type":"ContainerStarted","Data":"148cfb4453c8e090409666bec623e76d633377d73cc4fcc15e7f7689b9121fc9"} Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.010796 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6ff9875fc-rngj5" podUID="651c91bb-dc5d-4416-bece-7647b1447487" containerName="horizon-log" containerID="cri-o://c74b8adc383ce876d34feee9452fafdfbb43cdaaad075efa448839155d5776b9" gracePeriod=30 Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.013942 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-5ccf78dbdf-rqvtn" podStartSLOduration=3.408483462 podStartE2EDuration="10.013924132s" podCreationTimestamp="2026-01-28 17:26:36 +0000 UTC" firstStartedPulling="2026-01-28 17:26:37.795733799 +0000 UTC m=+6090.550097382" lastFinishedPulling="2026-01-28 17:26:44.401174469 +0000 UTC m=+6097.155538052" observedRunningTime="2026-01-28 17:26:46.001022041 +0000 UTC m=+6098.755385624" watchObservedRunningTime="2026-01-28 17:26:46.013924132 +0000 UTC m=+6098.768287715" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.018055 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6ff9875fc-rngj5" podUID="651c91bb-dc5d-4416-bece-7647b1447487" containerName="horizon" containerID="cri-o://148cfb4453c8e090409666bec623e76d633377d73cc4fcc15e7f7689b9121fc9" gracePeriod=30 Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.030286 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-967dbf479-zj5bt" podStartSLOduration=3.236147859 podStartE2EDuration="10.030262876s" podCreationTimestamp="2026-01-28 17:26:36 +0000 UTC" firstStartedPulling="2026-01-28 17:26:37.625405081 +0000 UTC m=+6090.379768654" lastFinishedPulling="2026-01-28 17:26:44.419520088 +0000 UTC m=+6097.173883671" observedRunningTime="2026-01-28 17:26:46.022230368 +0000 UTC m=+6098.776593951" watchObservedRunningTime="2026-01-28 17:26:46.030262876 +0000 UTC m=+6098.784626459" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.053979 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-6ff9875fc-rngj5" podStartSLOduration=4.096525716 podStartE2EDuration="11.053960442s" podCreationTimestamp="2026-01-28 17:26:35 +0000 UTC" firstStartedPulling="2026-01-28 17:26:37.415510784 +0000 UTC m=+6090.169874367" lastFinishedPulling="2026-01-28 17:26:44.3729455 +0000 UTC m=+6097.127309093" observedRunningTime="2026-01-28 17:26:46.051585908 +0000 UTC m=+6098.805949491" watchObservedRunningTime="2026-01-28 17:26:46.053960442 +0000 UTC m=+6098.808324025" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.055423 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1447823-7ddf-4d19-965e-d975ce17c540-scripts\") pod \"a1447823-7ddf-4d19-965e-d975ce17c540\" (UID: \"a1447823-7ddf-4d19-965e-d975ce17c540\") " Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.055524 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k6rpg\" (UniqueName: 
\"kubernetes.io/projected/a1447823-7ddf-4d19-965e-d975ce17c540-kube-api-access-k6rpg\") pod \"a1447823-7ddf-4d19-965e-d975ce17c540\" (UID: \"a1447823-7ddf-4d19-965e-d975ce17c540\") " Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.055605 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/a1447823-7ddf-4d19-965e-d975ce17c540-ceph\") pod \"a1447823-7ddf-4d19-965e-d975ce17c540\" (UID: \"a1447823-7ddf-4d19-965e-d975ce17c540\") " Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.055644 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1447823-7ddf-4d19-965e-d975ce17c540-combined-ca-bundle\") pod \"a1447823-7ddf-4d19-965e-d975ce17c540\" (UID: \"a1447823-7ddf-4d19-965e-d975ce17c540\") " Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.055719 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1447823-7ddf-4d19-965e-d975ce17c540-logs\") pod \"a1447823-7ddf-4d19-965e-d975ce17c540\" (UID: \"a1447823-7ddf-4d19-965e-d975ce17c540\") " Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.056337 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1447823-7ddf-4d19-965e-d975ce17c540-config-data\") pod \"a1447823-7ddf-4d19-965e-d975ce17c540\" (UID: \"a1447823-7ddf-4d19-965e-d975ce17c540\") " Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.056358 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a1447823-7ddf-4d19-965e-d975ce17c540-httpd-run\") pod \"a1447823-7ddf-4d19-965e-d975ce17c540\" (UID: \"a1447823-7ddf-4d19-965e-d975ce17c540\") " Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.064742 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1447823-7ddf-4d19-965e-d975ce17c540-logs" (OuterVolumeSpecName: "logs") pod "a1447823-7ddf-4d19-965e-d975ce17c540" (UID: "a1447823-7ddf-4d19-965e-d975ce17c540"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.064794 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1447823-7ddf-4d19-965e-d975ce17c540-scripts" (OuterVolumeSpecName: "scripts") pod "a1447823-7ddf-4d19-965e-d975ce17c540" (UID: "a1447823-7ddf-4d19-965e-d975ce17c540"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.066087 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1447823-7ddf-4d19-965e-d975ce17c540-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "a1447823-7ddf-4d19-965e-d975ce17c540" (UID: "a1447823-7ddf-4d19-965e-d975ce17c540"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.066261 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1447823-7ddf-4d19-965e-d975ce17c540-ceph" (OuterVolumeSpecName: "ceph") pod "a1447823-7ddf-4d19-965e-d975ce17c540" (UID: "a1447823-7ddf-4d19-965e-d975ce17c540"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.072299 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1447823-7ddf-4d19-965e-d975ce17c540-kube-api-access-k6rpg" (OuterVolumeSpecName: "kube-api-access-k6rpg") pod "a1447823-7ddf-4d19-965e-d975ce17c540" (UID: "a1447823-7ddf-4d19-965e-d975ce17c540"). InnerVolumeSpecName "kube-api-access-k6rpg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.083356 4811 scope.go:117] "RemoveContainer" containerID="5b051ae206aa0aa155efec89b12be9b53147d8c5310b0ee0757ddca087b8b7d2" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.109866 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1447823-7ddf-4d19-965e-d975ce17c540-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a1447823-7ddf-4d19-965e-d975ce17c540" (UID: "a1447823-7ddf-4d19-965e-d975ce17c540"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.162379 4811 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/a1447823-7ddf-4d19-965e-d975ce17c540-ceph\") on node \"crc\" DevicePath \"\"" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.162461 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1447823-7ddf-4d19-965e-d975ce17c540-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.162471 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1447823-7ddf-4d19-965e-d975ce17c540-logs\") on node \"crc\" DevicePath \"\"" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.162481 4811 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a1447823-7ddf-4d19-965e-d975ce17c540-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.162490 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1447823-7ddf-4d19-965e-d975ce17c540-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.162498 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k6rpg\" (UniqueName: \"kubernetes.io/projected/a1447823-7ddf-4d19-965e-d975ce17c540-kube-api-access-k6rpg\") on node \"crc\" DevicePath \"\"" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.164567 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1447823-7ddf-4d19-965e-d975ce17c540-config-data" (OuterVolumeSpecName: "config-data") pod "a1447823-7ddf-4d19-965e-d975ce17c540" (UID: "a1447823-7ddf-4d19-965e-d975ce17c540"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.263964 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1447823-7ddf-4d19-965e-d975ce17c540-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.332119 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.385386 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="177ba4f4-b77d-49b6-b9af-37a5a5a872d7" path="/var/lib/kubelet/pods/177ba4f4-b77d-49b6-b9af-37a5a5a872d7/volumes" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.386254 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.386291 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 28 17:26:46 crc kubenswrapper[4811]: E0128 17:26:46.386904 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1447823-7ddf-4d19-965e-d975ce17c540" containerName="glance-log" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.386926 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1447823-7ddf-4d19-965e-d975ce17c540" containerName="glance-log" Jan 28 17:26:46 crc kubenswrapper[4811]: E0128 17:26:46.386947 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1447823-7ddf-4d19-965e-d975ce17c540" containerName="glance-httpd" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.386954 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1447823-7ddf-4d19-965e-d975ce17c540" containerName="glance-httpd" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.387299 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1447823-7ddf-4d19-965e-d975ce17c540" containerName="glance-httpd" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.387336 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1447823-7ddf-4d19-965e-d975ce17c540" containerName="glance-log" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.400321 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.419308 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.441215 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.477817 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/62295afa-2362-41cd-95f4-8b1519e053d4-ceph\") pod \"glance-default-internal-api-0\" (UID: \"62295afa-2362-41cd-95f4-8b1519e053d4\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.477927 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62295afa-2362-41cd-95f4-8b1519e053d4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"62295afa-2362-41cd-95f4-8b1519e053d4\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.478023 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62295afa-2362-41cd-95f4-8b1519e053d4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"62295afa-2362-41cd-95f4-8b1519e053d4\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.478094 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62295afa-2362-41cd-95f4-8b1519e053d4-logs\") pod \"glance-default-internal-api-0\" (UID: \"62295afa-2362-41cd-95f4-8b1519e053d4\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.478125 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62295afa-2362-41cd-95f4-8b1519e053d4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"62295afa-2362-41cd-95f4-8b1519e053d4\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.478162 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfz4p\" (UniqueName: \"kubernetes.io/projected/62295afa-2362-41cd-95f4-8b1519e053d4-kube-api-access-vfz4p\") pod \"glance-default-internal-api-0\" (UID: \"62295afa-2362-41cd-95f4-8b1519e053d4\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.478224 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/62295afa-2362-41cd-95f4-8b1519e053d4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"62295afa-2362-41cd-95f4-8b1519e053d4\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:26:46 crc kubenswrapper[4811]: W0128 17:26:46.550078 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b46e8e8_aeb5_49f1_a79a_9dce013bddf0.slice/crio-7ae6acf456dc741cb9abca9c6aa241a7521668af03d115084b3b827c3023252c 
WatchSource:0}: Error finding container 7ae6acf456dc741cb9abca9c6aa241a7521668af03d115084b3b827c3023252c: Status 404 returned error can't find the container with id 7ae6acf456dc741cb9abca9c6aa241a7521668af03d115084b3b827c3023252c Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.561406 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.580797 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62295afa-2362-41cd-95f4-8b1519e053d4-logs\") pod \"glance-default-internal-api-0\" (UID: \"62295afa-2362-41cd-95f4-8b1519e053d4\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.580867 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62295afa-2362-41cd-95f4-8b1519e053d4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"62295afa-2362-41cd-95f4-8b1519e053d4\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.580887 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vfz4p\" (UniqueName: \"kubernetes.io/projected/62295afa-2362-41cd-95f4-8b1519e053d4-kube-api-access-vfz4p\") pod \"glance-default-internal-api-0\" (UID: \"62295afa-2362-41cd-95f4-8b1519e053d4\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.580950 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/62295afa-2362-41cd-95f4-8b1519e053d4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"62295afa-2362-41cd-95f4-8b1519e053d4\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.580993 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/62295afa-2362-41cd-95f4-8b1519e053d4-ceph\") pod \"glance-default-internal-api-0\" (UID: \"62295afa-2362-41cd-95f4-8b1519e053d4\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.581025 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62295afa-2362-41cd-95f4-8b1519e053d4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"62295afa-2362-41cd-95f4-8b1519e053d4\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.581130 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62295afa-2362-41cd-95f4-8b1519e053d4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"62295afa-2362-41cd-95f4-8b1519e053d4\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.581397 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62295afa-2362-41cd-95f4-8b1519e053d4-logs\") pod \"glance-default-internal-api-0\" (UID: \"62295afa-2362-41cd-95f4-8b1519e053d4\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.581419 4811 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/62295afa-2362-41cd-95f4-8b1519e053d4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"62295afa-2362-41cd-95f4-8b1519e053d4\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.586015 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62295afa-2362-41cd-95f4-8b1519e053d4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"62295afa-2362-41cd-95f4-8b1519e053d4\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.587138 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/62295afa-2362-41cd-95f4-8b1519e053d4-ceph\") pod \"glance-default-internal-api-0\" (UID: \"62295afa-2362-41cd-95f4-8b1519e053d4\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.588709 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62295afa-2362-41cd-95f4-8b1519e053d4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"62295afa-2362-41cd-95f4-8b1519e053d4\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.598458 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62295afa-2362-41cd-95f4-8b1519e053d4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"62295afa-2362-41cd-95f4-8b1519e053d4\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.603530 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vfz4p\" (UniqueName: \"kubernetes.io/projected/62295afa-2362-41cd-95f4-8b1519e053d4-kube-api-access-vfz4p\") pod \"glance-default-internal-api-0\" (UID: \"62295afa-2362-41cd-95f4-8b1519e053d4\") " pod="openstack/glance-default-internal-api-0" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.745078 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 28 17:26:46 crc kubenswrapper[4811]: I0128 17:26:46.828197 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6ff9875fc-rngj5" Jan 28 17:26:47 crc kubenswrapper[4811]: I0128 17:26:47.036282 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6b46e8e8-aeb5-49f1-a79a-9dce013bddf0","Type":"ContainerStarted","Data":"7ae6acf456dc741cb9abca9c6aa241a7521668af03d115084b3b827c3023252c"} Jan 28 17:26:47 crc kubenswrapper[4811]: I0128 17:26:47.139209 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-967dbf479-zj5bt" Jan 28 17:26:47 crc kubenswrapper[4811]: I0128 17:26:47.139274 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-967dbf479-zj5bt" Jan 28 17:26:47 crc kubenswrapper[4811]: I0128 17:26:47.282541 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5ccf78dbdf-rqvtn" Jan 28 17:26:47 crc kubenswrapper[4811]: I0128 17:26:47.282590 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-5ccf78dbdf-rqvtn" Jan 28 17:26:47 crc kubenswrapper[4811]: I0128 17:26:47.407290 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 28 17:26:48 crc kubenswrapper[4811]: I0128 17:26:48.060071 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"62295afa-2362-41cd-95f4-8b1519e053d4","Type":"ContainerStarted","Data":"eb7a8c29043701b53b68bb781b1ab58d709d134e51d3896030fd1fca9da0c30d"} Jan 28 17:26:48 crc kubenswrapper[4811]: I0128 17:26:48.061229 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"62295afa-2362-41cd-95f4-8b1519e053d4","Type":"ContainerStarted","Data":"8bbc66e9f35944e244f0aaacc398c53e9c4cf39f0f7432174824edf20a9c8aaa"} Jan 28 17:26:48 crc kubenswrapper[4811]: I0128 17:26:48.071611 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6b46e8e8-aeb5-49f1-a79a-9dce013bddf0","Type":"ContainerStarted","Data":"e231ebd0725cf210999f928eb36858f64b70f69b39b4c3a95a6fb043a4966f7e"} Jan 28 17:26:48 crc kubenswrapper[4811]: I0128 17:26:48.071677 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6b46e8e8-aeb5-49f1-a79a-9dce013bddf0","Type":"ContainerStarted","Data":"ad5b0868d9ffcd71d347413deb347f0deac4a450d5682c366c9e34032d79242c"} Jan 28 17:26:48 crc kubenswrapper[4811]: I0128 17:26:48.388654 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1447823-7ddf-4d19-965e-d975ce17c540" path="/var/lib/kubelet/pods/a1447823-7ddf-4d19-965e-d975ce17c540/volumes" Jan 28 17:26:48 crc kubenswrapper[4811]: I0128 17:26:48.439655 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.439635806 podStartE2EDuration="3.439635806s" podCreationTimestamp="2026-01-28 17:26:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:26:48.103514051 +0000 UTC m=+6100.857877634" watchObservedRunningTime="2026-01-28 17:26:48.439635806 +0000 UTC m=+6101.193999389" Jan 28 17:26:49 crc kubenswrapper[4811]: 
I0128 17:26:49.080805 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"62295afa-2362-41cd-95f4-8b1519e053d4","Type":"ContainerStarted","Data":"945bda2f2a26f50cc6bd0e63c6a46b593ecb2db8063fd492eabdf77cdfda9912"} Jan 28 17:26:49 crc kubenswrapper[4811]: I0128 17:26:49.125833 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.125810564 podStartE2EDuration="3.125810564s" podCreationTimestamp="2026-01-28 17:26:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:26:49.108056251 +0000 UTC m=+6101.862419844" watchObservedRunningTime="2026-01-28 17:26:49.125810564 +0000 UTC m=+6101.880174147" Jan 28 17:26:55 crc kubenswrapper[4811]: I0128 17:26:55.896233 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 28 17:26:55 crc kubenswrapper[4811]: I0128 17:26:55.896820 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 28 17:26:55 crc kubenswrapper[4811]: I0128 17:26:55.946180 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 28 17:26:55 crc kubenswrapper[4811]: I0128 17:26:55.949311 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 28 17:26:56 crc kubenswrapper[4811]: I0128 17:26:56.146516 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 28 17:26:56 crc kubenswrapper[4811]: I0128 17:26:56.147579 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 28 17:26:56 crc kubenswrapper[4811]: I0128 17:26:56.745707 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 28 17:26:56 crc kubenswrapper[4811]: I0128 17:26:56.745766 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 28 17:26:56 crc kubenswrapper[4811]: I0128 17:26:56.783763 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 28 17:26:56 crc kubenswrapper[4811]: I0128 17:26:56.797465 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 28 17:26:57 crc kubenswrapper[4811]: I0128 17:26:57.140067 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-967dbf479-zj5bt" podUID="e3c24a39-106a-4e13-aae3-7e8399c05eb0" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.112:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.112:8080: connect: connection refused" Jan 28 17:26:57 crc kubenswrapper[4811]: I0128 17:26:57.159816 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 28 17:26:57 crc kubenswrapper[4811]: I0128 17:26:57.159891 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 28 17:26:57 crc kubenswrapper[4811]: I0128 17:26:57.286335 4811 prober.go:107] "Probe failed" probeType="Startup" 
pod="openstack/horizon-5ccf78dbdf-rqvtn" podUID="3c3541c8-9738-4b90-92cd-d8676444ef01" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.113:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.113:8080: connect: connection refused" Jan 28 17:26:58 crc kubenswrapper[4811]: I0128 17:26:58.166964 4811 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 28 17:26:58 crc kubenswrapper[4811]: I0128 17:26:58.166996 4811 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 28 17:26:59 crc kubenswrapper[4811]: I0128 17:26:59.174940 4811 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 28 17:26:59 crc kubenswrapper[4811]: I0128 17:26:59.175246 4811 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 28 17:27:00 crc kubenswrapper[4811]: I0128 17:27:00.026157 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 28 17:27:00 crc kubenswrapper[4811]: I0128 17:27:00.026687 4811 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 28 17:27:00 crc kubenswrapper[4811]: I0128 17:27:00.034335 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 28 17:27:00 crc kubenswrapper[4811]: I0128 17:27:00.118158 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 28 17:27:00 crc kubenswrapper[4811]: I0128 17:27:00.190651 4811 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 28 17:27:00 crc kubenswrapper[4811]: I0128 17:27:00.302954 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 28 17:27:03 crc kubenswrapper[4811]: I0128 17:27:03.087878 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 17:27:03 crc kubenswrapper[4811]: I0128 17:27:03.088230 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 17:27:09 crc kubenswrapper[4811]: I0128 17:27:09.041121 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-z7rnj"] Jan 28 17:27:09 crc kubenswrapper[4811]: I0128 17:27:09.053173 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-z7rnj"] Jan 28 17:27:09 crc kubenswrapper[4811]: I0128 17:27:09.065881 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-967dbf479-zj5bt" Jan 28 17:27:09 crc kubenswrapper[4811]: I0128 17:27:09.193000 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-5ccf78dbdf-rqvtn" Jan 28 17:27:10 crc kubenswrapper[4811]: I0128 17:27:10.030067 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-f1b2-account-create-update-td8jl"] Jan 28 17:27:10 crc kubenswrapper[4811]: I0128 17:27:10.040330 4811 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openstack/neutron-f1b2-account-create-update-td8jl"] Jan 28 17:27:10 crc kubenswrapper[4811]: I0128 17:27:10.351509 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13ae10f7-7978-4310-94ff-784e15f4b9ee" path="/var/lib/kubelet/pods/13ae10f7-7978-4310-94ff-784e15f4b9ee/volumes" Jan 28 17:27:10 crc kubenswrapper[4811]: I0128 17:27:10.353356 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89" path="/var/lib/kubelet/pods/3b1ed9d8-7d6d-4411-9ff4-1723f1eaaf89/volumes" Jan 28 17:27:10 crc kubenswrapper[4811]: I0128 17:27:10.749208 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-967dbf479-zj5bt" Jan 28 17:27:10 crc kubenswrapper[4811]: I0128 17:27:10.954087 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-5ccf78dbdf-rqvtn" Jan 28 17:27:11 crc kubenswrapper[4811]: I0128 17:27:11.014353 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-967dbf479-zj5bt"] Jan 28 17:27:11 crc kubenswrapper[4811]: I0128 17:27:11.296622 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-967dbf479-zj5bt" podUID="e3c24a39-106a-4e13-aae3-7e8399c05eb0" containerName="horizon-log" containerID="cri-o://36424f4b376650d076b16b5351a70299f435ba81adfd8e6c03a834a38442cb37" gracePeriod=30 Jan 28 17:27:11 crc kubenswrapper[4811]: I0128 17:27:11.297158 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-967dbf479-zj5bt" podUID="e3c24a39-106a-4e13-aae3-7e8399c05eb0" containerName="horizon" containerID="cri-o://7384da6fec1e74e9ef1e16b0a6c26184c2ad03dc865677fff64514520e72926a" gracePeriod=30 Jan 28 17:27:15 crc kubenswrapper[4811]: I0128 17:27:15.340758 4811 generic.go:334] "Generic (PLEG): container finished" podID="e3c24a39-106a-4e13-aae3-7e8399c05eb0" containerID="7384da6fec1e74e9ef1e16b0a6c26184c2ad03dc865677fff64514520e72926a" exitCode=0 Jan 28 17:27:15 crc kubenswrapper[4811]: I0128 17:27:15.340857 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-967dbf479-zj5bt" event={"ID":"e3c24a39-106a-4e13-aae3-7e8399c05eb0","Type":"ContainerDied","Data":"7384da6fec1e74e9ef1e16b0a6c26184c2ad03dc865677fff64514520e72926a"} Jan 28 17:27:16 crc kubenswrapper[4811]: I0128 17:27:16.358862 4811 generic.go:334] "Generic (PLEG): container finished" podID="651c91bb-dc5d-4416-bece-7647b1447487" containerID="148cfb4453c8e090409666bec623e76d633377d73cc4fcc15e7f7689b9121fc9" exitCode=137 Jan 28 17:27:16 crc kubenswrapper[4811]: I0128 17:27:16.359387 4811 generic.go:334] "Generic (PLEG): container finished" podID="651c91bb-dc5d-4416-bece-7647b1447487" containerID="c74b8adc383ce876d34feee9452fafdfbb43cdaaad075efa448839155d5776b9" exitCode=137 Jan 28 17:27:16 crc kubenswrapper[4811]: I0128 17:27:16.359099 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6ff9875fc-rngj5" event={"ID":"651c91bb-dc5d-4416-bece-7647b1447487","Type":"ContainerDied","Data":"148cfb4453c8e090409666bec623e76d633377d73cc4fcc15e7f7689b9121fc9"} Jan 28 17:27:16 crc kubenswrapper[4811]: I0128 17:27:16.361662 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6ff9875fc-rngj5" event={"ID":"651c91bb-dc5d-4416-bece-7647b1447487","Type":"ContainerDied","Data":"c74b8adc383ce876d34feee9452fafdfbb43cdaaad075efa448839155d5776b9"} Jan 28 17:27:16 crc kubenswrapper[4811]: I0128 17:27:16.507517 4811 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6ff9875fc-rngj5" Jan 28 17:27:16 crc kubenswrapper[4811]: I0128 17:27:16.632571 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/651c91bb-dc5d-4416-bece-7647b1447487-logs\") pod \"651c91bb-dc5d-4416-bece-7647b1447487\" (UID: \"651c91bb-dc5d-4416-bece-7647b1447487\") " Jan 28 17:27:16 crc kubenswrapper[4811]: I0128 17:27:16.632960 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/651c91bb-dc5d-4416-bece-7647b1447487-scripts\") pod \"651c91bb-dc5d-4416-bece-7647b1447487\" (UID: \"651c91bb-dc5d-4416-bece-7647b1447487\") " Jan 28 17:27:16 crc kubenswrapper[4811]: I0128 17:27:16.633037 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/651c91bb-dc5d-4416-bece-7647b1447487-logs" (OuterVolumeSpecName: "logs") pod "651c91bb-dc5d-4416-bece-7647b1447487" (UID: "651c91bb-dc5d-4416-bece-7647b1447487"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:27:16 crc kubenswrapper[4811]: I0128 17:27:16.633217 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/651c91bb-dc5d-4416-bece-7647b1447487-horizon-secret-key\") pod \"651c91bb-dc5d-4416-bece-7647b1447487\" (UID: \"651c91bb-dc5d-4416-bece-7647b1447487\") " Jan 28 17:27:16 crc kubenswrapper[4811]: I0128 17:27:16.633312 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tbm7w\" (UniqueName: \"kubernetes.io/projected/651c91bb-dc5d-4416-bece-7647b1447487-kube-api-access-tbm7w\") pod \"651c91bb-dc5d-4416-bece-7647b1447487\" (UID: \"651c91bb-dc5d-4416-bece-7647b1447487\") " Jan 28 17:27:16 crc kubenswrapper[4811]: I0128 17:27:16.633500 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/651c91bb-dc5d-4416-bece-7647b1447487-config-data\") pod \"651c91bb-dc5d-4416-bece-7647b1447487\" (UID: \"651c91bb-dc5d-4416-bece-7647b1447487\") " Jan 28 17:27:16 crc kubenswrapper[4811]: I0128 17:27:16.634315 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/651c91bb-dc5d-4416-bece-7647b1447487-logs\") on node \"crc\" DevicePath \"\"" Jan 28 17:27:16 crc kubenswrapper[4811]: I0128 17:27:16.638616 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/651c91bb-dc5d-4416-bece-7647b1447487-kube-api-access-tbm7w" (OuterVolumeSpecName: "kube-api-access-tbm7w") pod "651c91bb-dc5d-4416-bece-7647b1447487" (UID: "651c91bb-dc5d-4416-bece-7647b1447487"). InnerVolumeSpecName "kube-api-access-tbm7w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:27:16 crc kubenswrapper[4811]: I0128 17:27:16.638674 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/651c91bb-dc5d-4416-bece-7647b1447487-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "651c91bb-dc5d-4416-bece-7647b1447487" (UID: "651c91bb-dc5d-4416-bece-7647b1447487"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:27:16 crc kubenswrapper[4811]: I0128 17:27:16.661506 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/651c91bb-dc5d-4416-bece-7647b1447487-scripts" (OuterVolumeSpecName: "scripts") pod "651c91bb-dc5d-4416-bece-7647b1447487" (UID: "651c91bb-dc5d-4416-bece-7647b1447487"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:27:16 crc kubenswrapper[4811]: I0128 17:27:16.663211 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/651c91bb-dc5d-4416-bece-7647b1447487-config-data" (OuterVolumeSpecName: "config-data") pod "651c91bb-dc5d-4416-bece-7647b1447487" (UID: "651c91bb-dc5d-4416-bece-7647b1447487"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:27:16 crc kubenswrapper[4811]: I0128 17:27:16.735953 4811 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/651c91bb-dc5d-4416-bece-7647b1447487-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Jan 28 17:27:16 crc kubenswrapper[4811]: I0128 17:27:16.736214 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tbm7w\" (UniqueName: \"kubernetes.io/projected/651c91bb-dc5d-4416-bece-7647b1447487-kube-api-access-tbm7w\") on node \"crc\" DevicePath \"\"" Jan 28 17:27:16 crc kubenswrapper[4811]: I0128 17:27:16.736227 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/651c91bb-dc5d-4416-bece-7647b1447487-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 17:27:16 crc kubenswrapper[4811]: I0128 17:27:16.736235 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/651c91bb-dc5d-4416-bece-7647b1447487-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:27:17 crc kubenswrapper[4811]: I0128 17:27:17.138644 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-967dbf479-zj5bt" podUID="e3c24a39-106a-4e13-aae3-7e8399c05eb0" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.112:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.112:8080: connect: connection refused" Jan 28 17:27:17 crc kubenswrapper[4811]: I0128 17:27:17.372352 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6ff9875fc-rngj5" event={"ID":"651c91bb-dc5d-4416-bece-7647b1447487","Type":"ContainerDied","Data":"89a336c08900baca27a5e544e3736cadb241af6df56847552bfdf2f3529ddcdb"} Jan 28 17:27:17 crc kubenswrapper[4811]: I0128 17:27:17.372449 4811 scope.go:117] "RemoveContainer" containerID="148cfb4453c8e090409666bec623e76d633377d73cc4fcc15e7f7689b9121fc9" Jan 28 17:27:17 crc kubenswrapper[4811]: I0128 17:27:17.372459 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6ff9875fc-rngj5" Jan 28 17:27:17 crc kubenswrapper[4811]: I0128 17:27:17.429100 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6ff9875fc-rngj5"] Jan 28 17:27:17 crc kubenswrapper[4811]: I0128 17:27:17.440243 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6ff9875fc-rngj5"] Jan 28 17:27:17 crc kubenswrapper[4811]: I0128 17:27:17.587367 4811 scope.go:117] "RemoveContainer" containerID="c74b8adc383ce876d34feee9452fafdfbb43cdaaad075efa448839155d5776b9" Jan 28 17:27:18 crc kubenswrapper[4811]: I0128 17:27:18.351675 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="651c91bb-dc5d-4416-bece-7647b1447487" path="/var/lib/kubelet/pods/651c91bb-dc5d-4416-bece-7647b1447487/volumes" Jan 28 17:27:22 crc kubenswrapper[4811]: I0128 17:27:22.091665 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-5fgn8"] Jan 28 17:27:22 crc kubenswrapper[4811]: I0128 17:27:22.105202 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-5fgn8"] Jan 28 17:27:22 crc kubenswrapper[4811]: I0128 17:27:22.352692 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76e23342-294c-42ca-afa8-ea18f4db3eca" path="/var/lib/kubelet/pods/76e23342-294c-42ca-afa8-ea18f4db3eca/volumes" Jan 28 17:27:27 crc kubenswrapper[4811]: I0128 17:27:27.138340 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-967dbf479-zj5bt" podUID="e3c24a39-106a-4e13-aae3-7e8399c05eb0" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.112:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.112:8080: connect: connection refused" Jan 28 17:27:31 crc kubenswrapper[4811]: I0128 17:27:31.820279 4811 scope.go:117] "RemoveContainer" containerID="c65204501b8c7681f4ac497f191a551dc77d6e53d042e6d7ac6e4d829a8e263a" Jan 28 17:27:31 crc kubenswrapper[4811]: I0128 17:27:31.858635 4811 scope.go:117] "RemoveContainer" containerID="c3e624b34fc9af6cb8442bd1d866e3967690ec6283ecb05443400dc263e60b8f" Jan 28 17:27:31 crc kubenswrapper[4811]: I0128 17:27:31.906348 4811 scope.go:117] "RemoveContainer" containerID="8294bd68150353c60aa38d7aed0faf462a832b2e09ba418fb5fe5cda05326494" Jan 28 17:27:31 crc kubenswrapper[4811]: I0128 17:27:31.942266 4811 scope.go:117] "RemoveContainer" containerID="7a2f5307b315effa4abf4fcaa14875ca9250487f7bafb81d5a763989c38cc450" Jan 28 17:27:31 crc kubenswrapper[4811]: I0128 17:27:31.986486 4811 scope.go:117] "RemoveContainer" containerID="a5cc4119707b0e0d37d6e992aa17c7f46c1a75ce4b82fbe38756c83ebb8a2970" Jan 28 17:27:32 crc kubenswrapper[4811]: I0128 17:27:32.068127 4811 scope.go:117] "RemoveContainer" containerID="369aa68a0a0a8a7a80bbef37cb9b71506e789c078066bbd3ddcc848f59a96f17" Jan 28 17:27:33 crc kubenswrapper[4811]: I0128 17:27:33.087168 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 17:27:33 crc kubenswrapper[4811]: I0128 17:27:33.087491 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" Jan 28 17:27:33 crc kubenswrapper[4811]: I0128 17:27:33.087532 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" Jan 28 17:27:33 crc kubenswrapper[4811]: I0128 17:27:33.088042 4811 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3dea4adb7c8ad114e1ea2dd4f826ff466865b50d5f1c4a4806ff535fa6793f5b"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 28 17:27:33 crc kubenswrapper[4811]: I0128 17:27:33.088109 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://3dea4adb7c8ad114e1ea2dd4f826ff466865b50d5f1c4a4806ff535fa6793f5b" gracePeriod=600 Jan 28 17:27:33 crc kubenswrapper[4811]: I0128 17:27:33.514633 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="3dea4adb7c8ad114e1ea2dd4f826ff466865b50d5f1c4a4806ff535fa6793f5b" exitCode=0 Jan 28 17:27:33 crc kubenswrapper[4811]: I0128 17:27:33.514715 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"3dea4adb7c8ad114e1ea2dd4f826ff466865b50d5f1c4a4806ff535fa6793f5b"} Jan 28 17:27:33 crc kubenswrapper[4811]: I0128 17:27:33.514972 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"c8f31485a8ab10a9c157ae14e2cb2f7e512fa0e9a55fee158caf88525ec54b35"} Jan 28 17:27:33 crc kubenswrapper[4811]: I0128 17:27:33.515002 4811 scope.go:117] "RemoveContainer" containerID="3abd2ca52dcea4f0bad2eccd9e9279be836bcb0c1530a2433f35225ded0da091" Jan 28 17:27:37 crc kubenswrapper[4811]: I0128 17:27:37.138033 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-967dbf479-zj5bt" podUID="e3c24a39-106a-4e13-aae3-7e8399c05eb0" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.112:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.112:8080: connect: connection refused" Jan 28 17:27:37 crc kubenswrapper[4811]: I0128 17:27:37.138811 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-967dbf479-zj5bt" Jan 28 17:27:41 crc kubenswrapper[4811]: I0128 17:27:41.591761 4811 generic.go:334] "Generic (PLEG): container finished" podID="e3c24a39-106a-4e13-aae3-7e8399c05eb0" containerID="36424f4b376650d076b16b5351a70299f435ba81adfd8e6c03a834a38442cb37" exitCode=137 Jan 28 17:27:41 crc kubenswrapper[4811]: I0128 17:27:41.591968 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-967dbf479-zj5bt" event={"ID":"e3c24a39-106a-4e13-aae3-7e8399c05eb0","Type":"ContainerDied","Data":"36424f4b376650d076b16b5351a70299f435ba81adfd8e6c03a834a38442cb37"} Jan 28 17:27:41 crc kubenswrapper[4811]: I0128 17:27:41.728372 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-967dbf479-zj5bt" Jan 28 17:27:41 crc kubenswrapper[4811]: I0128 17:27:41.884117 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e3c24a39-106a-4e13-aae3-7e8399c05eb0-scripts\") pod \"e3c24a39-106a-4e13-aae3-7e8399c05eb0\" (UID: \"e3c24a39-106a-4e13-aae3-7e8399c05eb0\") " Jan 28 17:27:41 crc kubenswrapper[4811]: I0128 17:27:41.884261 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3c24a39-106a-4e13-aae3-7e8399c05eb0-logs\") pod \"e3c24a39-106a-4e13-aae3-7e8399c05eb0\" (UID: \"e3c24a39-106a-4e13-aae3-7e8399c05eb0\") " Jan 28 17:27:41 crc kubenswrapper[4811]: I0128 17:27:41.884302 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e3c24a39-106a-4e13-aae3-7e8399c05eb0-config-data\") pod \"e3c24a39-106a-4e13-aae3-7e8399c05eb0\" (UID: \"e3c24a39-106a-4e13-aae3-7e8399c05eb0\") " Jan 28 17:27:41 crc kubenswrapper[4811]: I0128 17:27:41.884322 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6lb5s\" (UniqueName: \"kubernetes.io/projected/e3c24a39-106a-4e13-aae3-7e8399c05eb0-kube-api-access-6lb5s\") pod \"e3c24a39-106a-4e13-aae3-7e8399c05eb0\" (UID: \"e3c24a39-106a-4e13-aae3-7e8399c05eb0\") " Jan 28 17:27:41 crc kubenswrapper[4811]: I0128 17:27:41.884352 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e3c24a39-106a-4e13-aae3-7e8399c05eb0-horizon-secret-key\") pod \"e3c24a39-106a-4e13-aae3-7e8399c05eb0\" (UID: \"e3c24a39-106a-4e13-aae3-7e8399c05eb0\") " Jan 28 17:27:41 crc kubenswrapper[4811]: I0128 17:27:41.884984 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e3c24a39-106a-4e13-aae3-7e8399c05eb0-logs" (OuterVolumeSpecName: "logs") pod "e3c24a39-106a-4e13-aae3-7e8399c05eb0" (UID: "e3c24a39-106a-4e13-aae3-7e8399c05eb0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:27:41 crc kubenswrapper[4811]: I0128 17:27:41.885704 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3c24a39-106a-4e13-aae3-7e8399c05eb0-logs\") on node \"crc\" DevicePath \"\"" Jan 28 17:27:41 crc kubenswrapper[4811]: I0128 17:27:41.894821 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3c24a39-106a-4e13-aae3-7e8399c05eb0-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "e3c24a39-106a-4e13-aae3-7e8399c05eb0" (UID: "e3c24a39-106a-4e13-aae3-7e8399c05eb0"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:27:41 crc kubenswrapper[4811]: I0128 17:27:41.894857 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3c24a39-106a-4e13-aae3-7e8399c05eb0-kube-api-access-6lb5s" (OuterVolumeSpecName: "kube-api-access-6lb5s") pod "e3c24a39-106a-4e13-aae3-7e8399c05eb0" (UID: "e3c24a39-106a-4e13-aae3-7e8399c05eb0"). InnerVolumeSpecName "kube-api-access-6lb5s". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:27:41 crc kubenswrapper[4811]: I0128 17:27:41.913505 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3c24a39-106a-4e13-aae3-7e8399c05eb0-config-data" (OuterVolumeSpecName: "config-data") pod "e3c24a39-106a-4e13-aae3-7e8399c05eb0" (UID: "e3c24a39-106a-4e13-aae3-7e8399c05eb0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:27:41 crc kubenswrapper[4811]: I0128 17:27:41.914651 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3c24a39-106a-4e13-aae3-7e8399c05eb0-scripts" (OuterVolumeSpecName: "scripts") pod "e3c24a39-106a-4e13-aae3-7e8399c05eb0" (UID: "e3c24a39-106a-4e13-aae3-7e8399c05eb0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:27:41 crc kubenswrapper[4811]: I0128 17:27:41.988577 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e3c24a39-106a-4e13-aae3-7e8399c05eb0-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 17:27:41 crc kubenswrapper[4811]: I0128 17:27:41.988622 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6lb5s\" (UniqueName: \"kubernetes.io/projected/e3c24a39-106a-4e13-aae3-7e8399c05eb0-kube-api-access-6lb5s\") on node \"crc\" DevicePath \"\"" Jan 28 17:27:41 crc kubenswrapper[4811]: I0128 17:27:41.988638 4811 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e3c24a39-106a-4e13-aae3-7e8399c05eb0-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Jan 28 17:27:41 crc kubenswrapper[4811]: I0128 17:27:41.988650 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e3c24a39-106a-4e13-aae3-7e8399c05eb0-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:27:42 crc kubenswrapper[4811]: I0128 17:27:42.602895 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-967dbf479-zj5bt" event={"ID":"e3c24a39-106a-4e13-aae3-7e8399c05eb0","Type":"ContainerDied","Data":"20bc8d1062f09e8d4c8bd783b053466717e410de5fb2d592d0edf655420f9f06"} Jan 28 17:27:42 crc kubenswrapper[4811]: I0128 17:27:42.603248 4811 scope.go:117] "RemoveContainer" containerID="7384da6fec1e74e9ef1e16b0a6c26184c2ad03dc865677fff64514520e72926a" Jan 28 17:27:42 crc kubenswrapper[4811]: I0128 17:27:42.603016 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-967dbf479-zj5bt" Jan 28 17:27:42 crc kubenswrapper[4811]: I0128 17:27:42.637876 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-967dbf479-zj5bt"] Jan 28 17:27:42 crc kubenswrapper[4811]: I0128 17:27:42.650187 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-967dbf479-zj5bt"] Jan 28 17:27:42 crc kubenswrapper[4811]: I0128 17:27:42.784398 4811 scope.go:117] "RemoveContainer" containerID="36424f4b376650d076b16b5351a70299f435ba81adfd8e6c03a834a38442cb37" Jan 28 17:27:44 crc kubenswrapper[4811]: I0128 17:27:44.353055 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3c24a39-106a-4e13-aae3-7e8399c05eb0" path="/var/lib/kubelet/pods/e3c24a39-106a-4e13-aae3-7e8399c05eb0/volumes" Jan 28 17:27:53 crc kubenswrapper[4811]: I0128 17:27:53.594540 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7877575757-66k4b"] Jan 28 17:27:53 crc kubenswrapper[4811]: E0128 17:27:53.595536 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3c24a39-106a-4e13-aae3-7e8399c05eb0" containerName="horizon" Jan 28 17:27:53 crc kubenswrapper[4811]: I0128 17:27:53.595553 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3c24a39-106a-4e13-aae3-7e8399c05eb0" containerName="horizon" Jan 28 17:27:53 crc kubenswrapper[4811]: E0128 17:27:53.595590 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="651c91bb-dc5d-4416-bece-7647b1447487" containerName="horizon-log" Jan 28 17:27:53 crc kubenswrapper[4811]: I0128 17:27:53.595598 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="651c91bb-dc5d-4416-bece-7647b1447487" containerName="horizon-log" Jan 28 17:27:53 crc kubenswrapper[4811]: E0128 17:27:53.595614 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="651c91bb-dc5d-4416-bece-7647b1447487" containerName="horizon" Jan 28 17:27:53 crc kubenswrapper[4811]: I0128 17:27:53.595622 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="651c91bb-dc5d-4416-bece-7647b1447487" containerName="horizon" Jan 28 17:27:53 crc kubenswrapper[4811]: E0128 17:27:53.595637 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3c24a39-106a-4e13-aae3-7e8399c05eb0" containerName="horizon-log" Jan 28 17:27:53 crc kubenswrapper[4811]: I0128 17:27:53.595645 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3c24a39-106a-4e13-aae3-7e8399c05eb0" containerName="horizon-log" Jan 28 17:27:53 crc kubenswrapper[4811]: I0128 17:27:53.595876 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3c24a39-106a-4e13-aae3-7e8399c05eb0" containerName="horizon" Jan 28 17:27:53 crc kubenswrapper[4811]: I0128 17:27:53.595893 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3c24a39-106a-4e13-aae3-7e8399c05eb0" containerName="horizon-log" Jan 28 17:27:53 crc kubenswrapper[4811]: I0128 17:27:53.595907 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="651c91bb-dc5d-4416-bece-7647b1447487" containerName="horizon" Jan 28 17:27:53 crc kubenswrapper[4811]: I0128 17:27:53.595924 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="651c91bb-dc5d-4416-bece-7647b1447487" containerName="horizon-log" Jan 28 17:27:53 crc kubenswrapper[4811]: I0128 17:27:53.597141 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7877575757-66k4b" Jan 28 17:27:53 crc kubenswrapper[4811]: I0128 17:27:53.613283 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7877575757-66k4b"] Jan 28 17:27:53 crc kubenswrapper[4811]: I0128 17:27:53.716299 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/54589627-ab35-452b-8c83-75608ebfe0f4-horizon-secret-key\") pod \"horizon-7877575757-66k4b\" (UID: \"54589627-ab35-452b-8c83-75608ebfe0f4\") " pod="openstack/horizon-7877575757-66k4b" Jan 28 17:27:53 crc kubenswrapper[4811]: I0128 17:27:53.716468 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/54589627-ab35-452b-8c83-75608ebfe0f4-scripts\") pod \"horizon-7877575757-66k4b\" (UID: \"54589627-ab35-452b-8c83-75608ebfe0f4\") " pod="openstack/horizon-7877575757-66k4b" Jan 28 17:27:53 crc kubenswrapper[4811]: I0128 17:27:53.716527 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54589627-ab35-452b-8c83-75608ebfe0f4-logs\") pod \"horizon-7877575757-66k4b\" (UID: \"54589627-ab35-452b-8c83-75608ebfe0f4\") " pod="openstack/horizon-7877575757-66k4b" Jan 28 17:27:53 crc kubenswrapper[4811]: I0128 17:27:53.717016 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/54589627-ab35-452b-8c83-75608ebfe0f4-config-data\") pod \"horizon-7877575757-66k4b\" (UID: \"54589627-ab35-452b-8c83-75608ebfe0f4\") " pod="openstack/horizon-7877575757-66k4b" Jan 28 17:27:53 crc kubenswrapper[4811]: I0128 17:27:53.717089 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjqts\" (UniqueName: \"kubernetes.io/projected/54589627-ab35-452b-8c83-75608ebfe0f4-kube-api-access-pjqts\") pod \"horizon-7877575757-66k4b\" (UID: \"54589627-ab35-452b-8c83-75608ebfe0f4\") " pod="openstack/horizon-7877575757-66k4b" Jan 28 17:27:53 crc kubenswrapper[4811]: I0128 17:27:53.819052 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/54589627-ab35-452b-8c83-75608ebfe0f4-config-data\") pod \"horizon-7877575757-66k4b\" (UID: \"54589627-ab35-452b-8c83-75608ebfe0f4\") " pod="openstack/horizon-7877575757-66k4b" Jan 28 17:27:53 crc kubenswrapper[4811]: I0128 17:27:53.819105 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjqts\" (UniqueName: \"kubernetes.io/projected/54589627-ab35-452b-8c83-75608ebfe0f4-kube-api-access-pjqts\") pod \"horizon-7877575757-66k4b\" (UID: \"54589627-ab35-452b-8c83-75608ebfe0f4\") " pod="openstack/horizon-7877575757-66k4b" Jan 28 17:27:53 crc kubenswrapper[4811]: I0128 17:27:53.819163 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/54589627-ab35-452b-8c83-75608ebfe0f4-horizon-secret-key\") pod \"horizon-7877575757-66k4b\" (UID: \"54589627-ab35-452b-8c83-75608ebfe0f4\") " pod="openstack/horizon-7877575757-66k4b" Jan 28 17:27:53 crc kubenswrapper[4811]: I0128 17:27:53.819210 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/54589627-ab35-452b-8c83-75608ebfe0f4-scripts\") pod \"horizon-7877575757-66k4b\" (UID: \"54589627-ab35-452b-8c83-75608ebfe0f4\") " pod="openstack/horizon-7877575757-66k4b" Jan 28 17:27:53 crc kubenswrapper[4811]: I0128 17:27:53.819256 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54589627-ab35-452b-8c83-75608ebfe0f4-logs\") pod \"horizon-7877575757-66k4b\" (UID: \"54589627-ab35-452b-8c83-75608ebfe0f4\") " pod="openstack/horizon-7877575757-66k4b" Jan 28 17:27:53 crc kubenswrapper[4811]: I0128 17:27:53.819650 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54589627-ab35-452b-8c83-75608ebfe0f4-logs\") pod \"horizon-7877575757-66k4b\" (UID: \"54589627-ab35-452b-8c83-75608ebfe0f4\") " pod="openstack/horizon-7877575757-66k4b" Jan 28 17:27:53 crc kubenswrapper[4811]: I0128 17:27:53.820041 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/54589627-ab35-452b-8c83-75608ebfe0f4-scripts\") pod \"horizon-7877575757-66k4b\" (UID: \"54589627-ab35-452b-8c83-75608ebfe0f4\") " pod="openstack/horizon-7877575757-66k4b" Jan 28 17:27:53 crc kubenswrapper[4811]: I0128 17:27:53.820269 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/54589627-ab35-452b-8c83-75608ebfe0f4-config-data\") pod \"horizon-7877575757-66k4b\" (UID: \"54589627-ab35-452b-8c83-75608ebfe0f4\") " pod="openstack/horizon-7877575757-66k4b" Jan 28 17:27:53 crc kubenswrapper[4811]: I0128 17:27:53.830857 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/54589627-ab35-452b-8c83-75608ebfe0f4-horizon-secret-key\") pod \"horizon-7877575757-66k4b\" (UID: \"54589627-ab35-452b-8c83-75608ebfe0f4\") " pod="openstack/horizon-7877575757-66k4b" Jan 28 17:27:53 crc kubenswrapper[4811]: I0128 17:27:53.841167 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjqts\" (UniqueName: \"kubernetes.io/projected/54589627-ab35-452b-8c83-75608ebfe0f4-kube-api-access-pjqts\") pod \"horizon-7877575757-66k4b\" (UID: \"54589627-ab35-452b-8c83-75608ebfe0f4\") " pod="openstack/horizon-7877575757-66k4b" Jan 28 17:27:53 crc kubenswrapper[4811]: I0128 17:27:53.917127 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7877575757-66k4b" Jan 28 17:27:54 crc kubenswrapper[4811]: I0128 17:27:54.432342 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7877575757-66k4b"] Jan 28 17:27:54 crc kubenswrapper[4811]: I0128 17:27:54.727764 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7877575757-66k4b" event={"ID":"54589627-ab35-452b-8c83-75608ebfe0f4","Type":"ContainerStarted","Data":"c125ea01dc1b40d4a9226e165c2c17070c64340dabd3f17aec7463938aa2a5ce"} Jan 28 17:27:54 crc kubenswrapper[4811]: I0128 17:27:54.728133 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7877575757-66k4b" event={"ID":"54589627-ab35-452b-8c83-75608ebfe0f4","Type":"ContainerStarted","Data":"0ca06cd8a4a815b8ab3d8ab0139c91b3da14cc002b8434ede78b9941a49e369f"} Jan 28 17:27:54 crc kubenswrapper[4811]: I0128 17:27:54.779296 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-create-pw6st"] Jan 28 17:27:54 crc kubenswrapper[4811]: I0128 17:27:54.781022 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-pw6st" Jan 28 17:27:54 crc kubenswrapper[4811]: I0128 17:27:54.788477 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-pw6st"] Jan 28 17:27:54 crc kubenswrapper[4811]: I0128 17:27:54.846576 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/278a8293-6eb7-4409-9881-e320c037f063-operator-scripts\") pod \"heat-db-create-pw6st\" (UID: \"278a8293-6eb7-4409-9881-e320c037f063\") " pod="openstack/heat-db-create-pw6st" Jan 28 17:27:54 crc kubenswrapper[4811]: I0128 17:27:54.846661 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wglhh\" (UniqueName: \"kubernetes.io/projected/278a8293-6eb7-4409-9881-e320c037f063-kube-api-access-wglhh\") pod \"heat-db-create-pw6st\" (UID: \"278a8293-6eb7-4409-9881-e320c037f063\") " pod="openstack/heat-db-create-pw6st" Jan 28 17:27:54 crc kubenswrapper[4811]: I0128 17:27:54.905811 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-7fc9-account-create-update-bmtgf"] Jan 28 17:27:54 crc kubenswrapper[4811]: I0128 17:27:54.907065 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-7fc9-account-create-update-bmtgf" Jan 28 17:27:54 crc kubenswrapper[4811]: I0128 17:27:54.909128 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-db-secret" Jan 28 17:27:54 crc kubenswrapper[4811]: I0128 17:27:54.914570 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-7fc9-account-create-update-bmtgf"] Jan 28 17:27:54 crc kubenswrapper[4811]: I0128 17:27:54.948856 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wglhh\" (UniqueName: \"kubernetes.io/projected/278a8293-6eb7-4409-9881-e320c037f063-kube-api-access-wglhh\") pod \"heat-db-create-pw6st\" (UID: \"278a8293-6eb7-4409-9881-e320c037f063\") " pod="openstack/heat-db-create-pw6st" Jan 28 17:27:54 crc kubenswrapper[4811]: I0128 17:27:54.949078 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/278a8293-6eb7-4409-9881-e320c037f063-operator-scripts\") pod \"heat-db-create-pw6st\" (UID: \"278a8293-6eb7-4409-9881-e320c037f063\") " pod="openstack/heat-db-create-pw6st" Jan 28 17:27:54 crc kubenswrapper[4811]: I0128 17:27:54.950223 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/278a8293-6eb7-4409-9881-e320c037f063-operator-scripts\") pod \"heat-db-create-pw6st\" (UID: \"278a8293-6eb7-4409-9881-e320c037f063\") " pod="openstack/heat-db-create-pw6st" Jan 28 17:27:54 crc kubenswrapper[4811]: I0128 17:27:54.994610 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wglhh\" (UniqueName: \"kubernetes.io/projected/278a8293-6eb7-4409-9881-e320c037f063-kube-api-access-wglhh\") pod \"heat-db-create-pw6st\" (UID: \"278a8293-6eb7-4409-9881-e320c037f063\") " pod="openstack/heat-db-create-pw6st" Jan 28 17:27:55 crc kubenswrapper[4811]: I0128 17:27:55.053511 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdpgj\" (UniqueName: \"kubernetes.io/projected/1659b4b2-066e-4aa2-a334-c52f6b132080-kube-api-access-kdpgj\") pod \"heat-7fc9-account-create-update-bmtgf\" (UID: \"1659b4b2-066e-4aa2-a334-c52f6b132080\") " pod="openstack/heat-7fc9-account-create-update-bmtgf" Jan 28 17:27:55 crc kubenswrapper[4811]: I0128 17:27:55.053788 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1659b4b2-066e-4aa2-a334-c52f6b132080-operator-scripts\") pod \"heat-7fc9-account-create-update-bmtgf\" (UID: \"1659b4b2-066e-4aa2-a334-c52f6b132080\") " pod="openstack/heat-7fc9-account-create-update-bmtgf" Jan 28 17:27:55 crc kubenswrapper[4811]: I0128 17:27:55.103853 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-create-pw6st" Jan 28 17:27:55 crc kubenswrapper[4811]: I0128 17:27:55.161021 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdpgj\" (UniqueName: \"kubernetes.io/projected/1659b4b2-066e-4aa2-a334-c52f6b132080-kube-api-access-kdpgj\") pod \"heat-7fc9-account-create-update-bmtgf\" (UID: \"1659b4b2-066e-4aa2-a334-c52f6b132080\") " pod="openstack/heat-7fc9-account-create-update-bmtgf" Jan 28 17:27:55 crc kubenswrapper[4811]: I0128 17:27:55.161099 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1659b4b2-066e-4aa2-a334-c52f6b132080-operator-scripts\") pod \"heat-7fc9-account-create-update-bmtgf\" (UID: \"1659b4b2-066e-4aa2-a334-c52f6b132080\") " pod="openstack/heat-7fc9-account-create-update-bmtgf" Jan 28 17:27:55 crc kubenswrapper[4811]: I0128 17:27:55.162759 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1659b4b2-066e-4aa2-a334-c52f6b132080-operator-scripts\") pod \"heat-7fc9-account-create-update-bmtgf\" (UID: \"1659b4b2-066e-4aa2-a334-c52f6b132080\") " pod="openstack/heat-7fc9-account-create-update-bmtgf" Jan 28 17:27:55 crc kubenswrapper[4811]: I0128 17:27:55.203660 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdpgj\" (UniqueName: \"kubernetes.io/projected/1659b4b2-066e-4aa2-a334-c52f6b132080-kube-api-access-kdpgj\") pod \"heat-7fc9-account-create-update-bmtgf\" (UID: \"1659b4b2-066e-4aa2-a334-c52f6b132080\") " pod="openstack/heat-7fc9-account-create-update-bmtgf" Jan 28 17:27:55 crc kubenswrapper[4811]: I0128 17:27:55.248110 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-7fc9-account-create-update-bmtgf" Jan 28 17:27:55 crc kubenswrapper[4811]: I0128 17:27:55.739244 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-pw6st"] Jan 28 17:27:55 crc kubenswrapper[4811]: I0128 17:27:55.744079 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7877575757-66k4b" event={"ID":"54589627-ab35-452b-8c83-75608ebfe0f4","Type":"ContainerStarted","Data":"385780e883b72761a095b90644ad82365312286ed47db8c72e370e140073b090"} Jan 28 17:27:55 crc kubenswrapper[4811]: I0128 17:27:55.778484 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7877575757-66k4b" podStartSLOduration=2.778460101 podStartE2EDuration="2.778460101s" podCreationTimestamp="2026-01-28 17:27:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:27:55.768972554 +0000 UTC m=+6168.523336157" watchObservedRunningTime="2026-01-28 17:27:55.778460101 +0000 UTC m=+6168.532823684" Jan 28 17:27:55 crc kubenswrapper[4811]: W0128 17:27:55.787826 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod278a8293_6eb7_4409_9881_e320c037f063.slice/crio-71db4c9ff1c8511b1c9d33d32e78ba51b8e7774be5ce439bd963b1724c1a16d6 WatchSource:0}: Error finding container 71db4c9ff1c8511b1c9d33d32e78ba51b8e7774be5ce439bd963b1724c1a16d6: Status 404 returned error can't find the container with id 71db4c9ff1c8511b1c9d33d32e78ba51b8e7774be5ce439bd963b1724c1a16d6 Jan 28 17:27:55 crc kubenswrapper[4811]: I0128 17:27:55.907341 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-7fc9-account-create-update-bmtgf"] Jan 28 17:27:56 crc kubenswrapper[4811]: W0128 17:27:56.000509 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1659b4b2_066e_4aa2_a334_c52f6b132080.slice/crio-19ce95f50d3581c3d9933897bd5456141fdc50d1f454b5656f86e84ad81d488d WatchSource:0}: Error finding container 19ce95f50d3581c3d9933897bd5456141fdc50d1f454b5656f86e84ad81d488d: Status 404 returned error can't find the container with id 19ce95f50d3581c3d9933897bd5456141fdc50d1f454b5656f86e84ad81d488d Jan 28 17:27:56 crc kubenswrapper[4811]: I0128 17:27:56.755452 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-7fc9-account-create-update-bmtgf" event={"ID":"1659b4b2-066e-4aa2-a334-c52f6b132080","Type":"ContainerStarted","Data":"b5b1d6b98aee5ce31b406e35b2e119378db2b38576d390f062ba1d37e922fde0"} Jan 28 17:27:56 crc kubenswrapper[4811]: I0128 17:27:56.755748 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-7fc9-account-create-update-bmtgf" event={"ID":"1659b4b2-066e-4aa2-a334-c52f6b132080","Type":"ContainerStarted","Data":"19ce95f50d3581c3d9933897bd5456141fdc50d1f454b5656f86e84ad81d488d"} Jan 28 17:27:56 crc kubenswrapper[4811]: I0128 17:27:56.759720 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-pw6st" event={"ID":"278a8293-6eb7-4409-9881-e320c037f063","Type":"ContainerStarted","Data":"1ebaabecb6ad8e582d0e3121bd7ea25081ebff8ae0c839b0cc0480f9840dba88"} Jan 28 17:27:56 crc kubenswrapper[4811]: I0128 17:27:56.759791 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-pw6st" 
event={"ID":"278a8293-6eb7-4409-9881-e320c037f063","Type":"ContainerStarted","Data":"71db4c9ff1c8511b1c9d33d32e78ba51b8e7774be5ce439bd963b1724c1a16d6"} Jan 28 17:27:56 crc kubenswrapper[4811]: I0128 17:27:56.776171 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-7fc9-account-create-update-bmtgf" podStartSLOduration=2.776146894 podStartE2EDuration="2.776146894s" podCreationTimestamp="2026-01-28 17:27:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:27:56.770204292 +0000 UTC m=+6169.524567875" watchObservedRunningTime="2026-01-28 17:27:56.776146894 +0000 UTC m=+6169.530510477" Jan 28 17:27:56 crc kubenswrapper[4811]: I0128 17:27:56.790386 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-db-create-pw6st" podStartSLOduration=2.7903656310000002 podStartE2EDuration="2.790365631s" podCreationTimestamp="2026-01-28 17:27:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:27:56.788186162 +0000 UTC m=+6169.542549755" watchObservedRunningTime="2026-01-28 17:27:56.790365631 +0000 UTC m=+6169.544729204" Jan 28 17:27:57 crc kubenswrapper[4811]: I0128 17:27:57.772465 4811 generic.go:334] "Generic (PLEG): container finished" podID="278a8293-6eb7-4409-9881-e320c037f063" containerID="1ebaabecb6ad8e582d0e3121bd7ea25081ebff8ae0c839b0cc0480f9840dba88" exitCode=0 Jan 28 17:27:57 crc kubenswrapper[4811]: I0128 17:27:57.773710 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-pw6st" event={"ID":"278a8293-6eb7-4409-9881-e320c037f063","Type":"ContainerDied","Data":"1ebaabecb6ad8e582d0e3121bd7ea25081ebff8ae0c839b0cc0480f9840dba88"} Jan 28 17:27:57 crc kubenswrapper[4811]: I0128 17:27:57.776976 4811 generic.go:334] "Generic (PLEG): container finished" podID="1659b4b2-066e-4aa2-a334-c52f6b132080" containerID="b5b1d6b98aee5ce31b406e35b2e119378db2b38576d390f062ba1d37e922fde0" exitCode=0 Jan 28 17:27:57 crc kubenswrapper[4811]: I0128 17:27:57.777026 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-7fc9-account-create-update-bmtgf" event={"ID":"1659b4b2-066e-4aa2-a334-c52f6b132080","Type":"ContainerDied","Data":"b5b1d6b98aee5ce31b406e35b2e119378db2b38576d390f062ba1d37e922fde0"} Jan 28 17:27:59 crc kubenswrapper[4811]: I0128 17:27:59.248615 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-7fc9-account-create-update-bmtgf" Jan 28 17:27:59 crc kubenswrapper[4811]: I0128 17:27:59.255219 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-create-pw6st" Jan 28 17:27:59 crc kubenswrapper[4811]: I0128 17:27:59.355007 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1659b4b2-066e-4aa2-a334-c52f6b132080-operator-scripts\") pod \"1659b4b2-066e-4aa2-a334-c52f6b132080\" (UID: \"1659b4b2-066e-4aa2-a334-c52f6b132080\") " Jan 28 17:27:59 crc kubenswrapper[4811]: I0128 17:27:59.355247 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/278a8293-6eb7-4409-9881-e320c037f063-operator-scripts\") pod \"278a8293-6eb7-4409-9881-e320c037f063\" (UID: \"278a8293-6eb7-4409-9881-e320c037f063\") " Jan 28 17:27:59 crc kubenswrapper[4811]: I0128 17:27:59.355275 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wglhh\" (UniqueName: \"kubernetes.io/projected/278a8293-6eb7-4409-9881-e320c037f063-kube-api-access-wglhh\") pod \"278a8293-6eb7-4409-9881-e320c037f063\" (UID: \"278a8293-6eb7-4409-9881-e320c037f063\") " Jan 28 17:27:59 crc kubenswrapper[4811]: I0128 17:27:59.355400 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kdpgj\" (UniqueName: \"kubernetes.io/projected/1659b4b2-066e-4aa2-a334-c52f6b132080-kube-api-access-kdpgj\") pod \"1659b4b2-066e-4aa2-a334-c52f6b132080\" (UID: \"1659b4b2-066e-4aa2-a334-c52f6b132080\") " Jan 28 17:27:59 crc kubenswrapper[4811]: I0128 17:27:59.355594 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1659b4b2-066e-4aa2-a334-c52f6b132080-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1659b4b2-066e-4aa2-a334-c52f6b132080" (UID: "1659b4b2-066e-4aa2-a334-c52f6b132080"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:27:59 crc kubenswrapper[4811]: I0128 17:27:59.355890 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1659b4b2-066e-4aa2-a334-c52f6b132080-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:27:59 crc kubenswrapper[4811]: I0128 17:27:59.356008 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/278a8293-6eb7-4409-9881-e320c037f063-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "278a8293-6eb7-4409-9881-e320c037f063" (UID: "278a8293-6eb7-4409-9881-e320c037f063"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:27:59 crc kubenswrapper[4811]: I0128 17:27:59.360871 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/278a8293-6eb7-4409-9881-e320c037f063-kube-api-access-wglhh" (OuterVolumeSpecName: "kube-api-access-wglhh") pod "278a8293-6eb7-4409-9881-e320c037f063" (UID: "278a8293-6eb7-4409-9881-e320c037f063"). InnerVolumeSpecName "kube-api-access-wglhh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:27:59 crc kubenswrapper[4811]: I0128 17:27:59.369332 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1659b4b2-066e-4aa2-a334-c52f6b132080-kube-api-access-kdpgj" (OuterVolumeSpecName: "kube-api-access-kdpgj") pod "1659b4b2-066e-4aa2-a334-c52f6b132080" (UID: "1659b4b2-066e-4aa2-a334-c52f6b132080"). InnerVolumeSpecName "kube-api-access-kdpgj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:27:59 crc kubenswrapper[4811]: I0128 17:27:59.457949 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kdpgj\" (UniqueName: \"kubernetes.io/projected/1659b4b2-066e-4aa2-a334-c52f6b132080-kube-api-access-kdpgj\") on node \"crc\" DevicePath \"\"" Jan 28 17:27:59 crc kubenswrapper[4811]: I0128 17:27:59.457994 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/278a8293-6eb7-4409-9881-e320c037f063-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:27:59 crc kubenswrapper[4811]: I0128 17:27:59.458004 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wglhh\" (UniqueName: \"kubernetes.io/projected/278a8293-6eb7-4409-9881-e320c037f063-kube-api-access-wglhh\") on node \"crc\" DevicePath \"\"" Jan 28 17:27:59 crc kubenswrapper[4811]: I0128 17:27:59.795096 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-7fc9-account-create-update-bmtgf" Jan 28 17:27:59 crc kubenswrapper[4811]: I0128 17:27:59.795102 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-7fc9-account-create-update-bmtgf" event={"ID":"1659b4b2-066e-4aa2-a334-c52f6b132080","Type":"ContainerDied","Data":"19ce95f50d3581c3d9933897bd5456141fdc50d1f454b5656f86e84ad81d488d"} Jan 28 17:27:59 crc kubenswrapper[4811]: I0128 17:27:59.795241 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="19ce95f50d3581c3d9933897bd5456141fdc50d1f454b5656f86e84ad81d488d" Jan 28 17:27:59 crc kubenswrapper[4811]: I0128 17:27:59.796874 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-pw6st" event={"ID":"278a8293-6eb7-4409-9881-e320c037f063","Type":"ContainerDied","Data":"71db4c9ff1c8511b1c9d33d32e78ba51b8e7774be5ce439bd963b1724c1a16d6"} Jan 28 17:27:59 crc kubenswrapper[4811]: I0128 17:27:59.796901 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-create-pw6st" Jan 28 17:27:59 crc kubenswrapper[4811]: I0128 17:27:59.796911 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="71db4c9ff1c8511b1c9d33d32e78ba51b8e7774be5ce439bd963b1724c1a16d6" Jan 28 17:28:03 crc kubenswrapper[4811]: I0128 17:28:03.917270 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7877575757-66k4b" Jan 28 17:28:03 crc kubenswrapper[4811]: I0128 17:28:03.918839 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7877575757-66k4b" Jan 28 17:28:04 crc kubenswrapper[4811]: I0128 17:28:04.049881 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-b227-account-create-update-74djd"] Jan 28 17:28:04 crc kubenswrapper[4811]: I0128 17:28:04.062773 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-t4fqr"] Jan 28 17:28:04 crc kubenswrapper[4811]: I0128 17:28:04.075935 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-b227-account-create-update-74djd"] Jan 28 17:28:04 crc kubenswrapper[4811]: I0128 17:28:04.086045 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-t4fqr"] Jan 28 17:28:04 crc kubenswrapper[4811]: I0128 17:28:04.350291 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79b975a8-25eb-4580-b8d0-786b2efe59b0" path="/var/lib/kubelet/pods/79b975a8-25eb-4580-b8d0-786b2efe59b0/volumes" Jan 28 17:28:04 crc kubenswrapper[4811]: I0128 17:28:04.350952 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="afaa6d92-7733-4a6e-92d5-860e63786a16" path="/var/lib/kubelet/pods/afaa6d92-7733-4a6e-92d5-860e63786a16/volumes" Jan 28 17:28:05 crc kubenswrapper[4811]: I0128 17:28:05.020054 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-sync-qzbhp"] Jan 28 17:28:05 crc kubenswrapper[4811]: E0128 17:28:05.026058 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1659b4b2-066e-4aa2-a334-c52f6b132080" containerName="mariadb-account-create-update" Jan 28 17:28:05 crc kubenswrapper[4811]: I0128 17:28:05.026094 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="1659b4b2-066e-4aa2-a334-c52f6b132080" containerName="mariadb-account-create-update" Jan 28 17:28:05 crc kubenswrapper[4811]: E0128 17:28:05.026121 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="278a8293-6eb7-4409-9881-e320c037f063" containerName="mariadb-database-create" Jan 28 17:28:05 crc kubenswrapper[4811]: I0128 17:28:05.026128 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="278a8293-6eb7-4409-9881-e320c037f063" containerName="mariadb-database-create" Jan 28 17:28:05 crc kubenswrapper[4811]: I0128 17:28:05.026424 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="1659b4b2-066e-4aa2-a334-c52f6b132080" containerName="mariadb-account-create-update" Jan 28 17:28:05 crc kubenswrapper[4811]: I0128 17:28:05.026468 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="278a8293-6eb7-4409-9881-e320c037f063" containerName="mariadb-database-create" Jan 28 17:28:05 crc kubenswrapper[4811]: I0128 17:28:05.027358 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-sync-qzbhp" Jan 28 17:28:05 crc kubenswrapper[4811]: I0128 17:28:05.030588 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-r92d2" Jan 28 17:28:05 crc kubenswrapper[4811]: I0128 17:28:05.030736 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Jan 28 17:28:05 crc kubenswrapper[4811]: I0128 17:28:05.044292 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-qzbhp"] Jan 28 17:28:05 crc kubenswrapper[4811]: I0128 17:28:05.176357 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qsh75\" (UniqueName: \"kubernetes.io/projected/e3eb0e86-9952-4769-b877-e76c7c12b8e2-kube-api-access-qsh75\") pod \"heat-db-sync-qzbhp\" (UID: \"e3eb0e86-9952-4769-b877-e76c7c12b8e2\") " pod="openstack/heat-db-sync-qzbhp" Jan 28 17:28:05 crc kubenswrapper[4811]: I0128 17:28:05.176564 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3eb0e86-9952-4769-b877-e76c7c12b8e2-combined-ca-bundle\") pod \"heat-db-sync-qzbhp\" (UID: \"e3eb0e86-9952-4769-b877-e76c7c12b8e2\") " pod="openstack/heat-db-sync-qzbhp" Jan 28 17:28:05 crc kubenswrapper[4811]: I0128 17:28:05.176618 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3eb0e86-9952-4769-b877-e76c7c12b8e2-config-data\") pod \"heat-db-sync-qzbhp\" (UID: \"e3eb0e86-9952-4769-b877-e76c7c12b8e2\") " pod="openstack/heat-db-sync-qzbhp" Jan 28 17:28:05 crc kubenswrapper[4811]: I0128 17:28:05.278853 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qsh75\" (UniqueName: \"kubernetes.io/projected/e3eb0e86-9952-4769-b877-e76c7c12b8e2-kube-api-access-qsh75\") pod \"heat-db-sync-qzbhp\" (UID: \"e3eb0e86-9952-4769-b877-e76c7c12b8e2\") " pod="openstack/heat-db-sync-qzbhp" Jan 28 17:28:05 crc kubenswrapper[4811]: I0128 17:28:05.279008 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3eb0e86-9952-4769-b877-e76c7c12b8e2-combined-ca-bundle\") pod \"heat-db-sync-qzbhp\" (UID: \"e3eb0e86-9952-4769-b877-e76c7c12b8e2\") " pod="openstack/heat-db-sync-qzbhp" Jan 28 17:28:05 crc kubenswrapper[4811]: I0128 17:28:05.279049 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3eb0e86-9952-4769-b877-e76c7c12b8e2-config-data\") pod \"heat-db-sync-qzbhp\" (UID: \"e3eb0e86-9952-4769-b877-e76c7c12b8e2\") " pod="openstack/heat-db-sync-qzbhp" Jan 28 17:28:05 crc kubenswrapper[4811]: I0128 17:28:05.285903 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3eb0e86-9952-4769-b877-e76c7c12b8e2-combined-ca-bundle\") pod \"heat-db-sync-qzbhp\" (UID: \"e3eb0e86-9952-4769-b877-e76c7c12b8e2\") " pod="openstack/heat-db-sync-qzbhp" Jan 28 17:28:05 crc kubenswrapper[4811]: I0128 17:28:05.290215 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3eb0e86-9952-4769-b877-e76c7c12b8e2-config-data\") pod \"heat-db-sync-qzbhp\" (UID: \"e3eb0e86-9952-4769-b877-e76c7c12b8e2\") " pod="openstack/heat-db-sync-qzbhp" 
Jan 28 17:28:05 crc kubenswrapper[4811]: I0128 17:28:05.296522 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qsh75\" (UniqueName: \"kubernetes.io/projected/e3eb0e86-9952-4769-b877-e76c7c12b8e2-kube-api-access-qsh75\") pod \"heat-db-sync-qzbhp\" (UID: \"e3eb0e86-9952-4769-b877-e76c7c12b8e2\") " pod="openstack/heat-db-sync-qzbhp"
Jan 28 17:28:05 crc kubenswrapper[4811]: I0128 17:28:05.351704 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-qzbhp"
Jan 28 17:28:06 crc kubenswrapper[4811]: I0128 17:28:06.011665 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-qzbhp"]
Jan 28 17:28:06 crc kubenswrapper[4811]: W0128 17:28:06.013782 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode3eb0e86_9952_4769_b877_e76c7c12b8e2.slice/crio-06bcd5ff0c0110cc87a39e36689f64a79fb1f926d4268d07546e32415953e717 WatchSource:0}: Error finding container 06bcd5ff0c0110cc87a39e36689f64a79fb1f926d4268d07546e32415953e717: Status 404 returned error can't find the container with id 06bcd5ff0c0110cc87a39e36689f64a79fb1f926d4268d07546e32415953e717
Jan 28 17:28:06 crc kubenswrapper[4811]: I0128 17:28:06.863596 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-qzbhp" event={"ID":"e3eb0e86-9952-4769-b877-e76c7c12b8e2","Type":"ContainerStarted","Data":"06bcd5ff0c0110cc87a39e36689f64a79fb1f926d4268d07546e32415953e717"}
Jan 28 17:28:12 crc kubenswrapper[4811]: I0128 17:28:12.040092 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-wjtzr"]
Jan 28 17:28:12 crc kubenswrapper[4811]: I0128 17:28:12.054069 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-wjtzr"]
Jan 28 17:28:12 crc kubenswrapper[4811]: I0128 17:28:12.357554 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f00cc50-d228-4d46-9778-d0c6db4a23fa" path="/var/lib/kubelet/pods/6f00cc50-d228-4d46-9778-d0c6db4a23fa/volumes"
Jan 28 17:28:13 crc kubenswrapper[4811]: I0128 17:28:13.920764 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7877575757-66k4b" podUID="54589627-ab35-452b-8c83-75608ebfe0f4" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.116:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.116:8080: connect: connection refused"
Jan 28 17:28:14 crc kubenswrapper[4811]: I0128 17:28:14.976950 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-qzbhp" event={"ID":"e3eb0e86-9952-4769-b877-e76c7c12b8e2","Type":"ContainerStarted","Data":"26f0cb8d8a616ead906239adf1f4466d5e1db5b635596fa410df2755a48aefb5"}
Jan 28 17:28:15 crc kubenswrapper[4811]: I0128 17:28:15.006136 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-db-sync-qzbhp" podStartSLOduration=3.29958754 podStartE2EDuration="11.006103827s" podCreationTimestamp="2026-01-28 17:28:04 +0000 UTC" firstStartedPulling="2026-01-28 17:28:06.017164493 +0000 UTC m=+6178.771528076" lastFinishedPulling="2026-01-28 17:28:13.72368078 +0000 UTC m=+6186.478044363" observedRunningTime="2026-01-28 17:28:14.992797856 +0000 UTC m=+6187.747161439" watchObservedRunningTime="2026-01-28 17:28:15.006103827 +0000 UTC m=+6187.760467420"
Jan 28 17:28:15 crc kubenswrapper[4811]: I0128 17:28:15.993185 4811 generic.go:334] "Generic (PLEG): container finished" podID="e3eb0e86-9952-4769-b877-e76c7c12b8e2" containerID="26f0cb8d8a616ead906239adf1f4466d5e1db5b635596fa410df2755a48aefb5" exitCode=0
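The startup-latency entry above is the first in this window with a real image pull, and it shows how the two durations relate: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp (11.006s), while podStartSLOduration additionally subtracts the pull window lastFinishedPulling minus firstStartedPulling (7.707s), leaving 3.300s; when nothing was pulled the pull timestamps stay at the zero value and the two durations match. A small check of that arithmetic (a simplified model of the tracker, not its exact code):

    package main

    import (
    	"fmt"
    	"time"
    )

    // sloDuration models podStartSLOduration: end-to-end startup time minus
    // time spent pulling images; zero pull timestamps mean no pull happened.
    func sloDuration(created, firstPull, lastPull, running time.Time) (slo, e2e time.Duration) {
    	e2e = running.Sub(created)
    	slo = e2e
    	if !firstPull.IsZero() {
    		slo -= lastPull.Sub(firstPull)
    	}
    	return slo, e2e
    }

    func main() {
    	const layout = "2006-01-02 15:04:05 -0700 MST" // fractional seconds parse implicitly
    	parse := func(s string) time.Time {
    		t, err := time.Parse(layout, s)
    		if err != nil {
    			panic(err)
    		}
    		return t
    	}
    	slo, e2e := sloDuration(
    		parse("2026-01-28 17:28:04 +0000 UTC"),           // podCreationTimestamp
    		parse("2026-01-28 17:28:06.017164493 +0000 UTC"), // firstStartedPulling
    		parse("2026-01-28 17:28:13.72368078 +0000 UTC"),  // lastFinishedPulling
    		parse("2026-01-28 17:28:15.006103827 +0000 UTC"), // watchObservedRunningTime
    	)
    	fmt.Println(slo, e2e) // 3.29958754s 11.006103827s, matching the log entry
    }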
podID="e3eb0e86-9952-4769-b877-e76c7c12b8e2" containerID="26f0cb8d8a616ead906239adf1f4466d5e1db5b635596fa410df2755a48aefb5" exitCode=0 Jan 28 17:28:15 crc kubenswrapper[4811]: I0128 17:28:15.993508 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-qzbhp" event={"ID":"e3eb0e86-9952-4769-b877-e76c7c12b8e2","Type":"ContainerDied","Data":"26f0cb8d8a616ead906239adf1f4466d5e1db5b635596fa410df2755a48aefb5"} Jan 28 17:28:17 crc kubenswrapper[4811]: I0128 17:28:17.391796 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-qzbhp" Jan 28 17:28:17 crc kubenswrapper[4811]: I0128 17:28:17.553737 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3eb0e86-9952-4769-b877-e76c7c12b8e2-config-data\") pod \"e3eb0e86-9952-4769-b877-e76c7c12b8e2\" (UID: \"e3eb0e86-9952-4769-b877-e76c7c12b8e2\") " Jan 28 17:28:17 crc kubenswrapper[4811]: I0128 17:28:17.553929 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3eb0e86-9952-4769-b877-e76c7c12b8e2-combined-ca-bundle\") pod \"e3eb0e86-9952-4769-b877-e76c7c12b8e2\" (UID: \"e3eb0e86-9952-4769-b877-e76c7c12b8e2\") " Jan 28 17:28:17 crc kubenswrapper[4811]: I0128 17:28:17.553987 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qsh75\" (UniqueName: \"kubernetes.io/projected/e3eb0e86-9952-4769-b877-e76c7c12b8e2-kube-api-access-qsh75\") pod \"e3eb0e86-9952-4769-b877-e76c7c12b8e2\" (UID: \"e3eb0e86-9952-4769-b877-e76c7c12b8e2\") " Jan 28 17:28:17 crc kubenswrapper[4811]: I0128 17:28:17.559124 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3eb0e86-9952-4769-b877-e76c7c12b8e2-kube-api-access-qsh75" (OuterVolumeSpecName: "kube-api-access-qsh75") pod "e3eb0e86-9952-4769-b877-e76c7c12b8e2" (UID: "e3eb0e86-9952-4769-b877-e76c7c12b8e2"). InnerVolumeSpecName "kube-api-access-qsh75". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:28:17 crc kubenswrapper[4811]: I0128 17:28:17.586676 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3eb0e86-9952-4769-b877-e76c7c12b8e2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e3eb0e86-9952-4769-b877-e76c7c12b8e2" (UID: "e3eb0e86-9952-4769-b877-e76c7c12b8e2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:28:17 crc kubenswrapper[4811]: I0128 17:28:17.623671 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3eb0e86-9952-4769-b877-e76c7c12b8e2-config-data" (OuterVolumeSpecName: "config-data") pod "e3eb0e86-9952-4769-b877-e76c7c12b8e2" (UID: "e3eb0e86-9952-4769-b877-e76c7c12b8e2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:28:17 crc kubenswrapper[4811]: I0128 17:28:17.655943 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3eb0e86-9952-4769-b877-e76c7c12b8e2-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 17:28:17 crc kubenswrapper[4811]: I0128 17:28:17.655978 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3eb0e86-9952-4769-b877-e76c7c12b8e2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 17:28:17 crc kubenswrapper[4811]: I0128 17:28:17.655988 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qsh75\" (UniqueName: \"kubernetes.io/projected/e3eb0e86-9952-4769-b877-e76c7c12b8e2-kube-api-access-qsh75\") on node \"crc\" DevicePath \"\"" Jan 28 17:28:18 crc kubenswrapper[4811]: I0128 17:28:18.013595 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-qzbhp" event={"ID":"e3eb0e86-9952-4769-b877-e76c7c12b8e2","Type":"ContainerDied","Data":"06bcd5ff0c0110cc87a39e36689f64a79fb1f926d4268d07546e32415953e717"} Jan 28 17:28:18 crc kubenswrapper[4811]: I0128 17:28:18.014043 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="06bcd5ff0c0110cc87a39e36689f64a79fb1f926d4268d07546e32415953e717" Jan 28 17:28:18 crc kubenswrapper[4811]: I0128 17:28:18.013651 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-qzbhp" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.055653 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-engine-57b6c595c4-c879c"] Jan 28 17:28:19 crc kubenswrapper[4811]: E0128 17:28:19.056451 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3eb0e86-9952-4769-b877-e76c7c12b8e2" containerName="heat-db-sync" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.056466 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3eb0e86-9952-4769-b877-e76c7c12b8e2" containerName="heat-db-sync" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.056699 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3eb0e86-9952-4769-b877-e76c7c12b8e2" containerName="heat-db-sync" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.057453 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-57b6c595c4-c879c" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.060901 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.061097 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-r92d2" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.062621 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-engine-config-data" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.068407 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-57b6c595c4-c879c"] Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.147869 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-6f666d6f4f-6sf5f"] Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.149277 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-6f666d6f4f-6sf5f" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.151559 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-cfnapi-config-data" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.169872 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-6f666d6f4f-6sf5f"] Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.188538 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7efd6d3f-0f9f-4096-a40f-37610326c0a4-config-data-custom\") pod \"heat-engine-57b6c595c4-c879c\" (UID: \"7efd6d3f-0f9f-4096-a40f-37610326c0a4\") " pod="openstack/heat-engine-57b6c595c4-c879c" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.188718 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qq4nl\" (UniqueName: \"kubernetes.io/projected/7efd6d3f-0f9f-4096-a40f-37610326c0a4-kube-api-access-qq4nl\") pod \"heat-engine-57b6c595c4-c879c\" (UID: \"7efd6d3f-0f9f-4096-a40f-37610326c0a4\") " pod="openstack/heat-engine-57b6c595c4-c879c" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.188792 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7efd6d3f-0f9f-4096-a40f-37610326c0a4-combined-ca-bundle\") pod \"heat-engine-57b6c595c4-c879c\" (UID: \"7efd6d3f-0f9f-4096-a40f-37610326c0a4\") " pod="openstack/heat-engine-57b6c595c4-c879c" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.188852 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7efd6d3f-0f9f-4096-a40f-37610326c0a4-config-data\") pod \"heat-engine-57b6c595c4-c879c\" (UID: \"7efd6d3f-0f9f-4096-a40f-37610326c0a4\") " pod="openstack/heat-engine-57b6c595c4-c879c" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.249377 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-c69bf9c77-tj5lc"] Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.250711 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-api-c69bf9c77-tj5lc" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.256683 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-api-config-data" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.258835 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-c69bf9c77-tj5lc"] Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.290495 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7efd6d3f-0f9f-4096-a40f-37610326c0a4-combined-ca-bundle\") pod \"heat-engine-57b6c595c4-c879c\" (UID: \"7efd6d3f-0f9f-4096-a40f-37610326c0a4\") " pod="openstack/heat-engine-57b6c595c4-c879c" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.290627 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7efd6d3f-0f9f-4096-a40f-37610326c0a4-config-data\") pod \"heat-engine-57b6c595c4-c879c\" (UID: \"7efd6d3f-0f9f-4096-a40f-37610326c0a4\") " pod="openstack/heat-engine-57b6c595c4-c879c" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.291540 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5108be06-d041-4e4d-9655-1de093f3b10a-config-data\") pod \"heat-cfnapi-6f666d6f4f-6sf5f\" (UID: \"5108be06-d041-4e4d-9655-1de093f3b10a\") " pod="openstack/heat-cfnapi-6f666d6f4f-6sf5f" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.291596 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7efd6d3f-0f9f-4096-a40f-37610326c0a4-config-data-custom\") pod \"heat-engine-57b6c595c4-c879c\" (UID: \"7efd6d3f-0f9f-4096-a40f-37610326c0a4\") " pod="openstack/heat-engine-57b6c595c4-c879c" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.291625 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqgtl\" (UniqueName: \"kubernetes.io/projected/5108be06-d041-4e4d-9655-1de093f3b10a-kube-api-access-mqgtl\") pod \"heat-cfnapi-6f666d6f4f-6sf5f\" (UID: \"5108be06-d041-4e4d-9655-1de093f3b10a\") " pod="openstack/heat-cfnapi-6f666d6f4f-6sf5f" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.291670 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5108be06-d041-4e4d-9655-1de093f3b10a-combined-ca-bundle\") pod \"heat-cfnapi-6f666d6f4f-6sf5f\" (UID: \"5108be06-d041-4e4d-9655-1de093f3b10a\") " pod="openstack/heat-cfnapi-6f666d6f4f-6sf5f" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.291712 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5108be06-d041-4e4d-9655-1de093f3b10a-config-data-custom\") pod \"heat-cfnapi-6f666d6f4f-6sf5f\" (UID: \"5108be06-d041-4e4d-9655-1de093f3b10a\") " pod="openstack/heat-cfnapi-6f666d6f4f-6sf5f" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.292061 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qq4nl\" (UniqueName: \"kubernetes.io/projected/7efd6d3f-0f9f-4096-a40f-37610326c0a4-kube-api-access-qq4nl\") pod \"heat-engine-57b6c595c4-c879c\" (UID: 
\"7efd6d3f-0f9f-4096-a40f-37610326c0a4\") " pod="openstack/heat-engine-57b6c595c4-c879c" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.298113 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7efd6d3f-0f9f-4096-a40f-37610326c0a4-combined-ca-bundle\") pod \"heat-engine-57b6c595c4-c879c\" (UID: \"7efd6d3f-0f9f-4096-a40f-37610326c0a4\") " pod="openstack/heat-engine-57b6c595c4-c879c" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.298494 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7efd6d3f-0f9f-4096-a40f-37610326c0a4-config-data\") pod \"heat-engine-57b6c595c4-c879c\" (UID: \"7efd6d3f-0f9f-4096-a40f-37610326c0a4\") " pod="openstack/heat-engine-57b6c595c4-c879c" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.303252 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7efd6d3f-0f9f-4096-a40f-37610326c0a4-config-data-custom\") pod \"heat-engine-57b6c595c4-c879c\" (UID: \"7efd6d3f-0f9f-4096-a40f-37610326c0a4\") " pod="openstack/heat-engine-57b6c595c4-c879c" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.313420 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qq4nl\" (UniqueName: \"kubernetes.io/projected/7efd6d3f-0f9f-4096-a40f-37610326c0a4-kube-api-access-qq4nl\") pod \"heat-engine-57b6c595c4-c879c\" (UID: \"7efd6d3f-0f9f-4096-a40f-37610326c0a4\") " pod="openstack/heat-engine-57b6c595c4-c879c" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.378371 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-57b6c595c4-c879c" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.393741 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5108be06-d041-4e4d-9655-1de093f3b10a-config-data\") pod \"heat-cfnapi-6f666d6f4f-6sf5f\" (UID: \"5108be06-d041-4e4d-9655-1de093f3b10a\") " pod="openstack/heat-cfnapi-6f666d6f4f-6sf5f" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.393822 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqgtl\" (UniqueName: \"kubernetes.io/projected/5108be06-d041-4e4d-9655-1de093f3b10a-kube-api-access-mqgtl\") pod \"heat-cfnapi-6f666d6f4f-6sf5f\" (UID: \"5108be06-d041-4e4d-9655-1de093f3b10a\") " pod="openstack/heat-cfnapi-6f666d6f4f-6sf5f" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.393864 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5108be06-d041-4e4d-9655-1de093f3b10a-combined-ca-bundle\") pod \"heat-cfnapi-6f666d6f4f-6sf5f\" (UID: \"5108be06-d041-4e4d-9655-1de093f3b10a\") " pod="openstack/heat-cfnapi-6f666d6f4f-6sf5f" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.393889 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8rdr\" (UniqueName: \"kubernetes.io/projected/5b54a3b7-5ff0-4b4c-ae89-a052aa412ebe-kube-api-access-w8rdr\") pod \"heat-api-c69bf9c77-tj5lc\" (UID: \"5b54a3b7-5ff0-4b4c-ae89-a052aa412ebe\") " pod="openstack/heat-api-c69bf9c77-tj5lc" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.393922 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b54a3b7-5ff0-4b4c-ae89-a052aa412ebe-combined-ca-bundle\") pod \"heat-api-c69bf9c77-tj5lc\" (UID: \"5b54a3b7-5ff0-4b4c-ae89-a052aa412ebe\") " pod="openstack/heat-api-c69bf9c77-tj5lc" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.393950 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5108be06-d041-4e4d-9655-1de093f3b10a-config-data-custom\") pod \"heat-cfnapi-6f666d6f4f-6sf5f\" (UID: \"5108be06-d041-4e4d-9655-1de093f3b10a\") " pod="openstack/heat-cfnapi-6f666d6f4f-6sf5f" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.394015 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5b54a3b7-5ff0-4b4c-ae89-a052aa412ebe-config-data-custom\") pod \"heat-api-c69bf9c77-tj5lc\" (UID: \"5b54a3b7-5ff0-4b4c-ae89-a052aa412ebe\") " pod="openstack/heat-api-c69bf9c77-tj5lc" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.394089 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b54a3b7-5ff0-4b4c-ae89-a052aa412ebe-config-data\") pod \"heat-api-c69bf9c77-tj5lc\" (UID: \"5b54a3b7-5ff0-4b4c-ae89-a052aa412ebe\") " pod="openstack/heat-api-c69bf9c77-tj5lc" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.401068 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5108be06-d041-4e4d-9655-1de093f3b10a-combined-ca-bundle\") pod \"heat-cfnapi-6f666d6f4f-6sf5f\" (UID: \"5108be06-d041-4e4d-9655-1de093f3b10a\") " pod="openstack/heat-cfnapi-6f666d6f4f-6sf5f" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.401269 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5108be06-d041-4e4d-9655-1de093f3b10a-config-data-custom\") pod \"heat-cfnapi-6f666d6f4f-6sf5f\" (UID: \"5108be06-d041-4e4d-9655-1de093f3b10a\") " pod="openstack/heat-cfnapi-6f666d6f4f-6sf5f" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.404573 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5108be06-d041-4e4d-9655-1de093f3b10a-config-data\") pod \"heat-cfnapi-6f666d6f4f-6sf5f\" (UID: \"5108be06-d041-4e4d-9655-1de093f3b10a\") " pod="openstack/heat-cfnapi-6f666d6f4f-6sf5f" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.411940 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqgtl\" (UniqueName: \"kubernetes.io/projected/5108be06-d041-4e4d-9655-1de093f3b10a-kube-api-access-mqgtl\") pod \"heat-cfnapi-6f666d6f4f-6sf5f\" (UID: \"5108be06-d041-4e4d-9655-1de093f3b10a\") " pod="openstack/heat-cfnapi-6f666d6f4f-6sf5f" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.471571 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-6f666d6f4f-6sf5f" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.498896 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8rdr\" (UniqueName: \"kubernetes.io/projected/5b54a3b7-5ff0-4b4c-ae89-a052aa412ebe-kube-api-access-w8rdr\") pod \"heat-api-c69bf9c77-tj5lc\" (UID: \"5b54a3b7-5ff0-4b4c-ae89-a052aa412ebe\") " pod="openstack/heat-api-c69bf9c77-tj5lc" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.498984 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b54a3b7-5ff0-4b4c-ae89-a052aa412ebe-combined-ca-bundle\") pod \"heat-api-c69bf9c77-tj5lc\" (UID: \"5b54a3b7-5ff0-4b4c-ae89-a052aa412ebe\") " pod="openstack/heat-api-c69bf9c77-tj5lc" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.499217 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5b54a3b7-5ff0-4b4c-ae89-a052aa412ebe-config-data-custom\") pod \"heat-api-c69bf9c77-tj5lc\" (UID: \"5b54a3b7-5ff0-4b4c-ae89-a052aa412ebe\") " pod="openstack/heat-api-c69bf9c77-tj5lc" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.499492 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b54a3b7-5ff0-4b4c-ae89-a052aa412ebe-config-data\") pod \"heat-api-c69bf9c77-tj5lc\" (UID: \"5b54a3b7-5ff0-4b4c-ae89-a052aa412ebe\") " pod="openstack/heat-api-c69bf9c77-tj5lc" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.504524 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5b54a3b7-5ff0-4b4c-ae89-a052aa412ebe-config-data-custom\") pod \"heat-api-c69bf9c77-tj5lc\" (UID: \"5b54a3b7-5ff0-4b4c-ae89-a052aa412ebe\") " pod="openstack/heat-api-c69bf9c77-tj5lc" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.505561 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b54a3b7-5ff0-4b4c-ae89-a052aa412ebe-combined-ca-bundle\") pod \"heat-api-c69bf9c77-tj5lc\" (UID: \"5b54a3b7-5ff0-4b4c-ae89-a052aa412ebe\") " pod="openstack/heat-api-c69bf9c77-tj5lc" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.505946 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b54a3b7-5ff0-4b4c-ae89-a052aa412ebe-config-data\") pod \"heat-api-c69bf9c77-tj5lc\" (UID: \"5b54a3b7-5ff0-4b4c-ae89-a052aa412ebe\") " pod="openstack/heat-api-c69bf9c77-tj5lc" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.518400 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8rdr\" (UniqueName: \"kubernetes.io/projected/5b54a3b7-5ff0-4b4c-ae89-a052aa412ebe-kube-api-access-w8rdr\") pod \"heat-api-c69bf9c77-tj5lc\" (UID: \"5b54a3b7-5ff0-4b4c-ae89-a052aa412ebe\") " pod="openstack/heat-api-c69bf9c77-tj5lc" Jan 28 17:28:19 crc kubenswrapper[4811]: I0128 17:28:19.578043 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-api-c69bf9c77-tj5lc" Jan 28 17:28:20 crc kubenswrapper[4811]: I0128 17:28:20.010306 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-57b6c595c4-c879c"] Jan 28 17:28:20 crc kubenswrapper[4811]: I0128 17:28:20.034587 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-57b6c595c4-c879c" event={"ID":"7efd6d3f-0f9f-4096-a40f-37610326c0a4","Type":"ContainerStarted","Data":"8145dd5c1d70fb07ea9f6b2f70747e5e468d604a6aaa1bfa829e6220e2c36abf"} Jan 28 17:28:20 crc kubenswrapper[4811]: W0128 17:28:20.146356 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5108be06_d041_4e4d_9655_1de093f3b10a.slice/crio-768ea9765c94aaeb3af3de7f912a30d6083ab2136d2367c54f56a040c184e406 WatchSource:0}: Error finding container 768ea9765c94aaeb3af3de7f912a30d6083ab2136d2367c54f56a040c184e406: Status 404 returned error can't find the container with id 768ea9765c94aaeb3af3de7f912a30d6083ab2136d2367c54f56a040c184e406 Jan 28 17:28:20 crc kubenswrapper[4811]: I0128 17:28:20.153943 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-6f666d6f4f-6sf5f"] Jan 28 17:28:20 crc kubenswrapper[4811]: I0128 17:28:20.250855 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-c69bf9c77-tj5lc"] Jan 28 17:28:20 crc kubenswrapper[4811]: W0128 17:28:20.252350 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5b54a3b7_5ff0_4b4c_ae89_a052aa412ebe.slice/crio-eeaea852ee14f6cf226138cc75b2c397d9341f82f525e75d620ab8313560a3b7 WatchSource:0}: Error finding container eeaea852ee14f6cf226138cc75b2c397d9341f82f525e75d620ab8313560a3b7: Status 404 returned error can't find the container with id eeaea852ee14f6cf226138cc75b2c397d9341f82f525e75d620ab8313560a3b7 Jan 28 17:28:21 crc kubenswrapper[4811]: I0128 17:28:21.159961 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-c69bf9c77-tj5lc" event={"ID":"5b54a3b7-5ff0-4b4c-ae89-a052aa412ebe","Type":"ContainerStarted","Data":"eeaea852ee14f6cf226138cc75b2c397d9341f82f525e75d620ab8313560a3b7"} Jan 28 17:28:21 crc kubenswrapper[4811]: I0128 17:28:21.182827 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-57b6c595c4-c879c" event={"ID":"7efd6d3f-0f9f-4096-a40f-37610326c0a4","Type":"ContainerStarted","Data":"4952c4e9b2a84ca786af02d6dc83ce75184bcb4687c4a2b0f63583ec10491ecc"} Jan 28 17:28:21 crc kubenswrapper[4811]: I0128 17:28:21.183106 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-engine-57b6c595c4-c879c" Jan 28 17:28:21 crc kubenswrapper[4811]: I0128 17:28:21.189123 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-6f666d6f4f-6sf5f" event={"ID":"5108be06-d041-4e4d-9655-1de093f3b10a","Type":"ContainerStarted","Data":"768ea9765c94aaeb3af3de7f912a30d6083ab2136d2367c54f56a040c184e406"} Jan 28 17:28:21 crc kubenswrapper[4811]: I0128 17:28:21.213504 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-engine-57b6c595c4-c879c" podStartSLOduration=2.213480835 podStartE2EDuration="2.213480835s" podCreationTimestamp="2026-01-28 17:28:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:28:21.202071605 +0000 UTC m=+6193.956435208" 
watchObservedRunningTime="2026-01-28 17:28:21.213480835 +0000 UTC m=+6193.967844418" Jan 28 17:28:25 crc kubenswrapper[4811]: I0128 17:28:25.229870 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-c69bf9c77-tj5lc" event={"ID":"5b54a3b7-5ff0-4b4c-ae89-a052aa412ebe","Type":"ContainerStarted","Data":"728ff151473e006bbd506f152476fc405c584329943b4f1858e0bf3c97d1c906"} Jan 28 17:28:25 crc kubenswrapper[4811]: I0128 17:28:25.230376 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-c69bf9c77-tj5lc" Jan 28 17:28:25 crc kubenswrapper[4811]: I0128 17:28:25.232122 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-6f666d6f4f-6sf5f" event={"ID":"5108be06-d041-4e4d-9655-1de093f3b10a","Type":"ContainerStarted","Data":"f74b6665237fb9a6d5c732fdcac34f3a4893cd5a29a396d3abf2e63fa56f7d74"} Jan 28 17:28:25 crc kubenswrapper[4811]: I0128 17:28:25.232406 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-6f666d6f4f-6sf5f" Jan 28 17:28:25 crc kubenswrapper[4811]: I0128 17:28:25.257801 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-c69bf9c77-tj5lc" podStartSLOduration=2.385896402 podStartE2EDuration="6.257781472s" podCreationTimestamp="2026-01-28 17:28:19 +0000 UTC" firstStartedPulling="2026-01-28 17:28:20.254708604 +0000 UTC m=+6193.009072187" lastFinishedPulling="2026-01-28 17:28:24.126593674 +0000 UTC m=+6196.880957257" observedRunningTime="2026-01-28 17:28:25.245186599 +0000 UTC m=+6197.999550182" watchObservedRunningTime="2026-01-28 17:28:25.257781472 +0000 UTC m=+6198.012145055" Jan 28 17:28:25 crc kubenswrapper[4811]: I0128 17:28:25.265113 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-cfnapi-6f666d6f4f-6sf5f" podStartSLOduration=2.287519841 podStartE2EDuration="6.265091951s" podCreationTimestamp="2026-01-28 17:28:19 +0000 UTC" firstStartedPulling="2026-01-28 17:28:20.150025132 +0000 UTC m=+6192.904388715" lastFinishedPulling="2026-01-28 17:28:24.127597242 +0000 UTC m=+6196.881960825" observedRunningTime="2026-01-28 17:28:25.263917329 +0000 UTC m=+6198.018280922" watchObservedRunningTime="2026-01-28 17:28:25.265091951 +0000 UTC m=+6198.019455534" Jan 28 17:28:26 crc kubenswrapper[4811]: I0128 17:28:26.449841 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-7877575757-66k4b" Jan 28 17:28:28 crc kubenswrapper[4811]: I0128 17:28:28.350980 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-7877575757-66k4b" Jan 28 17:28:28 crc kubenswrapper[4811]: I0128 17:28:28.416873 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5ccf78dbdf-rqvtn"] Jan 28 17:28:28 crc kubenswrapper[4811]: I0128 17:28:28.417148 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5ccf78dbdf-rqvtn" podUID="3c3541c8-9738-4b90-92cd-d8676444ef01" containerName="horizon-log" containerID="cri-o://b3f2c6f6d78081092b8603383147e066d98fa0c30d74c32eab677f3241ecbd27" gracePeriod=30 Jan 28 17:28:28 crc kubenswrapper[4811]: I0128 17:28:28.417233 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5ccf78dbdf-rqvtn" podUID="3c3541c8-9738-4b90-92cd-d8676444ef01" containerName="horizon" containerID="cri-o://d745390f451d5e572a29326b925407075300afd39e37d98e60d311db5c5c612d" gracePeriod=30 Jan 28 17:28:30 crc 
Jan 28 17:28:30 crc kubenswrapper[4811]: I0128 17:28:30.442892 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-grxks"
Jan 28 17:28:30 crc kubenswrapper[4811]: I0128 17:28:30.452804 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-grxks"]
Jan 28 17:28:30 crc kubenswrapper[4811]: I0128 17:28:30.483589 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f5fbe9d-0807-4528-a821-6b3ecd64236f-catalog-content\") pod \"community-operators-grxks\" (UID: \"8f5fbe9d-0807-4528-a821-6b3ecd64236f\") " pod="openshift-marketplace/community-operators-grxks"
Jan 28 17:28:30 crc kubenswrapper[4811]: I0128 17:28:30.483644 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9m8m\" (UniqueName: \"kubernetes.io/projected/8f5fbe9d-0807-4528-a821-6b3ecd64236f-kube-api-access-j9m8m\") pod \"community-operators-grxks\" (UID: \"8f5fbe9d-0807-4528-a821-6b3ecd64236f\") " pod="openshift-marketplace/community-operators-grxks"
Jan 28 17:28:30 crc kubenswrapper[4811]: I0128 17:28:30.484271 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f5fbe9d-0807-4528-a821-6b3ecd64236f-utilities\") pod \"community-operators-grxks\" (UID: \"8f5fbe9d-0807-4528-a821-6b3ecd64236f\") " pod="openshift-marketplace/community-operators-grxks"
Jan 28 17:28:30 crc kubenswrapper[4811]: I0128 17:28:30.585915 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9m8m\" (UniqueName: \"kubernetes.io/projected/8f5fbe9d-0807-4528-a821-6b3ecd64236f-kube-api-access-j9m8m\") pod \"community-operators-grxks\" (UID: \"8f5fbe9d-0807-4528-a821-6b3ecd64236f\") " pod="openshift-marketplace/community-operators-grxks"
Jan 28 17:28:30 crc kubenswrapper[4811]: I0128 17:28:30.586188 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f5fbe9d-0807-4528-a821-6b3ecd64236f-utilities\") pod \"community-operators-grxks\" (UID: \"8f5fbe9d-0807-4528-a821-6b3ecd64236f\") " pod="openshift-marketplace/community-operators-grxks"
Jan 28 17:28:30 crc kubenswrapper[4811]: I0128 17:28:30.586263 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f5fbe9d-0807-4528-a821-6b3ecd64236f-catalog-content\") pod \"community-operators-grxks\" (UID: \"8f5fbe9d-0807-4528-a821-6b3ecd64236f\") " pod="openshift-marketplace/community-operators-grxks"
Jan 28 17:28:30 crc kubenswrapper[4811]: I0128 17:28:30.586821 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f5fbe9d-0807-4528-a821-6b3ecd64236f-catalog-content\") pod \"community-operators-grxks\" (UID: \"8f5fbe9d-0807-4528-a821-6b3ecd64236f\") " pod="openshift-marketplace/community-operators-grxks"
Jan 28 17:28:30 crc kubenswrapper[4811]: I0128 17:28:30.587537 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f5fbe9d-0807-4528-a821-6b3ecd64236f-utilities\") pod \"community-operators-grxks\" (UID: \"8f5fbe9d-0807-4528-a821-6b3ecd64236f\") " pod="openshift-marketplace/community-operators-grxks"
\"kubernetes.io/empty-dir/8f5fbe9d-0807-4528-a821-6b3ecd64236f-utilities\") pod \"community-operators-grxks\" (UID: \"8f5fbe9d-0807-4528-a821-6b3ecd64236f\") " pod="openshift-marketplace/community-operators-grxks" Jan 28 17:28:30 crc kubenswrapper[4811]: I0128 17:28:30.608029 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9m8m\" (UniqueName: \"kubernetes.io/projected/8f5fbe9d-0807-4528-a821-6b3ecd64236f-kube-api-access-j9m8m\") pod \"community-operators-grxks\" (UID: \"8f5fbe9d-0807-4528-a821-6b3ecd64236f\") " pod="openshift-marketplace/community-operators-grxks" Jan 28 17:28:30 crc kubenswrapper[4811]: I0128 17:28:30.812111 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-grxks" Jan 28 17:28:31 crc kubenswrapper[4811]: I0128 17:28:31.205657 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-cfnapi-6f666d6f4f-6sf5f" Jan 28 17:28:31 crc kubenswrapper[4811]: I0128 17:28:31.459398 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-api-c69bf9c77-tj5lc" Jan 28 17:28:31 crc kubenswrapper[4811]: I0128 17:28:31.478499 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-grxks"] Jan 28 17:28:32 crc kubenswrapper[4811]: I0128 17:28:32.291588 4811 generic.go:334] "Generic (PLEG): container finished" podID="8f5fbe9d-0807-4528-a821-6b3ecd64236f" containerID="2d581135f6abbe5c735c9ba7434bcb0cbf15ff7f8bcd76acdab92e847ab92a17" exitCode=0 Jan 28 17:28:32 crc kubenswrapper[4811]: I0128 17:28:32.291899 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grxks" event={"ID":"8f5fbe9d-0807-4528-a821-6b3ecd64236f","Type":"ContainerDied","Data":"2d581135f6abbe5c735c9ba7434bcb0cbf15ff7f8bcd76acdab92e847ab92a17"} Jan 28 17:28:32 crc kubenswrapper[4811]: I0128 17:28:32.291927 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grxks" event={"ID":"8f5fbe9d-0807-4528-a821-6b3ecd64236f","Type":"ContainerStarted","Data":"172e6b6773c6d3f2c9c47ad15079328cf0d4cd7378d38fb94fe2efe0d864aa04"} Jan 28 17:28:32 crc kubenswrapper[4811]: I0128 17:28:32.294236 4811 generic.go:334] "Generic (PLEG): container finished" podID="3c3541c8-9738-4b90-92cd-d8676444ef01" containerID="d745390f451d5e572a29326b925407075300afd39e37d98e60d311db5c5c612d" exitCode=0 Jan 28 17:28:32 crc kubenswrapper[4811]: I0128 17:28:32.294283 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5ccf78dbdf-rqvtn" event={"ID":"3c3541c8-9738-4b90-92cd-d8676444ef01","Type":"ContainerDied","Data":"d745390f451d5e572a29326b925407075300afd39e37d98e60d311db5c5c612d"} Jan 28 17:28:32 crc kubenswrapper[4811]: I0128 17:28:32.343301 4811 scope.go:117] "RemoveContainer" containerID="85a5f554cfb121a592972aa192922edb9cdbdc5399a5c7364bfafe964e55b4ab" Jan 28 17:28:32 crc kubenswrapper[4811]: I0128 17:28:32.392594 4811 scope.go:117] "RemoveContainer" containerID="0575bbaea2b2aa570c7a1620ff0b4f4ac859eb3031572cf984ec3bc3b806de78" Jan 28 17:28:32 crc kubenswrapper[4811]: I0128 17:28:32.421470 4811 scope.go:117] "RemoveContainer" containerID="1e1733d378109a00a97c562acd9b732f488471f80b7f3d012d93184f9c7b6e2a" Jan 28 17:28:32 crc kubenswrapper[4811]: I0128 17:28:32.484589 4811 scope.go:117] "RemoveContainer" containerID="390f2c5b66b556e2fab0b468ed4224432b34169005478c36879593310fea6e27" Jan 28 17:28:32 crc 
Jan 28 17:28:32 crc kubenswrapper[4811]: I0128 17:28:32.615818 4811 scope.go:117] "RemoveContainer" containerID="36a7ab5f4dfcb0bd6c4af9a30d7c3d2d0a826de22e2a78c1b89b06f38ce51223"
Jan 28 17:28:37 crc kubenswrapper[4811]: I0128 17:28:37.283393 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5ccf78dbdf-rqvtn" podUID="3c3541c8-9738-4b90-92cd-d8676444ef01" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.113:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.113:8080: connect: connection refused"
Jan 28 17:28:39 crc kubenswrapper[4811]: I0128 17:28:39.361639 4811 generic.go:334] "Generic (PLEG): container finished" podID="8f5fbe9d-0807-4528-a821-6b3ecd64236f" containerID="a67f65811c9902ca22a5051c67db15d42e8df10c1002a82f8da74643dcaca03a" exitCode=0
Jan 28 17:28:39 crc kubenswrapper[4811]: I0128 17:28:39.361691 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grxks" event={"ID":"8f5fbe9d-0807-4528-a821-6b3ecd64236f","Type":"ContainerDied","Data":"a67f65811c9902ca22a5051c67db15d42e8df10c1002a82f8da74643dcaca03a"}
Jan 28 17:28:39 crc kubenswrapper[4811]: I0128 17:28:39.412650 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-engine-57b6c595c4-c879c"
Jan 28 17:28:43 crc kubenswrapper[4811]: I0128 17:28:43.053284 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-h8njg"]
Jan 28 17:28:43 crc kubenswrapper[4811]: I0128 17:28:43.065553 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-h8njg"]
Jan 28 17:28:44 crc kubenswrapper[4811]: I0128 17:28:44.032519 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-dd96-account-create-update-vw7j6"]
Jan 28 17:28:44 crc kubenswrapper[4811]: I0128 17:28:44.043118 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-dd96-account-create-update-vw7j6"]
Jan 28 17:28:44 crc kubenswrapper[4811]: I0128 17:28:44.350492 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14f5c029-7f55-4c4a-a496-a068e9e96521" path="/var/lib/kubelet/pods/14f5c029-7f55-4c4a-a496-a068e9e96521/volumes"
Jan 28 17:28:44 crc kubenswrapper[4811]: I0128 17:28:44.368721 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f6c7f6b-5422-45d6-b899-c145e27f5010" path="/var/lib/kubelet/pods/6f6c7f6b-5422-45d6-b899-c145e27f5010/volumes"
Jan 28 17:28:45 crc kubenswrapper[4811]: I0128 17:28:45.427346 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grxks" event={"ID":"8f5fbe9d-0807-4528-a821-6b3ecd64236f","Type":"ContainerStarted","Data":"5e45c261f2e9acb50e24ede806beb579cd77feca432b5d138411ea4b349a5881"}
Jan 28 17:28:45 crc kubenswrapper[4811]: I0128 17:28:45.467198 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-grxks" podStartSLOduration=3.322333661 podStartE2EDuration="15.467158165s" podCreationTimestamp="2026-01-28 17:28:30 +0000 UTC" firstStartedPulling="2026-01-28 17:28:32.294024494 +0000 UTC m=+6205.048388067" lastFinishedPulling="2026-01-28 17:28:44.438848988 +0000 UTC m=+6217.193212571" observedRunningTime="2026-01-28 17:28:45.462858808 +0000 UTC m=+6218.217222401" watchObservedRunningTime="2026-01-28 17:28:45.467158165 +0000 UTC m=+6218.221521748"
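The `m=+6218.2...` suffixes in the latency entry above are the monotonic clock reading that Go's time.Time carries alongside the wall clock and that String() appends; in these fields it effectively counts seconds since the kubelet process started, so two such values can be subtracted safely even across wall-clock adjustments. A tiny demonstration:

    // Show the monotonic clock reading that produces the "m=+..." suffix.
    package main

    import (
    	"fmt"
    	"time"
    )

    func main() {
    	start := time.Now() // carries wall clock plus a monotonic reading
    	time.Sleep(50 * time.Millisecond)
    	now := time.Now()
    	fmt.Println(now)            // prints "... m=+0.05...", like the log fields
    	fmt.Println(now.Sub(start)) // subtraction uses the monotonic reading: ~50ms
    }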
watchObservedRunningTime="2026-01-28 17:28:45.467158165 +0000 UTC m=+6218.221521748" Jan 28 17:28:47 crc kubenswrapper[4811]: I0128 17:28:47.283930 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5ccf78dbdf-rqvtn" podUID="3c3541c8-9738-4b90-92cd-d8676444ef01" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.113:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.113:8080: connect: connection refused" Jan 28 17:28:50 crc kubenswrapper[4811]: I0128 17:28:50.813006 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-grxks" Jan 28 17:28:50 crc kubenswrapper[4811]: I0128 17:28:50.813451 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-grxks" Jan 28 17:28:51 crc kubenswrapper[4811]: I0128 17:28:51.864306 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-grxks" podUID="8f5fbe9d-0807-4528-a821-6b3ecd64236f" containerName="registry-server" probeResult="failure" output=< Jan 28 17:28:51 crc kubenswrapper[4811]: timeout: failed to connect service ":50051" within 1s Jan 28 17:28:51 crc kubenswrapper[4811]: > Jan 28 17:28:54 crc kubenswrapper[4811]: I0128 17:28:54.043054 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-zrw2c"] Jan 28 17:28:54 crc kubenswrapper[4811]: I0128 17:28:54.053812 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-zrw2c"] Jan 28 17:28:54 crc kubenswrapper[4811]: I0128 17:28:54.353253 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3320608-2d14-467b-9fc6-5aece23f7f84" path="/var/lib/kubelet/pods/e3320608-2d14-467b-9fc6-5aece23f7f84/volumes" Jan 28 17:28:57 crc kubenswrapper[4811]: I0128 17:28:57.283155 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5ccf78dbdf-rqvtn" podUID="3c3541c8-9738-4b90-92cd-d8676444ef01" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.113:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.113:8080: connect: connection refused" Jan 28 17:28:57 crc kubenswrapper[4811]: I0128 17:28:57.284725 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5ccf78dbdf-rqvtn" Jan 28 17:28:58 crc kubenswrapper[4811]: I0128 17:28:58.563384 4811 generic.go:334] "Generic (PLEG): container finished" podID="3c3541c8-9738-4b90-92cd-d8676444ef01" containerID="b3f2c6f6d78081092b8603383147e066d98fa0c30d74c32eab677f3241ecbd27" exitCode=137 Jan 28 17:28:58 crc kubenswrapper[4811]: I0128 17:28:58.563464 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5ccf78dbdf-rqvtn" event={"ID":"3c3541c8-9738-4b90-92cd-d8676444ef01","Type":"ContainerDied","Data":"b3f2c6f6d78081092b8603383147e066d98fa0c30d74c32eab677f3241ecbd27"} Jan 28 17:28:59 crc kubenswrapper[4811]: I0128 17:28:59.054788 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5ccf78dbdf-rqvtn" Jan 28 17:28:59 crc kubenswrapper[4811]: I0128 17:28:59.191135 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3c3541c8-9738-4b90-92cd-d8676444ef01-config-data\") pod \"3c3541c8-9738-4b90-92cd-d8676444ef01\" (UID: \"3c3541c8-9738-4b90-92cd-d8676444ef01\") " Jan 28 17:28:59 crc kubenswrapper[4811]: I0128 17:28:59.191325 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3c3541c8-9738-4b90-92cd-d8676444ef01-scripts\") pod \"3c3541c8-9738-4b90-92cd-d8676444ef01\" (UID: \"3c3541c8-9738-4b90-92cd-d8676444ef01\") " Jan 28 17:28:59 crc kubenswrapper[4811]: I0128 17:28:59.191354 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3c3541c8-9738-4b90-92cd-d8676444ef01-horizon-secret-key\") pod \"3c3541c8-9738-4b90-92cd-d8676444ef01\" (UID: \"3c3541c8-9738-4b90-92cd-d8676444ef01\") " Jan 28 17:28:59 crc kubenswrapper[4811]: I0128 17:28:59.191529 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c3541c8-9738-4b90-92cd-d8676444ef01-logs\") pod \"3c3541c8-9738-4b90-92cd-d8676444ef01\" (UID: \"3c3541c8-9738-4b90-92cd-d8676444ef01\") " Jan 28 17:28:59 crc kubenswrapper[4811]: I0128 17:28:59.191673 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4224m\" (UniqueName: \"kubernetes.io/projected/3c3541c8-9738-4b90-92cd-d8676444ef01-kube-api-access-4224m\") pod \"3c3541c8-9738-4b90-92cd-d8676444ef01\" (UID: \"3c3541c8-9738-4b90-92cd-d8676444ef01\") " Jan 28 17:28:59 crc kubenswrapper[4811]: I0128 17:28:59.192275 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c3541c8-9738-4b90-92cd-d8676444ef01-logs" (OuterVolumeSpecName: "logs") pod "3c3541c8-9738-4b90-92cd-d8676444ef01" (UID: "3c3541c8-9738-4b90-92cd-d8676444ef01"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:28:59 crc kubenswrapper[4811]: I0128 17:28:59.198718 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c3541c8-9738-4b90-92cd-d8676444ef01-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "3c3541c8-9738-4b90-92cd-d8676444ef01" (UID: "3c3541c8-9738-4b90-92cd-d8676444ef01"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:28:59 crc kubenswrapper[4811]: I0128 17:28:59.199031 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c3541c8-9738-4b90-92cd-d8676444ef01-kube-api-access-4224m" (OuterVolumeSpecName: "kube-api-access-4224m") pod "3c3541c8-9738-4b90-92cd-d8676444ef01" (UID: "3c3541c8-9738-4b90-92cd-d8676444ef01"). InnerVolumeSpecName "kube-api-access-4224m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:28:59 crc kubenswrapper[4811]: I0128 17:28:59.218184 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c3541c8-9738-4b90-92cd-d8676444ef01-scripts" (OuterVolumeSpecName: "scripts") pod "3c3541c8-9738-4b90-92cd-d8676444ef01" (UID: "3c3541c8-9738-4b90-92cd-d8676444ef01"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:28:59 crc kubenswrapper[4811]: I0128 17:28:59.223819 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c3541c8-9738-4b90-92cd-d8676444ef01-config-data" (OuterVolumeSpecName: "config-data") pod "3c3541c8-9738-4b90-92cd-d8676444ef01" (UID: "3c3541c8-9738-4b90-92cd-d8676444ef01"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:28:59 crc kubenswrapper[4811]: I0128 17:28:59.294073 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4224m\" (UniqueName: \"kubernetes.io/projected/3c3541c8-9738-4b90-92cd-d8676444ef01-kube-api-access-4224m\") on node \"crc\" DevicePath \"\"" Jan 28 17:28:59 crc kubenswrapper[4811]: I0128 17:28:59.294107 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3c3541c8-9738-4b90-92cd-d8676444ef01-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 17:28:59 crc kubenswrapper[4811]: I0128 17:28:59.294117 4811 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3c3541c8-9738-4b90-92cd-d8676444ef01-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Jan 28 17:28:59 crc kubenswrapper[4811]: I0128 17:28:59.294125 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3c3541c8-9738-4b90-92cd-d8676444ef01-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:28:59 crc kubenswrapper[4811]: I0128 17:28:59.294133 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c3541c8-9738-4b90-92cd-d8676444ef01-logs\") on node \"crc\" DevicePath \"\"" Jan 28 17:28:59 crc kubenswrapper[4811]: I0128 17:28:59.579365 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5ccf78dbdf-rqvtn" event={"ID":"3c3541c8-9738-4b90-92cd-d8676444ef01","Type":"ContainerDied","Data":"35583025378a9e2bcfa17cfa2f7bcefd369c4fbf53acb841723ae0ceea9c8f7f"} Jan 28 17:28:59 crc kubenswrapper[4811]: I0128 17:28:59.579606 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5ccf78dbdf-rqvtn" Jan 28 17:28:59 crc kubenswrapper[4811]: I0128 17:28:59.580268 4811 scope.go:117] "RemoveContainer" containerID="d745390f451d5e572a29326b925407075300afd39e37d98e60d311db5c5c612d" Jan 28 17:28:59 crc kubenswrapper[4811]: I0128 17:28:59.623867 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5ccf78dbdf-rqvtn"] Jan 28 17:28:59 crc kubenswrapper[4811]: I0128 17:28:59.635069 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-5ccf78dbdf-rqvtn"] Jan 28 17:28:59 crc kubenswrapper[4811]: I0128 17:28:59.753196 4811 scope.go:117] "RemoveContainer" containerID="b3f2c6f6d78081092b8603383147e066d98fa0c30d74c32eab677f3241ecbd27" Jan 28 17:29:00 crc kubenswrapper[4811]: I0128 17:29:00.352401 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c3541c8-9738-4b90-92cd-d8676444ef01" path="/var/lib/kubelet/pods/3c3541c8-9738-4b90-92cd-d8676444ef01/volumes" Jan 28 17:29:00 crc kubenswrapper[4811]: I0128 17:29:00.865145 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-grxks" Jan 28 17:29:00 crc kubenswrapper[4811]: I0128 17:29:00.919391 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-grxks" Jan 28 17:29:01 crc kubenswrapper[4811]: I0128 17:29:01.454152 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-grxks"] Jan 28 17:29:01 crc kubenswrapper[4811]: I0128 17:29:01.640186 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-j5blb"] Jan 28 17:29:01 crc kubenswrapper[4811]: I0128 17:29:01.640468 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-j5blb" podUID="2178b105-4528-4a3c-be83-c0c4280e40b5" containerName="registry-server" containerID="cri-o://8544410b22139b8422837506ce3f85d0e855893dd94d7de451d14e7b44fc0464" gracePeriod=2 Jan 28 17:29:02 crc kubenswrapper[4811]: I0128 17:29:02.616629 4811 generic.go:334] "Generic (PLEG): container finished" podID="2178b105-4528-4a3c-be83-c0c4280e40b5" containerID="8544410b22139b8422837506ce3f85d0e855893dd94d7de451d14e7b44fc0464" exitCode=0 Jan 28 17:29:02 crc kubenswrapper[4811]: I0128 17:29:02.617858 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j5blb" event={"ID":"2178b105-4528-4a3c-be83-c0c4280e40b5","Type":"ContainerDied","Data":"8544410b22139b8422837506ce3f85d0e855893dd94d7de451d14e7b44fc0464"} Jan 28 17:29:02 crc kubenswrapper[4811]: I0128 17:29:02.617889 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j5blb" event={"ID":"2178b105-4528-4a3c-be83-c0c4280e40b5","Type":"ContainerDied","Data":"07365b6ff136fa2c8f1149e71e50499266a9f62913c49f011cf09408e84effe0"} Jan 28 17:29:02 crc kubenswrapper[4811]: I0128 17:29:02.617902 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="07365b6ff136fa2c8f1149e71e50499266a9f62913c49f011cf09408e84effe0" Jan 28 17:29:02 crc kubenswrapper[4811]: I0128 17:29:02.732036 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-j5blb" Jan 28 17:29:02 crc kubenswrapper[4811]: I0128 17:29:02.868132 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2178b105-4528-4a3c-be83-c0c4280e40b5-catalog-content\") pod \"2178b105-4528-4a3c-be83-c0c4280e40b5\" (UID: \"2178b105-4528-4a3c-be83-c0c4280e40b5\") " Jan 28 17:29:02 crc kubenswrapper[4811]: I0128 17:29:02.868216 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2178b105-4528-4a3c-be83-c0c4280e40b5-utilities\") pod \"2178b105-4528-4a3c-be83-c0c4280e40b5\" (UID: \"2178b105-4528-4a3c-be83-c0c4280e40b5\") " Jan 28 17:29:02 crc kubenswrapper[4811]: I0128 17:29:02.868340 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pk8tg\" (UniqueName: \"kubernetes.io/projected/2178b105-4528-4a3c-be83-c0c4280e40b5-kube-api-access-pk8tg\") pod \"2178b105-4528-4a3c-be83-c0c4280e40b5\" (UID: \"2178b105-4528-4a3c-be83-c0c4280e40b5\") " Jan 28 17:29:02 crc kubenswrapper[4811]: I0128 17:29:02.874584 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2178b105-4528-4a3c-be83-c0c4280e40b5-kube-api-access-pk8tg" (OuterVolumeSpecName: "kube-api-access-pk8tg") pod "2178b105-4528-4a3c-be83-c0c4280e40b5" (UID: "2178b105-4528-4a3c-be83-c0c4280e40b5"). InnerVolumeSpecName "kube-api-access-pk8tg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:29:02 crc kubenswrapper[4811]: I0128 17:29:02.885100 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2178b105-4528-4a3c-be83-c0c4280e40b5-utilities" (OuterVolumeSpecName: "utilities") pod "2178b105-4528-4a3c-be83-c0c4280e40b5" (UID: "2178b105-4528-4a3c-be83-c0c4280e40b5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:29:02 crc kubenswrapper[4811]: I0128 17:29:02.970269 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2178b105-4528-4a3c-be83-c0c4280e40b5-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 17:29:02 crc kubenswrapper[4811]: I0128 17:29:02.970315 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pk8tg\" (UniqueName: \"kubernetes.io/projected/2178b105-4528-4a3c-be83-c0c4280e40b5-kube-api-access-pk8tg\") on node \"crc\" DevicePath \"\"" Jan 28 17:29:02 crc kubenswrapper[4811]: I0128 17:29:02.981635 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2178b105-4528-4a3c-be83-c0c4280e40b5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2178b105-4528-4a3c-be83-c0c4280e40b5" (UID: "2178b105-4528-4a3c-be83-c0c4280e40b5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:29:03 crc kubenswrapper[4811]: I0128 17:29:03.072906 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2178b105-4528-4a3c-be83-c0c4280e40b5-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 17:29:03 crc kubenswrapper[4811]: I0128 17:29:03.625467 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-j5blb" Jan 28 17:29:03 crc kubenswrapper[4811]: I0128 17:29:03.680130 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-j5blb"] Jan 28 17:29:03 crc kubenswrapper[4811]: I0128 17:29:03.692565 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-j5blb"] Jan 28 17:29:04 crc kubenswrapper[4811]: I0128 17:29:04.362550 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2178b105-4528-4a3c-be83-c0c4280e40b5" path="/var/lib/kubelet/pods/2178b105-4528-4a3c-be83-c0c4280e40b5/volumes" Jan 28 17:29:30 crc kubenswrapper[4811]: I0128 17:29:30.812999 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws"] Jan 28 17:29:30 crc kubenswrapper[4811]: E0128 17:29:30.814045 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2178b105-4528-4a3c-be83-c0c4280e40b5" containerName="extract-content" Jan 28 17:29:30 crc kubenswrapper[4811]: I0128 17:29:30.814063 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="2178b105-4528-4a3c-be83-c0c4280e40b5" containerName="extract-content" Jan 28 17:29:30 crc kubenswrapper[4811]: E0128 17:29:30.814075 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2178b105-4528-4a3c-be83-c0c4280e40b5" containerName="extract-utilities" Jan 28 17:29:30 crc kubenswrapper[4811]: I0128 17:29:30.814082 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="2178b105-4528-4a3c-be83-c0c4280e40b5" containerName="extract-utilities" Jan 28 17:29:30 crc kubenswrapper[4811]: E0128 17:29:30.814105 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c3541c8-9738-4b90-92cd-d8676444ef01" containerName="horizon-log" Jan 28 17:29:30 crc kubenswrapper[4811]: I0128 17:29:30.814113 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c3541c8-9738-4b90-92cd-d8676444ef01" containerName="horizon-log" Jan 28 17:29:30 crc kubenswrapper[4811]: E0128 17:29:30.814139 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c3541c8-9738-4b90-92cd-d8676444ef01" containerName="horizon" Jan 28 17:29:30 crc kubenswrapper[4811]: I0128 17:29:30.814147 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c3541c8-9738-4b90-92cd-d8676444ef01" containerName="horizon" Jan 28 17:29:30 crc kubenswrapper[4811]: E0128 17:29:30.814167 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2178b105-4528-4a3c-be83-c0c4280e40b5" containerName="registry-server" Jan 28 17:29:30 crc kubenswrapper[4811]: I0128 17:29:30.814174 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="2178b105-4528-4a3c-be83-c0c4280e40b5" containerName="registry-server" Jan 28 17:29:30 crc kubenswrapper[4811]: I0128 17:29:30.814469 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c3541c8-9738-4b90-92cd-d8676444ef01" containerName="horizon-log" Jan 28 17:29:30 crc kubenswrapper[4811]: I0128 17:29:30.814487 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="2178b105-4528-4a3c-be83-c0c4280e40b5" containerName="registry-server" Jan 28 17:29:30 crc kubenswrapper[4811]: I0128 17:29:30.814517 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c3541c8-9738-4b90-92cd-d8676444ef01" containerName="horizon" Jan 28 17:29:30 crc kubenswrapper[4811]: I0128 17:29:30.816204 4811 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws" Jan 28 17:29:30 crc kubenswrapper[4811]: I0128 17:29:30.819480 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 28 17:29:30 crc kubenswrapper[4811]: I0128 17:29:30.821518 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws"] Jan 28 17:29:30 crc kubenswrapper[4811]: I0128 17:29:30.936394 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5a01ed82-4f73-4fc5-8f81-b795ed4877c0-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws\" (UID: \"5a01ed82-4f73-4fc5-8f81-b795ed4877c0\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws" Jan 28 17:29:30 crc kubenswrapper[4811]: I0128 17:29:30.936521 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5a01ed82-4f73-4fc5-8f81-b795ed4877c0-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws\" (UID: \"5a01ed82-4f73-4fc5-8f81-b795ed4877c0\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws" Jan 28 17:29:30 crc kubenswrapper[4811]: I0128 17:29:30.936663 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cnmqd\" (UniqueName: \"kubernetes.io/projected/5a01ed82-4f73-4fc5-8f81-b795ed4877c0-kube-api-access-cnmqd\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws\" (UID: \"5a01ed82-4f73-4fc5-8f81-b795ed4877c0\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws" Jan 28 17:29:31 crc kubenswrapper[4811]: I0128 17:29:31.037771 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5a01ed82-4f73-4fc5-8f81-b795ed4877c0-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws\" (UID: \"5a01ed82-4f73-4fc5-8f81-b795ed4877c0\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws" Jan 28 17:29:31 crc kubenswrapper[4811]: I0128 17:29:31.037887 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cnmqd\" (UniqueName: \"kubernetes.io/projected/5a01ed82-4f73-4fc5-8f81-b795ed4877c0-kube-api-access-cnmqd\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws\" (UID: \"5a01ed82-4f73-4fc5-8f81-b795ed4877c0\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws" Jan 28 17:29:31 crc kubenswrapper[4811]: I0128 17:29:31.037945 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5a01ed82-4f73-4fc5-8f81-b795ed4877c0-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws\" (UID: \"5a01ed82-4f73-4fc5-8f81-b795ed4877c0\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws" Jan 28 17:29:31 crc kubenswrapper[4811]: I0128 17:29:31.038411 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/5a01ed82-4f73-4fc5-8f81-b795ed4877c0-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws\" (UID: \"5a01ed82-4f73-4fc5-8f81-b795ed4877c0\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws" Jan 28 17:29:31 crc kubenswrapper[4811]: I0128 17:29:31.038421 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5a01ed82-4f73-4fc5-8f81-b795ed4877c0-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws\" (UID: \"5a01ed82-4f73-4fc5-8f81-b795ed4877c0\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws" Jan 28 17:29:31 crc kubenswrapper[4811]: I0128 17:29:31.062236 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cnmqd\" (UniqueName: \"kubernetes.io/projected/5a01ed82-4f73-4fc5-8f81-b795ed4877c0-kube-api-access-cnmqd\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws\" (UID: \"5a01ed82-4f73-4fc5-8f81-b795ed4877c0\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws" Jan 28 17:29:31 crc kubenswrapper[4811]: I0128 17:29:31.141106 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws" Jan 28 17:29:31 crc kubenswrapper[4811]: I0128 17:29:31.621250 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws"] Jan 28 17:29:31 crc kubenswrapper[4811]: I0128 17:29:31.910487 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws" event={"ID":"5a01ed82-4f73-4fc5-8f81-b795ed4877c0","Type":"ContainerStarted","Data":"37b116bbd9237d3c003ba9581c6d1cb9e16d1d23aafb66defc8dde61866bd6fc"} Jan 28 17:29:31 crc kubenswrapper[4811]: I0128 17:29:31.910532 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws" event={"ID":"5a01ed82-4f73-4fc5-8f81-b795ed4877c0","Type":"ContainerStarted","Data":"475517b3de6253ee95a35842377b5827b1c1b097afb7dece8e4a93a99fd73efd"} Jan 28 17:29:32 crc kubenswrapper[4811]: I0128 17:29:32.771681 4811 scope.go:117] "RemoveContainer" containerID="f7c6ae3901e1d246a07d368a425f8680d262211717e170d80cd0657bacf621b0" Jan 28 17:29:32 crc kubenswrapper[4811]: I0128 17:29:32.795693 4811 scope.go:117] "RemoveContainer" containerID="f90a5112f9c65145397f562ff177bbdd19806b46d91120f6746b150f0c72fc99" Jan 28 17:29:32 crc kubenswrapper[4811]: I0128 17:29:32.854556 4811 scope.go:117] "RemoveContainer" containerID="8544410b22139b8422837506ce3f85d0e855893dd94d7de451d14e7b44fc0464" Jan 28 17:29:32 crc kubenswrapper[4811]: I0128 17:29:32.893724 4811 scope.go:117] "RemoveContainer" containerID="7e21d6857763ae5b856c90b2d58f5d0619ea75ce87ea67c4aee02eb8127fb13f" Jan 28 17:29:32 crc kubenswrapper[4811]: I0128 17:29:32.927083 4811 generic.go:334] "Generic (PLEG): container finished" podID="5a01ed82-4f73-4fc5-8f81-b795ed4877c0" containerID="37b116bbd9237d3c003ba9581c6d1cb9e16d1d23aafb66defc8dde61866bd6fc" exitCode=0 Jan 28 17:29:32 crc kubenswrapper[4811]: I0128 17:29:32.927786 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws" 
event={"ID":"5a01ed82-4f73-4fc5-8f81-b795ed4877c0","Type":"ContainerDied","Data":"37b116bbd9237d3c003ba9581c6d1cb9e16d1d23aafb66defc8dde61866bd6fc"} Jan 28 17:29:32 crc kubenswrapper[4811]: I0128 17:29:32.949015 4811 scope.go:117] "RemoveContainer" containerID="97ee7c1747e4b2ad3f80fdacd74169e73e00a29c6a600b20635f9fbc15b96092" Jan 28 17:29:32 crc kubenswrapper[4811]: I0128 17:29:32.989285 4811 scope.go:117] "RemoveContainer" containerID="f5d84c12962758b719211ea28916931d7d11691f91dee475568b847d80c457ca" Jan 28 17:29:33 crc kubenswrapper[4811]: I0128 17:29:33.087256 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 17:29:33 crc kubenswrapper[4811]: I0128 17:29:33.087642 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 17:29:34 crc kubenswrapper[4811]: I0128 17:29:34.956532 4811 generic.go:334] "Generic (PLEG): container finished" podID="5a01ed82-4f73-4fc5-8f81-b795ed4877c0" containerID="d2bf8aef4d3e985df77075ac12e9989a09af469a9c0f710dbb791ca2c4acb93c" exitCode=0 Jan 28 17:29:34 crc kubenswrapper[4811]: I0128 17:29:34.956611 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws" event={"ID":"5a01ed82-4f73-4fc5-8f81-b795ed4877c0","Type":"ContainerDied","Data":"d2bf8aef4d3e985df77075ac12e9989a09af469a9c0f710dbb791ca2c4acb93c"} Jan 28 17:29:35 crc kubenswrapper[4811]: I0128 17:29:35.970928 4811 generic.go:334] "Generic (PLEG): container finished" podID="5a01ed82-4f73-4fc5-8f81-b795ed4877c0" containerID="ca0979e8623e5b205f88e551b7285895b5c002c49a393b13b3da0f5c6a164a21" exitCode=0 Jan 28 17:29:35 crc kubenswrapper[4811]: I0128 17:29:35.971039 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws" event={"ID":"5a01ed82-4f73-4fc5-8f81-b795ed4877c0","Type":"ContainerDied","Data":"ca0979e8623e5b205f88e551b7285895b5c002c49a393b13b3da0f5c6a164a21"} Jan 28 17:29:37 crc kubenswrapper[4811]: I0128 17:29:37.348363 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws" Jan 28 17:29:37 crc kubenswrapper[4811]: I0128 17:29:37.411070 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5a01ed82-4f73-4fc5-8f81-b795ed4877c0-util\") pod \"5a01ed82-4f73-4fc5-8f81-b795ed4877c0\" (UID: \"5a01ed82-4f73-4fc5-8f81-b795ed4877c0\") " Jan 28 17:29:37 crc kubenswrapper[4811]: I0128 17:29:37.411139 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cnmqd\" (UniqueName: \"kubernetes.io/projected/5a01ed82-4f73-4fc5-8f81-b795ed4877c0-kube-api-access-cnmqd\") pod \"5a01ed82-4f73-4fc5-8f81-b795ed4877c0\" (UID: \"5a01ed82-4f73-4fc5-8f81-b795ed4877c0\") " Jan 28 17:29:37 crc kubenswrapper[4811]: I0128 17:29:37.419129 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a01ed82-4f73-4fc5-8f81-b795ed4877c0-util" (OuterVolumeSpecName: "util") pod "5a01ed82-4f73-4fc5-8f81-b795ed4877c0" (UID: "5a01ed82-4f73-4fc5-8f81-b795ed4877c0"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:29:37 crc kubenswrapper[4811]: I0128 17:29:37.431620 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a01ed82-4f73-4fc5-8f81-b795ed4877c0-kube-api-access-cnmqd" (OuterVolumeSpecName: "kube-api-access-cnmqd") pod "5a01ed82-4f73-4fc5-8f81-b795ed4877c0" (UID: "5a01ed82-4f73-4fc5-8f81-b795ed4877c0"). InnerVolumeSpecName "kube-api-access-cnmqd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:29:37 crc kubenswrapper[4811]: I0128 17:29:37.513015 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5a01ed82-4f73-4fc5-8f81-b795ed4877c0-bundle\") pod \"5a01ed82-4f73-4fc5-8f81-b795ed4877c0\" (UID: \"5a01ed82-4f73-4fc5-8f81-b795ed4877c0\") " Jan 28 17:29:37 crc kubenswrapper[4811]: I0128 17:29:37.513806 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cnmqd\" (UniqueName: \"kubernetes.io/projected/5a01ed82-4f73-4fc5-8f81-b795ed4877c0-kube-api-access-cnmqd\") on node \"crc\" DevicePath \"\"" Jan 28 17:29:37 crc kubenswrapper[4811]: I0128 17:29:37.513824 4811 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5a01ed82-4f73-4fc5-8f81-b795ed4877c0-util\") on node \"crc\" DevicePath \"\"" Jan 28 17:29:37 crc kubenswrapper[4811]: I0128 17:29:37.514964 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a01ed82-4f73-4fc5-8f81-b795ed4877c0-bundle" (OuterVolumeSpecName: "bundle") pod "5a01ed82-4f73-4fc5-8f81-b795ed4877c0" (UID: "5a01ed82-4f73-4fc5-8f81-b795ed4877c0"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:29:37 crc kubenswrapper[4811]: I0128 17:29:37.615955 4811 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5a01ed82-4f73-4fc5-8f81-b795ed4877c0-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 17:29:38 crc kubenswrapper[4811]: I0128 17:29:38.005194 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws" event={"ID":"5a01ed82-4f73-4fc5-8f81-b795ed4877c0","Type":"ContainerDied","Data":"475517b3de6253ee95a35842377b5827b1c1b097afb7dece8e4a93a99fd73efd"} Jan 28 17:29:38 crc kubenswrapper[4811]: I0128 17:29:38.005254 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="475517b3de6253ee95a35842377b5827b1c1b097afb7dece8e4a93a99fd73efd" Jan 28 17:29:38 crc kubenswrapper[4811]: I0128 17:29:38.005383 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws" Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.547258 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-wpqgl"] Jan 28 17:29:49 crc kubenswrapper[4811]: E0128 17:29:49.548214 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a01ed82-4f73-4fc5-8f81-b795ed4877c0" containerName="extract" Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.548228 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a01ed82-4f73-4fc5-8f81-b795ed4877c0" containerName="extract" Jan 28 17:29:49 crc kubenswrapper[4811]: E0128 17:29:49.548248 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a01ed82-4f73-4fc5-8f81-b795ed4877c0" containerName="util" Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.548254 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a01ed82-4f73-4fc5-8f81-b795ed4877c0" containerName="util" Jan 28 17:29:49 crc kubenswrapper[4811]: E0128 17:29:49.548304 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a01ed82-4f73-4fc5-8f81-b795ed4877c0" containerName="pull" Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.548313 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a01ed82-4f73-4fc5-8f81-b795ed4877c0" containerName="pull" Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.549054 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a01ed82-4f73-4fc5-8f81-b795ed4877c0" containerName="extract" Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.559392 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-wpqgl" Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.562060 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.562125 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.567659 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-q8dm2" Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.574485 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-wpqgl"] Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.667086 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w95ff\" (UniqueName: \"kubernetes.io/projected/05eff175-d1b2-4fde-b7ff-c94da0a5af2a-kube-api-access-w95ff\") pod \"obo-prometheus-operator-68bc856cb9-wpqgl\" (UID: \"05eff175-d1b2-4fde-b7ff-c94da0a5af2a\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-wpqgl" Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.681855 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-64974df957-pnkd5"] Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.683208 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-64974df957-pnkd5" Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.688185 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-ft9j9" Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.688252 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.706944 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-64974df957-c8hws"] Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.708391 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-64974df957-c8hws" Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.738468 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-64974df957-pnkd5"] Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.749590 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-64974df957-c8hws"] Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.770806 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w95ff\" (UniqueName: \"kubernetes.io/projected/05eff175-d1b2-4fde-b7ff-c94da0a5af2a-kube-api-access-w95ff\") pod \"obo-prometheus-operator-68bc856cb9-wpqgl\" (UID: \"05eff175-d1b2-4fde-b7ff-c94da0a5af2a\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-wpqgl" Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.807212 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w95ff\" (UniqueName: \"kubernetes.io/projected/05eff175-d1b2-4fde-b7ff-c94da0a5af2a-kube-api-access-w95ff\") pod \"obo-prometheus-operator-68bc856cb9-wpqgl\" (UID: \"05eff175-d1b2-4fde-b7ff-c94da0a5af2a\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-wpqgl" Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.872784 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ad3a640d-e3e1-4a4f-ad76-e728f8b73214-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-64974df957-c8hws\" (UID: \"ad3a640d-e3e1-4a4f-ad76-e728f8b73214\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-64974df957-c8hws" Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.873128 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c4f3ab08-1768-4114-98e0-646b6657c0ec-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-64974df957-pnkd5\" (UID: \"c4f3ab08-1768-4114-98e0-646b6657c0ec\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-64974df957-pnkd5" Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.873303 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ad3a640d-e3e1-4a4f-ad76-e728f8b73214-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-64974df957-c8hws\" (UID: \"ad3a640d-e3e1-4a4f-ad76-e728f8b73214\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-64974df957-c8hws" Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.873389 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c4f3ab08-1768-4114-98e0-646b6657c0ec-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-64974df957-pnkd5\" (UID: \"c4f3ab08-1768-4114-98e0-646b6657c0ec\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-64974df957-pnkd5" Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.883379 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-wpqgl" Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.893292 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-zfnch"] Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.894912 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-zfnch" Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.899077 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-bvtft" Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.899261 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.908068 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-zfnch"] Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.975109 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ad3a640d-e3e1-4a4f-ad76-e728f8b73214-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-64974df957-c8hws\" (UID: \"ad3a640d-e3e1-4a4f-ad76-e728f8b73214\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-64974df957-c8hws" Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.976234 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c4f3ab08-1768-4114-98e0-646b6657c0ec-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-64974df957-pnkd5\" (UID: \"c4f3ab08-1768-4114-98e0-646b6657c0ec\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-64974df957-pnkd5" Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.976543 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ad3a640d-e3e1-4a4f-ad76-e728f8b73214-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-64974df957-c8hws\" (UID: \"ad3a640d-e3e1-4a4f-ad76-e728f8b73214\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-64974df957-c8hws" Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.976677 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c4f3ab08-1768-4114-98e0-646b6657c0ec-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-64974df957-pnkd5\" (UID: \"c4f3ab08-1768-4114-98e0-646b6657c0ec\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-64974df957-pnkd5" Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.980121 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ad3a640d-e3e1-4a4f-ad76-e728f8b73214-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-64974df957-c8hws\" (UID: \"ad3a640d-e3e1-4a4f-ad76-e728f8b73214\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-64974df957-c8hws" Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.986261 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c4f3ab08-1768-4114-98e0-646b6657c0ec-apiservice-cert\") pod 
\"obo-prometheus-operator-admission-webhook-64974df957-pnkd5\" (UID: \"c4f3ab08-1768-4114-98e0-646b6657c0ec\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-64974df957-pnkd5" Jan 28 17:29:49 crc kubenswrapper[4811]: I0128 17:29:49.988058 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c4f3ab08-1768-4114-98e0-646b6657c0ec-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-64974df957-pnkd5\" (UID: \"c4f3ab08-1768-4114-98e0-646b6657c0ec\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-64974df957-pnkd5" Jan 28 17:29:50 crc kubenswrapper[4811]: I0128 17:29:50.002278 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-64974df957-pnkd5" Jan 28 17:29:50 crc kubenswrapper[4811]: I0128 17:29:50.010085 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ad3a640d-e3e1-4a4f-ad76-e728f8b73214-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-64974df957-c8hws\" (UID: \"ad3a640d-e3e1-4a4f-ad76-e728f8b73214\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-64974df957-c8hws" Jan 28 17:29:50 crc kubenswrapper[4811]: I0128 17:29:50.043788 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-64974df957-c8hws" Jan 28 17:29:50 crc kubenswrapper[4811]: I0128 17:29:50.079329 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zh2q5\" (UniqueName: \"kubernetes.io/projected/0f9309ad-f7c3-4e57-a1e4-fba21275d588-kube-api-access-zh2q5\") pod \"observability-operator-59bdc8b94-zfnch\" (UID: \"0f9309ad-f7c3-4e57-a1e4-fba21275d588\") " pod="openshift-operators/observability-operator-59bdc8b94-zfnch" Jan 28 17:29:50 crc kubenswrapper[4811]: I0128 17:29:50.079596 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/0f9309ad-f7c3-4e57-a1e4-fba21275d588-observability-operator-tls\") pod \"observability-operator-59bdc8b94-zfnch\" (UID: \"0f9309ad-f7c3-4e57-a1e4-fba21275d588\") " pod="openshift-operators/observability-operator-59bdc8b94-zfnch" Jan 28 17:29:50 crc kubenswrapper[4811]: I0128 17:29:50.098461 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-8fslw"] Jan 28 17:29:50 crc kubenswrapper[4811]: I0128 17:29:50.099923 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-8fslw" Jan 28 17:29:50 crc kubenswrapper[4811]: I0128 17:29:50.108522 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-4p6cl" Jan 28 17:29:50 crc kubenswrapper[4811]: I0128 17:29:50.113766 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-8fslw"] Jan 28 17:29:50 crc kubenswrapper[4811]: I0128 17:29:50.190570 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zh2q5\" (UniqueName: \"kubernetes.io/projected/0f9309ad-f7c3-4e57-a1e4-fba21275d588-kube-api-access-zh2q5\") pod \"observability-operator-59bdc8b94-zfnch\" (UID: \"0f9309ad-f7c3-4e57-a1e4-fba21275d588\") " pod="openshift-operators/observability-operator-59bdc8b94-zfnch" Jan 28 17:29:50 crc kubenswrapper[4811]: I0128 17:29:50.190983 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/0f9309ad-f7c3-4e57-a1e4-fba21275d588-observability-operator-tls\") pod \"observability-operator-59bdc8b94-zfnch\" (UID: \"0f9309ad-f7c3-4e57-a1e4-fba21275d588\") " pod="openshift-operators/observability-operator-59bdc8b94-zfnch" Jan 28 17:29:50 crc kubenswrapper[4811]: I0128 17:29:50.212295 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/0f9309ad-f7c3-4e57-a1e4-fba21275d588-observability-operator-tls\") pod \"observability-operator-59bdc8b94-zfnch\" (UID: \"0f9309ad-f7c3-4e57-a1e4-fba21275d588\") " pod="openshift-operators/observability-operator-59bdc8b94-zfnch" Jan 28 17:29:50 crc kubenswrapper[4811]: I0128 17:29:50.221920 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zh2q5\" (UniqueName: \"kubernetes.io/projected/0f9309ad-f7c3-4e57-a1e4-fba21275d588-kube-api-access-zh2q5\") pod \"observability-operator-59bdc8b94-zfnch\" (UID: \"0f9309ad-f7c3-4e57-a1e4-fba21275d588\") " pod="openshift-operators/observability-operator-59bdc8b94-zfnch" Jan 28 17:29:50 crc kubenswrapper[4811]: I0128 17:29:50.293522 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/7c526f6a-a27b-4d38-b4a1-c70fed01753f-openshift-service-ca\") pod \"perses-operator-5bf474d74f-8fslw\" (UID: \"7c526f6a-a27b-4d38-b4a1-c70fed01753f\") " pod="openshift-operators/perses-operator-5bf474d74f-8fslw" Jan 28 17:29:50 crc kubenswrapper[4811]: I0128 17:29:50.293779 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6z6l6\" (UniqueName: \"kubernetes.io/projected/7c526f6a-a27b-4d38-b4a1-c70fed01753f-kube-api-access-6z6l6\") pod \"perses-operator-5bf474d74f-8fslw\" (UID: \"7c526f6a-a27b-4d38-b4a1-c70fed01753f\") " pod="openshift-operators/perses-operator-5bf474d74f-8fslw" Jan 28 17:29:50 crc kubenswrapper[4811]: I0128 17:29:50.421853 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6z6l6\" (UniqueName: \"kubernetes.io/projected/7c526f6a-a27b-4d38-b4a1-c70fed01753f-kube-api-access-6z6l6\") pod \"perses-operator-5bf474d74f-8fslw\" (UID: \"7c526f6a-a27b-4d38-b4a1-c70fed01753f\") " pod="openshift-operators/perses-operator-5bf474d74f-8fslw" Jan 28 17:29:50 crc kubenswrapper[4811]: I0128 17:29:50.423518 4811 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/7c526f6a-a27b-4d38-b4a1-c70fed01753f-openshift-service-ca\") pod \"perses-operator-5bf474d74f-8fslw\" (UID: \"7c526f6a-a27b-4d38-b4a1-c70fed01753f\") " pod="openshift-operators/perses-operator-5bf474d74f-8fslw" Jan 28 17:29:50 crc kubenswrapper[4811]: I0128 17:29:50.427024 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/7c526f6a-a27b-4d38-b4a1-c70fed01753f-openshift-service-ca\") pod \"perses-operator-5bf474d74f-8fslw\" (UID: \"7c526f6a-a27b-4d38-b4a1-c70fed01753f\") " pod="openshift-operators/perses-operator-5bf474d74f-8fslw" Jan 28 17:29:50 crc kubenswrapper[4811]: I0128 17:29:50.456162 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6z6l6\" (UniqueName: \"kubernetes.io/projected/7c526f6a-a27b-4d38-b4a1-c70fed01753f-kube-api-access-6z6l6\") pod \"perses-operator-5bf474d74f-8fslw\" (UID: \"7c526f6a-a27b-4d38-b4a1-c70fed01753f\") " pod="openshift-operators/perses-operator-5bf474d74f-8fslw" Jan 28 17:29:50 crc kubenswrapper[4811]: I0128 17:29:50.515153 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-zfnch" Jan 28 17:29:50 crc kubenswrapper[4811]: I0128 17:29:50.538963 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-8fslw" Jan 28 17:29:50 crc kubenswrapper[4811]: I0128 17:29:50.590158 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-wpqgl"] Jan 28 17:29:50 crc kubenswrapper[4811]: W0128 17:29:50.597365 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod05eff175_d1b2_4fde_b7ff_c94da0a5af2a.slice/crio-e1729dc53193c48946d8af115f2d5a2567785a8dfdb5499e45a027604e554283 WatchSource:0}: Error finding container e1729dc53193c48946d8af115f2d5a2567785a8dfdb5499e45a027604e554283: Status 404 returned error can't find the container with id e1729dc53193c48946d8af115f2d5a2567785a8dfdb5499e45a027604e554283 Jan 28 17:29:50 crc kubenswrapper[4811]: I0128 17:29:50.601640 4811 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 28 17:29:50 crc kubenswrapper[4811]: I0128 17:29:50.852998 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-64974df957-pnkd5"] Jan 28 17:29:50 crc kubenswrapper[4811]: W0128 17:29:50.870714 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc4f3ab08_1768_4114_98e0_646b6657c0ec.slice/crio-08dd5eb98eeaa0394a68b41f52e65ef5edd83112e3781c7d78f57537d1491657 WatchSource:0}: Error finding container 08dd5eb98eeaa0394a68b41f52e65ef5edd83112e3781c7d78f57537d1491657: Status 404 returned error can't find the container with id 08dd5eb98eeaa0394a68b41f52e65ef5edd83112e3781c7d78f57537d1491657 Jan 28 17:29:50 crc kubenswrapper[4811]: I0128 17:29:50.978793 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-64974df957-c8hws"] Jan 28 17:29:51 crc kubenswrapper[4811]: I0128 17:29:51.191134 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-64974df957-pnkd5" event={"ID":"c4f3ab08-1768-4114-98e0-646b6657c0ec","Type":"ContainerStarted","Data":"08dd5eb98eeaa0394a68b41f52e65ef5edd83112e3781c7d78f57537d1491657"} Jan 28 17:29:51 crc kubenswrapper[4811]: I0128 17:29:51.193707 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-64974df957-c8hws" event={"ID":"ad3a640d-e3e1-4a4f-ad76-e728f8b73214","Type":"ContainerStarted","Data":"0c86ac1a0d990cfe81860f941c95e45be737d4100515c42359af9176561b1195"} Jan 28 17:29:51 crc kubenswrapper[4811]: I0128 17:29:51.195407 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-zfnch"] Jan 28 17:29:51 crc kubenswrapper[4811]: I0128 17:29:51.195590 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-wpqgl" event={"ID":"05eff175-d1b2-4fde-b7ff-c94da0a5af2a","Type":"ContainerStarted","Data":"e1729dc53193c48946d8af115f2d5a2567785a8dfdb5499e45a027604e554283"} Jan 28 17:29:51 crc kubenswrapper[4811]: I0128 17:29:51.443503 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-8fslw"] Jan 28 17:29:51 crc kubenswrapper[4811]: W0128 17:29:51.481410 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7c526f6a_a27b_4d38_b4a1_c70fed01753f.slice/crio-6781c16e365427053f896d5863698100a5284955feae51c2a3abfc7b8cdd3d68 WatchSource:0}: Error finding container 6781c16e365427053f896d5863698100a5284955feae51c2a3abfc7b8cdd3d68: Status 404 returned error can't find the container with id 6781c16e365427053f896d5863698100a5284955feae51c2a3abfc7b8cdd3d68 Jan 28 17:29:52 crc kubenswrapper[4811]: I0128 17:29:52.082971 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-8gn9v"] Jan 28 17:29:52 crc kubenswrapper[4811]: I0128 17:29:52.109963 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-8gn9v"] Jan 28 17:29:52 crc kubenswrapper[4811]: I0128 17:29:52.210554 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5bf474d74f-8fslw" event={"ID":"7c526f6a-a27b-4d38-b4a1-c70fed01753f","Type":"ContainerStarted","Data":"6781c16e365427053f896d5863698100a5284955feae51c2a3abfc7b8cdd3d68"} Jan 28 17:29:52 crc kubenswrapper[4811]: I0128 17:29:52.211627 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-59bdc8b94-zfnch" event={"ID":"0f9309ad-f7c3-4e57-a1e4-fba21275d588","Type":"ContainerStarted","Data":"e4101bab09e91b8849555085f7b950c4f4120b3f7ffb63c21c08c1b72fec256a"} Jan 28 17:29:52 crc kubenswrapper[4811]: I0128 17:29:52.378236 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63e69842-46ee-4ad6-a516-1d9a9b9e96c5" path="/var/lib/kubelet/pods/63e69842-46ee-4ad6-a516-1d9a9b9e96c5/volumes" Jan 28 17:29:53 crc kubenswrapper[4811]: I0128 17:29:53.056364 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-99rvc"] Jan 28 17:29:53 crc kubenswrapper[4811]: I0128 17:29:53.077728 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-3bdf-account-create-update-hgqfn"] Jan 28 17:29:53 crc kubenswrapper[4811]: I0128 17:29:53.095698 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/nova-api-db-create-6nhjc"] Jan 28 17:29:53 crc kubenswrapper[4811]: I0128 17:29:53.111511 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-99rvc"] Jan 28 17:29:53 crc kubenswrapper[4811]: I0128 17:29:53.128316 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-ec47-account-create-update-dnqjz"] Jan 28 17:29:53 crc kubenswrapper[4811]: I0128 17:29:53.137975 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-eddd-account-create-update-zkh4b"] Jan 28 17:29:53 crc kubenswrapper[4811]: I0128 17:29:53.147725 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-ec47-account-create-update-dnqjz"] Jan 28 17:29:53 crc kubenswrapper[4811]: I0128 17:29:53.191682 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-6nhjc"] Jan 28 17:29:53 crc kubenswrapper[4811]: I0128 17:29:53.211802 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-3bdf-account-create-update-hgqfn"] Jan 28 17:29:53 crc kubenswrapper[4811]: I0128 17:29:53.224174 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-eddd-account-create-update-zkh4b"] Jan 28 17:29:54 crc kubenswrapper[4811]: I0128 17:29:54.365235 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1017da74-ab67-432c-8eb9-8b6b8b7a1aac" path="/var/lib/kubelet/pods/1017da74-ab67-432c-8eb9-8b6b8b7a1aac/volumes" Jan 28 17:29:54 crc kubenswrapper[4811]: I0128 17:29:54.365861 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="470404eb-f10d-46f3-af1a-a173e2fbae0c" path="/var/lib/kubelet/pods/470404eb-f10d-46f3-af1a-a173e2fbae0c/volumes" Jan 28 17:29:54 crc kubenswrapper[4811]: I0128 17:29:54.366401 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1940218-22f9-4af9-b2ff-eb1b19e318f4" path="/var/lib/kubelet/pods/b1940218-22f9-4af9-b2ff-eb1b19e318f4/volumes" Jan 28 17:29:54 crc kubenswrapper[4811]: I0128 17:29:54.372655 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1cae5db-f32e-4625-8331-a2e38a0eaa6b" path="/var/lib/kubelet/pods/e1cae5db-f32e-4625-8331-a2e38a0eaa6b/volumes" Jan 28 17:29:54 crc kubenswrapper[4811]: I0128 17:29:54.374669 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fdc78eb0-da28-4fa6-92a4-bb049a6e6196" path="/var/lib/kubelet/pods/fdc78eb0-da28-4fa6-92a4-bb049a6e6196/volumes" Jan 28 17:30:00 crc kubenswrapper[4811]: I0128 17:30:00.153342 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493690-tb5dk"] Jan 28 17:30:00 crc kubenswrapper[4811]: I0128 17:30:00.155310 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493690-tb5dk" Jan 28 17:30:00 crc kubenswrapper[4811]: I0128 17:30:00.158861 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 28 17:30:00 crc kubenswrapper[4811]: I0128 17:30:00.159129 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 28 17:30:00 crc kubenswrapper[4811]: I0128 17:30:00.166308 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493690-tb5dk"] Jan 28 17:30:00 crc kubenswrapper[4811]: I0128 17:30:00.207974 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e82f4af7-32ad-4515-ab89-cfb42f104ea1-secret-volume\") pod \"collect-profiles-29493690-tb5dk\" (UID: \"e82f4af7-32ad-4515-ab89-cfb42f104ea1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493690-tb5dk" Jan 28 17:30:00 crc kubenswrapper[4811]: I0128 17:30:00.208409 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e82f4af7-32ad-4515-ab89-cfb42f104ea1-config-volume\") pod \"collect-profiles-29493690-tb5dk\" (UID: \"e82f4af7-32ad-4515-ab89-cfb42f104ea1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493690-tb5dk" Jan 28 17:30:00 crc kubenswrapper[4811]: I0128 17:30:00.208593 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hhxfr\" (UniqueName: \"kubernetes.io/projected/e82f4af7-32ad-4515-ab89-cfb42f104ea1-kube-api-access-hhxfr\") pod \"collect-profiles-29493690-tb5dk\" (UID: \"e82f4af7-32ad-4515-ab89-cfb42f104ea1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493690-tb5dk" Jan 28 17:30:00 crc kubenswrapper[4811]: I0128 17:30:00.311643 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hhxfr\" (UniqueName: \"kubernetes.io/projected/e82f4af7-32ad-4515-ab89-cfb42f104ea1-kube-api-access-hhxfr\") pod \"collect-profiles-29493690-tb5dk\" (UID: \"e82f4af7-32ad-4515-ab89-cfb42f104ea1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493690-tb5dk" Jan 28 17:30:00 crc kubenswrapper[4811]: I0128 17:30:00.311795 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e82f4af7-32ad-4515-ab89-cfb42f104ea1-secret-volume\") pod \"collect-profiles-29493690-tb5dk\" (UID: \"e82f4af7-32ad-4515-ab89-cfb42f104ea1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493690-tb5dk" Jan 28 17:30:00 crc kubenswrapper[4811]: I0128 17:30:00.311857 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e82f4af7-32ad-4515-ab89-cfb42f104ea1-config-volume\") pod \"collect-profiles-29493690-tb5dk\" (UID: \"e82f4af7-32ad-4515-ab89-cfb42f104ea1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493690-tb5dk" Jan 28 17:30:00 crc kubenswrapper[4811]: I0128 17:30:00.312725 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e82f4af7-32ad-4515-ab89-cfb42f104ea1-config-volume\") pod 
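The three-step pattern above — reconciler_common.go:245 VerifyControllerAttachedVolume, reconciler_common.go:218 MountVolume started, operation_generator.go:637 MountVolume.SetUp succeeded — is the kubelet volume manager reconciling desired state against actual state for each of the pod's volumes. A minimal sketch of that shape, with hypothetical types; not the kubelet's actual code:

package main

import "fmt"

// volume mirrors the per-volume state the reconciler tracks.
type volume struct {
	name     string // e.g. "config-volume"
	attached bool
	mounted  bool
}

// reconcile walks desired volumes through the same phases the log shows.
func reconcile(pod string, desired []*volume) {
	for _, v := range desired {
		if !v.attached {
			fmt.Printf("VerifyControllerAttachedVolume started for volume %q pod=%s\n", v.name, pod)
			v.attached = true // configmap/secret/projected volumes need no external attach
		}
	}
	for _, v := range desired {
		if v.attached && !v.mounted {
			fmt.Printf("MountVolume started for volume %q pod=%s\n", v.name, pod)
			v.mounted = true // SetUp would materialize files under the pod's volumes dir
			fmt.Printf("MountVolume.SetUp succeeded for volume %q pod=%s\n", v.name, pod)
		}
	}
}

func main() {
	reconcile("openshift-operator-lifecycle-manager/collect-profiles-29493690-tb5dk", []*volume{
		{name: "secret-volume"}, {name: "config-volume"}, {name: "kube-api-access-hhxfr"},
	})
}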
\"collect-profiles-29493690-tb5dk\" (UID: \"e82f4af7-32ad-4515-ab89-cfb42f104ea1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493690-tb5dk" Jan 28 17:30:00 crc kubenswrapper[4811]: I0128 17:30:00.319978 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e82f4af7-32ad-4515-ab89-cfb42f104ea1-secret-volume\") pod \"collect-profiles-29493690-tb5dk\" (UID: \"e82f4af7-32ad-4515-ab89-cfb42f104ea1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493690-tb5dk" Jan 28 17:30:00 crc kubenswrapper[4811]: I0128 17:30:00.334052 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hhxfr\" (UniqueName: \"kubernetes.io/projected/e82f4af7-32ad-4515-ab89-cfb42f104ea1-kube-api-access-hhxfr\") pod \"collect-profiles-29493690-tb5dk\" (UID: \"e82f4af7-32ad-4515-ab89-cfb42f104ea1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493690-tb5dk" Jan 28 17:30:00 crc kubenswrapper[4811]: I0128 17:30:00.495566 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493690-tb5dk" Jan 28 17:30:02 crc kubenswrapper[4811]: I0128 17:30:02.027585 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-bk8c2"] Jan 28 17:30:02 crc kubenswrapper[4811]: I0128 17:30:02.036541 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-bk8c2"] Jan 28 17:30:02 crc kubenswrapper[4811]: I0128 17:30:02.353902 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d57f8a25-7690-4908-88e0-f228e953daac" path="/var/lib/kubelet/pods/d57f8a25-7690-4908-88e0-f228e953daac/volumes" Jan 28 17:30:03 crc kubenswrapper[4811]: I0128 17:30:03.087334 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 17:30:03 crc kubenswrapper[4811]: I0128 17:30:03.087405 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 17:30:07 crc kubenswrapper[4811]: I0128 17:30:07.203865 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493690-tb5dk"] Jan 28 17:30:07 crc kubenswrapper[4811]: W0128 17:30:07.221040 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode82f4af7_32ad_4515_ab89_cfb42f104ea1.slice/crio-f8658e5c6fe6529cedf4802218a30ff193e36d9f2d76701d100ff7506a33ff52 WatchSource:0}: Error finding container f8658e5c6fe6529cedf4802218a30ff193e36d9f2d76701d100ff7506a33ff52: Status 404 returned error can't find the container with id f8658e5c6fe6529cedf4802218a30ff193e36d9f2d76701d100ff7506a33ff52 Jan 28 17:30:07 crc kubenswrapper[4811]: I0128 17:30:07.402683 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-64974df957-c8hws" 
event={"ID":"ad3a640d-e3e1-4a4f-ad76-e728f8b73214","Type":"ContainerStarted","Data":"e65cf16306d4800ec084cdead243af1bf9b82c8734e3407d8fc842139778fb03"} Jan 28 17:30:07 crc kubenswrapper[4811]: I0128 17:30:07.405967 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-wpqgl" event={"ID":"05eff175-d1b2-4fde-b7ff-c94da0a5af2a","Type":"ContainerStarted","Data":"32a63446f79171453affa2cb0b4a6b6633575e81808707dc285e4f41f42720a1"} Jan 28 17:30:07 crc kubenswrapper[4811]: I0128 17:30:07.408197 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5bf474d74f-8fslw" event={"ID":"7c526f6a-a27b-4d38-b4a1-c70fed01753f","Type":"ContainerStarted","Data":"92caddfa4de79d7435c8d450e4f219c863a5cae070492746fcf6def1abd179ed"} Jan 28 17:30:07 crc kubenswrapper[4811]: I0128 17:30:07.408817 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5bf474d74f-8fslw" Jan 28 17:30:07 crc kubenswrapper[4811]: I0128 17:30:07.410756 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493690-tb5dk" event={"ID":"e82f4af7-32ad-4515-ab89-cfb42f104ea1","Type":"ContainerStarted","Data":"f8658e5c6fe6529cedf4802218a30ff193e36d9f2d76701d100ff7506a33ff52"} Jan 28 17:30:07 crc kubenswrapper[4811]: I0128 17:30:07.412249 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-59bdc8b94-zfnch" event={"ID":"0f9309ad-f7c3-4e57-a1e4-fba21275d588","Type":"ContainerStarted","Data":"db5cb860a968f4bd3e147c45fa56c59c400939273bf3e2e521a0e585040e4137"} Jan 28 17:30:07 crc kubenswrapper[4811]: I0128 17:30:07.413977 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-59bdc8b94-zfnch" Jan 28 17:30:07 crc kubenswrapper[4811]: I0128 17:30:07.415834 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-64974df957-pnkd5" event={"ID":"c4f3ab08-1768-4114-98e0-646b6657c0ec","Type":"ContainerStarted","Data":"f6a8ef1f1520f90755e4ec28c8c9472edc0a1139212e625173d6118b643a59a1"} Jan 28 17:30:07 crc kubenswrapper[4811]: I0128 17:30:07.418658 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-59bdc8b94-zfnch" Jan 28 17:30:07 crc kubenswrapper[4811]: I0128 17:30:07.439420 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-64974df957-c8hws" podStartSLOduration=2.8397013859999998 podStartE2EDuration="18.439400932s" podCreationTimestamp="2026-01-28 17:29:49 +0000 UTC" firstStartedPulling="2026-01-28 17:29:51.020767832 +0000 UTC m=+6283.775131415" lastFinishedPulling="2026-01-28 17:30:06.620467378 +0000 UTC m=+6299.374830961" observedRunningTime="2026-01-28 17:30:07.436832782 +0000 UTC m=+6300.191196375" watchObservedRunningTime="2026-01-28 17:30:07.439400932 +0000 UTC m=+6300.193764515" Jan 28 17:30:07 crc kubenswrapper[4811]: I0128 17:30:07.490878 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-59bdc8b94-zfnch" podStartSLOduration=3.044092312 podStartE2EDuration="18.490857904s" podCreationTimestamp="2026-01-28 17:29:49 +0000 UTC" firstStartedPulling="2026-01-28 17:29:51.232311323 +0000 UTC m=+6283.986674906" lastFinishedPulling="2026-01-28 
17:30:06.679076915 +0000 UTC m=+6299.433440498" observedRunningTime="2026-01-28 17:30:07.477771588 +0000 UTC m=+6300.232135171" watchObservedRunningTime="2026-01-28 17:30:07.490857904 +0000 UTC m=+6300.245221487" Jan 28 17:30:07 crc kubenswrapper[4811]: I0128 17:30:07.513163 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-64974df957-pnkd5" podStartSLOduration=2.834064782 podStartE2EDuration="18.513144991s" podCreationTimestamp="2026-01-28 17:29:49 +0000 UTC" firstStartedPulling="2026-01-28 17:29:50.910626061 +0000 UTC m=+6283.664989644" lastFinishedPulling="2026-01-28 17:30:06.58970627 +0000 UTC m=+6299.344069853" observedRunningTime="2026-01-28 17:30:07.510303344 +0000 UTC m=+6300.264666927" watchObservedRunningTime="2026-01-28 17:30:07.513144991 +0000 UTC m=+6300.267508574" Jan 28 17:30:07 crc kubenswrapper[4811]: I0128 17:30:07.588121 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-wpqgl" podStartSLOduration=2.586705414 podStartE2EDuration="18.588078201s" podCreationTimestamp="2026-01-28 17:29:49 +0000 UTC" firstStartedPulling="2026-01-28 17:29:50.601256536 +0000 UTC m=+6283.355620119" lastFinishedPulling="2026-01-28 17:30:06.602629323 +0000 UTC m=+6299.356992906" observedRunningTime="2026-01-28 17:30:07.581353759 +0000 UTC m=+6300.335717372" watchObservedRunningTime="2026-01-28 17:30:07.588078201 +0000 UTC m=+6300.342441784" Jan 28 17:30:07 crc kubenswrapper[4811]: I0128 17:30:07.593377 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5bf474d74f-8fslw" podStartSLOduration=2.47583089 podStartE2EDuration="17.593364645s" podCreationTimestamp="2026-01-28 17:29:50 +0000 UTC" firstStartedPulling="2026-01-28 17:29:51.484311676 +0000 UTC m=+6284.238675269" lastFinishedPulling="2026-01-28 17:30:06.601845451 +0000 UTC m=+6299.356209024" observedRunningTime="2026-01-28 17:30:07.556989615 +0000 UTC m=+6300.311353198" watchObservedRunningTime="2026-01-28 17:30:07.593364645 +0000 UTC m=+6300.347728238" Jan 28 17:30:08 crc kubenswrapper[4811]: I0128 17:30:08.425724 4811 generic.go:334] "Generic (PLEG): container finished" podID="e82f4af7-32ad-4515-ab89-cfb42f104ea1" containerID="b467fb8cbbce2277767cc26cb35d2a020c7e93c806fabc9d86ffba6dd3cdd98d" exitCode=0 Jan 28 17:30:08 crc kubenswrapper[4811]: I0128 17:30:08.426012 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493690-tb5dk" event={"ID":"e82f4af7-32ad-4515-ab89-cfb42f104ea1","Type":"ContainerDied","Data":"b467fb8cbbce2277767cc26cb35d2a020c7e93c806fabc9d86ffba6dd3cdd98d"} Jan 28 17:30:09 crc kubenswrapper[4811]: I0128 17:30:09.835747 4811 util.go:48] "No ready sandbox for pod can be found. 
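The pod_startup_latency_tracker entries above are internally consistent: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration additionally subtracts the image-pull window (lastFinishedPulling minus firstStartedPulling). For the c8hws webhook pod: 18.439400932s minus 15.599699546s of pulling gives 2.839701386s, matching the printed podStartSLOduration=2.8397013859999998. A short check of that arithmetic; the tracker's internal bookkeeping is richer, this only verifies the printed numbers:

package main

import (
	"fmt"
	"time"
)

func main() {
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	parse := func(s string) time.Time {
		t, err := time.Parse(layout, s)
		if err != nil {
			panic(err)
		}
		return t
	}
	created := parse("2026-01-28 17:29:49 +0000 UTC")
	firstPull := parse("2026-01-28 17:29:51.020767832 +0000 UTC")
	lastPull := parse("2026-01-28 17:30:06.620467378 +0000 UTC")
	observed := parse("2026-01-28 17:30:07.439400932 +0000 UTC")

	e2e := observed.Sub(created)        // podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull) // pull time excluded
	fmt.Println("podStartE2EDuration:", e2e) // 18.439400932s
	fmt.Println("podStartSLOduration:", slo) // 2.839701386s
}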
Jan 28 17:30:09 crc kubenswrapper[4811]: I0128 17:30:09.929583 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e82f4af7-32ad-4515-ab89-cfb42f104ea1-config-volume\") pod \"e82f4af7-32ad-4515-ab89-cfb42f104ea1\" (UID: \"e82f4af7-32ad-4515-ab89-cfb42f104ea1\") "
Jan 28 17:30:09 crc kubenswrapper[4811]: I0128 17:30:09.930240 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hhxfr\" (UniqueName: \"kubernetes.io/projected/e82f4af7-32ad-4515-ab89-cfb42f104ea1-kube-api-access-hhxfr\") pod \"e82f4af7-32ad-4515-ab89-cfb42f104ea1\" (UID: \"e82f4af7-32ad-4515-ab89-cfb42f104ea1\") "
Jan 28 17:30:09 crc kubenswrapper[4811]: I0128 17:30:09.930407 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e82f4af7-32ad-4515-ab89-cfb42f104ea1-secret-volume\") pod \"e82f4af7-32ad-4515-ab89-cfb42f104ea1\" (UID: \"e82f4af7-32ad-4515-ab89-cfb42f104ea1\") "
Jan 28 17:30:09 crc kubenswrapper[4811]: I0128 17:30:09.930740 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e82f4af7-32ad-4515-ab89-cfb42f104ea1-config-volume" (OuterVolumeSpecName: "config-volume") pod "e82f4af7-32ad-4515-ab89-cfb42f104ea1" (UID: "e82f4af7-32ad-4515-ab89-cfb42f104ea1"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 17:30:09 crc kubenswrapper[4811]: I0128 17:30:09.931147 4811 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e82f4af7-32ad-4515-ab89-cfb42f104ea1-config-volume\") on node \"crc\" DevicePath \"\""
Jan 28 17:30:09 crc kubenswrapper[4811]: I0128 17:30:09.939738 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e82f4af7-32ad-4515-ab89-cfb42f104ea1-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e82f4af7-32ad-4515-ab89-cfb42f104ea1" (UID: "e82f4af7-32ad-4515-ab89-cfb42f104ea1"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:30:09 crc kubenswrapper[4811]: I0128 17:30:09.949806 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e82f4af7-32ad-4515-ab89-cfb42f104ea1-kube-api-access-hhxfr" (OuterVolumeSpecName: "kube-api-access-hhxfr") pod "e82f4af7-32ad-4515-ab89-cfb42f104ea1" (UID: "e82f4af7-32ad-4515-ab89-cfb42f104ea1"). InnerVolumeSpecName "kube-api-access-hhxfr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 17:30:10 crc kubenswrapper[4811]: I0128 17:30:10.033136 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hhxfr\" (UniqueName: \"kubernetes.io/projected/e82f4af7-32ad-4515-ab89-cfb42f104ea1-kube-api-access-hhxfr\") on node \"crc\" DevicePath \"\""
Jan 28 17:30:10 crc kubenswrapper[4811]: I0128 17:30:10.033174 4811 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e82f4af7-32ad-4515-ab89-cfb42f104ea1-secret-volume\") on node \"crc\" DevicePath \"\""
Jan 28 17:30:10 crc kubenswrapper[4811]: I0128 17:30:10.445799 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493690-tb5dk" event={"ID":"e82f4af7-32ad-4515-ab89-cfb42f104ea1","Type":"ContainerDied","Data":"f8658e5c6fe6529cedf4802218a30ff193e36d9f2d76701d100ff7506a33ff52"}
Jan 28 17:30:10 crc kubenswrapper[4811]: I0128 17:30:10.446107 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f8658e5c6fe6529cedf4802218a30ff193e36d9f2d76701d100ff7506a33ff52"
Jan 28 17:30:10 crc kubenswrapper[4811]: I0128 17:30:10.445827 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493690-tb5dk"
Jan 28 17:30:10 crc kubenswrapper[4811]: I0128 17:30:10.903052 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493645-5tfjg"]
Jan 28 17:30:10 crc kubenswrapper[4811]: I0128 17:30:10.910405 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493645-5tfjg"]
Jan 28 17:30:12 crc kubenswrapper[4811]: I0128 17:30:12.370246 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13094dcc-9bfd-4733-bf36-aa370104fe4e" path="/var/lib/kubelet/pods/13094dcc-9bfd-4733-bf36-aa370104fe4e/volumes"
Jan 28 17:30:20 crc kubenswrapper[4811]: I0128 17:30:20.542769 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5bf474d74f-8fslw"
Jan 28 17:30:21 crc kubenswrapper[4811]: I0128 17:30:21.055493 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-l4rz7"]
Jan 28 17:30:21 crc kubenswrapper[4811]: I0128 17:30:21.066274 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-l4rz7"]
Jan 28 17:30:22 crc kubenswrapper[4811]: I0128 17:30:22.046243 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-5vhsh"]
Jan 28 17:30:22 crc kubenswrapper[4811]: I0128 17:30:22.060015 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-5vhsh"]
Jan 28 17:30:22 crc kubenswrapper[4811]: I0128 17:30:22.353713 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3fc79f2a-5b46-4089-aa7c-517621407f96" path="/var/lib/kubelet/pods/3fc79f2a-5b46-4089-aa7c-517621407f96/volumes"
Jan 28 17:30:22 crc kubenswrapper[4811]: I0128 17:30:22.354380 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d34259dd-463f-4e47-9284-da3824688952" path="/var/lib/kubelet/pods/d34259dd-463f-4e47-9284-da3824688952/volumes"
Jan 28 17:30:23 crc kubenswrapper[4811]: I0128 17:30:23.294496 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"]
Jan 28 17:30:23 crc kubenswrapper[4811]: I0128 17:30:23.295049 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="08cfa5b2-641d-4b3e-89d2-81a5af8f3834" containerName="openstackclient" containerID="cri-o://eac79f1b01bdef15ac09ff6ed35630e6acd06463265746d75bb314588a785b46" gracePeriod=2
Jan 28 17:30:23 crc kubenswrapper[4811]: I0128 17:30:23.319338 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"]
Jan 28 17:30:23 crc kubenswrapper[4811]: I0128 17:30:23.369866 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"]
Jan 28 17:30:23 crc kubenswrapper[4811]: E0128 17:30:23.370541 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e82f4af7-32ad-4515-ab89-cfb42f104ea1" containerName="collect-profiles"
Jan 28 17:30:23 crc kubenswrapper[4811]: I0128 17:30:23.370627 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="e82f4af7-32ad-4515-ab89-cfb42f104ea1" containerName="collect-profiles"
Jan 28 17:30:23 crc kubenswrapper[4811]: E0128 17:30:23.370731 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08cfa5b2-641d-4b3e-89d2-81a5af8f3834" containerName="openstackclient"
Jan 28 17:30:23 crc kubenswrapper[4811]: I0128 17:30:23.370794 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="08cfa5b2-641d-4b3e-89d2-81a5af8f3834" containerName="openstackclient"
Jan 28 17:30:23 crc kubenswrapper[4811]: I0128 17:30:23.371050 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="e82f4af7-32ad-4515-ab89-cfb42f104ea1" containerName="collect-profiles"
Jan 28 17:30:23 crc kubenswrapper[4811]: I0128 17:30:23.371119 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="08cfa5b2-641d-4b3e-89d2-81a5af8f3834" containerName="openstackclient"
Jan 28 17:30:23 crc kubenswrapper[4811]: I0128 17:30:23.371890 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Jan 28 17:30:23 crc kubenswrapper[4811]: I0128 17:30:23.390939 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Jan 28 17:30:23 crc kubenswrapper[4811]: I0128 17:30:23.435452 4811 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="08cfa5b2-641d-4b3e-89d2-81a5af8f3834" podUID="00e32ab2-c148-4f2b-b965-ea60e4d2abc4"
Jan 28 17:30:23 crc kubenswrapper[4811]: I0128 17:30:23.527243 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/00e32ab2-c148-4f2b-b965-ea60e4d2abc4-openstack-config-secret\") pod \"openstackclient\" (UID: \"00e32ab2-c148-4f2b-b965-ea60e4d2abc4\") " pod="openstack/openstackclient"
Jan 28 17:30:23 crc kubenswrapper[4811]: I0128 17:30:23.527473 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clwdg\" (UniqueName: \"kubernetes.io/projected/00e32ab2-c148-4f2b-b965-ea60e4d2abc4-kube-api-access-clwdg\") pod \"openstackclient\" (UID: \"00e32ab2-c148-4f2b-b965-ea60e4d2abc4\") " pod="openstack/openstackclient"
Jan 28 17:30:23 crc kubenswrapper[4811]: I0128 17:30:23.527650 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/00e32ab2-c148-4f2b-b965-ea60e4d2abc4-openstack-config\") pod \"openstackclient\" (UID: \"00e32ab2-c148-4f2b-b965-ea60e4d2abc4\") " pod="openstack/openstackclient"
Jan 28 17:30:23 crc kubenswrapper[4811]: I0128 17:30:23.554734 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"]
Jan 28 17:30:23 crc kubenswrapper[4811]: I0128 17:30:23.557658 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Jan 28 17:30:23 crc kubenswrapper[4811]: I0128 17:30:23.561174 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-m2tjf"
Jan 28 17:30:23 crc kubenswrapper[4811]: I0128 17:30:23.582732 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Jan 28 17:30:23 crc kubenswrapper[4811]: I0128 17:30:23.629974 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/00e32ab2-c148-4f2b-b965-ea60e4d2abc4-openstack-config\") pod \"openstackclient\" (UID: \"00e32ab2-c148-4f2b-b965-ea60e4d2abc4\") " pod="openstack/openstackclient"
Jan 28 17:30:23 crc kubenswrapper[4811]: I0128 17:30:23.630074 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/00e32ab2-c148-4f2b-b965-ea60e4d2abc4-openstack-config-secret\") pod \"openstackclient\" (UID: \"00e32ab2-c148-4f2b-b965-ea60e4d2abc4\") " pod="openstack/openstackclient"
Jan 28 17:30:23 crc kubenswrapper[4811]: I0128 17:30:23.630235 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clwdg\" (UniqueName: \"kubernetes.io/projected/00e32ab2-c148-4f2b-b965-ea60e4d2abc4-kube-api-access-clwdg\") pod \"openstackclient\" (UID: \"00e32ab2-c148-4f2b-b965-ea60e4d2abc4\") " pod="openstack/openstackclient"
Jan 28 17:30:23 crc kubenswrapper[4811]: I0128 17:30:23.631570 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/00e32ab2-c148-4f2b-b965-ea60e4d2abc4-openstack-config\") pod \"openstackclient\" (UID: \"00e32ab2-c148-4f2b-b965-ea60e4d2abc4\") " pod="openstack/openstackclient"
Jan 28 17:30:23 crc kubenswrapper[4811]: I0128 17:30:23.639056 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/00e32ab2-c148-4f2b-b965-ea60e4d2abc4-openstack-config-secret\") pod \"openstackclient\" (UID: \"00e32ab2-c148-4f2b-b965-ea60e4d2abc4\") " pod="openstack/openstackclient"
Jan 28 17:30:23 crc kubenswrapper[4811]: I0128 17:30:23.703144 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-clwdg\" (UniqueName: \"kubernetes.io/projected/00e32ab2-c148-4f2b-b965-ea60e4d2abc4-kube-api-access-clwdg\") pod \"openstackclient\" (UID: \"00e32ab2-c148-4f2b-b965-ea60e4d2abc4\") " pod="openstack/openstackclient"
Jan 28 17:30:23 crc kubenswrapper[4811]: I0128 17:30:23.714453 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
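gracePeriod=2 in the Killing-container entry above means the runtime delivers SIGTERM and escalates to SIGKILL if the process is still alive two seconds later. A minimal process-level sketch of that policy (the real termination happens inside CRI-O, not in kubelet Go code like this; "sleep 60" merely stands in for the container process):

package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

// killWithGrace sketches the runtime side of "Killing container with a grace
// period": SIGTERM first, SIGKILL if the process outlives the grace period.
func killWithGrace(cmd *exec.Cmd, grace time.Duration) {
	_ = cmd.Process.Signal(syscall.SIGTERM)
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()
	select {
	case <-done:
		fmt.Println("exited within grace period")
	case <-time.After(grace):
		_ = cmd.Process.Kill() // SIGKILL; the receive below reaps the process
		<-done
		fmt.Println("killed after grace period")
	}
}

func main() {
	cmd := exec.Command("sleep", "60")
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	killWithGrace(cmd, 2*time.Second) // gracePeriod=2, as in the log
}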
Jan 28 17:30:23 crc kubenswrapper[4811]: I0128 17:30:23.732529 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96zhm\" (UniqueName: \"kubernetes.io/projected/4860d3b1-9c47-4438-9a23-1fb475ac7853-kube-api-access-96zhm\") pod \"kube-state-metrics-0\" (UID: \"4860d3b1-9c47-4438-9a23-1fb475ac7853\") " pod="openstack/kube-state-metrics-0"
Jan 28 17:30:23 crc kubenswrapper[4811]: I0128 17:30:23.834087 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96zhm\" (UniqueName: \"kubernetes.io/projected/4860d3b1-9c47-4438-9a23-1fb475ac7853-kube-api-access-96zhm\") pod \"kube-state-metrics-0\" (UID: \"4860d3b1-9c47-4438-9a23-1fb475ac7853\") " pod="openstack/kube-state-metrics-0"
Jan 28 17:30:23 crc kubenswrapper[4811]: I0128 17:30:23.890994 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96zhm\" (UniqueName: \"kubernetes.io/projected/4860d3b1-9c47-4438-9a23-1fb475ac7853-kube-api-access-96zhm\") pod \"kube-state-metrics-0\" (UID: \"4860d3b1-9c47-4438-9a23-1fb475ac7853\") " pod="openstack/kube-state-metrics-0"
Jan 28 17:30:24 crc kubenswrapper[4811]: I0128 17:30:24.188963 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Jan 28 17:30:24 crc kubenswrapper[4811]: I0128 17:30:24.420796 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/alertmanager-metric-storage-0"]
Jan 28 17:30:24 crc kubenswrapper[4811]: I0128 17:30:24.434362 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0"
Jan 28 17:30:24 crc kubenswrapper[4811]: I0128 17:30:24.439709 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-tls-assets-0"
Jan 28 17:30:24 crc kubenswrapper[4811]: I0128 17:30:24.440141 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-cluster-tls-config"
Jan 28 17:30:24 crc kubenswrapper[4811]: I0128 17:30:24.441508 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-web-config"
Jan 28 17:30:24 crc kubenswrapper[4811]: I0128 17:30:24.441925 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-generated"
Jan 28 17:30:24 crc kubenswrapper[4811]: I0128 17:30:24.442072 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-alertmanager-dockercfg-4mgkj"
Jan 28 17:30:24 crc kubenswrapper[4811]: I0128 17:30:24.446299 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"]
Jan 28 17:30:24 crc kubenswrapper[4811]: I0128 17:30:24.555795 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/6f828c92-3711-4acd-a17b-e7f3a6fd75e8-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"6f828c92-3711-4acd-a17b-e7f3a6fd75e8\") " pod="openstack/alertmanager-metric-storage-0"
Jan 28 17:30:24 crc kubenswrapper[4811]: I0128 17:30:24.555889 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xd275\" (UniqueName: \"kubernetes.io/projected/6f828c92-3711-4acd-a17b-e7f3a6fd75e8-kube-api-access-xd275\") pod \"alertmanager-metric-storage-0\" (UID: \"6f828c92-3711-4acd-a17b-e7f3a6fd75e8\") " pod="openstack/alertmanager-metric-storage-0"
Jan 28 17:30:24 crc kubenswrapper[4811]: I0128 17:30:24.556128 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/6f828c92-3711-4acd-a17b-e7f3a6fd75e8-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"6f828c92-3711-4acd-a17b-e7f3a6fd75e8\") " pod="openstack/alertmanager-metric-storage-0"
Jan 28 17:30:24 crc kubenswrapper[4811]: I0128 17:30:24.556223 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/6f828c92-3711-4acd-a17b-e7f3a6fd75e8-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"6f828c92-3711-4acd-a17b-e7f3a6fd75e8\") " pod="openstack/alertmanager-metric-storage-0"
Jan 28 17:30:24 crc kubenswrapper[4811]: I0128 17:30:24.556672 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/6f828c92-3711-4acd-a17b-e7f3a6fd75e8-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"6f828c92-3711-4acd-a17b-e7f3a6fd75e8\") " pod="openstack/alertmanager-metric-storage-0"
Jan 28 17:30:24 crc kubenswrapper[4811]: I0128 17:30:24.556723 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/6f828c92-3711-4acd-a17b-e7f3a6fd75e8-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"6f828c92-3711-4acd-a17b-e7f3a6fd75e8\") " pod="openstack/alertmanager-metric-storage-0"
Jan 28 17:30:24 crc kubenswrapper[4811]: I0128 17:30:24.556800 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/6f828c92-3711-4acd-a17b-e7f3a6fd75e8-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"6f828c92-3711-4acd-a17b-e7f3a6fd75e8\") " pod="openstack/alertmanager-metric-storage-0"
Jan 28 17:30:24 crc kubenswrapper[4811]: I0128 17:30:24.659610 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/6f828c92-3711-4acd-a17b-e7f3a6fd75e8-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"6f828c92-3711-4acd-a17b-e7f3a6fd75e8\") " pod="openstack/alertmanager-metric-storage-0"
Jan 28 17:30:24 crc kubenswrapper[4811]: I0128 17:30:24.659916 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/6f828c92-3711-4acd-a17b-e7f3a6fd75e8-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"6f828c92-3711-4acd-a17b-e7f3a6fd75e8\") " pod="openstack/alertmanager-metric-storage-0"
Jan 28 17:30:24 crc kubenswrapper[4811]: I0128 17:30:24.659961 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/6f828c92-3711-4acd-a17b-e7f3a6fd75e8-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"6f828c92-3711-4acd-a17b-e7f3a6fd75e8\") " pod="openstack/alertmanager-metric-storage-0"
Jan 28 17:30:24 crc kubenswrapper[4811]: I0128 17:30:24.659986 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/6f828c92-3711-4acd-a17b-e7f3a6fd75e8-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"6f828c92-3711-4acd-a17b-e7f3a6fd75e8\") " pod="openstack/alertmanager-metric-storage-0"
Jan 28 17:30:24 crc kubenswrapper[4811]: I0128 17:30:24.660011 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/6f828c92-3711-4acd-a17b-e7f3a6fd75e8-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"6f828c92-3711-4acd-a17b-e7f3a6fd75e8\") " pod="openstack/alertmanager-metric-storage-0"
Jan 28 17:30:24 crc kubenswrapper[4811]: I0128 17:30:24.660049 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/6f828c92-3711-4acd-a17b-e7f3a6fd75e8-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"6f828c92-3711-4acd-a17b-e7f3a6fd75e8\") " pod="openstack/alertmanager-metric-storage-0"
Jan 28 17:30:24 crc kubenswrapper[4811]: I0128 17:30:24.660072 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xd275\" (UniqueName: \"kubernetes.io/projected/6f828c92-3711-4acd-a17b-e7f3a6fd75e8-kube-api-access-xd275\") pod \"alertmanager-metric-storage-0\" (UID: \"6f828c92-3711-4acd-a17b-e7f3a6fd75e8\") " pod="openstack/alertmanager-metric-storage-0"
Jan 28 17:30:24 crc kubenswrapper[4811]: I0128 17:30:24.663063 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/6f828c92-3711-4acd-a17b-e7f3a6fd75e8-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"6f828c92-3711-4acd-a17b-e7f3a6fd75e8\") " pod="openstack/alertmanager-metric-storage-0"
Jan 28 17:30:24 crc kubenswrapper[4811]: I0128 17:30:24.673879 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Jan 28 17:30:24 crc kubenswrapper[4811]: I0128 17:30:24.679767 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/6f828c92-3711-4acd-a17b-e7f3a6fd75e8-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"6f828c92-3711-4acd-a17b-e7f3a6fd75e8\") " pod="openstack/alertmanager-metric-storage-0"
Jan 28 17:30:24 crc kubenswrapper[4811]: I0128 17:30:24.681128 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/6f828c92-3711-4acd-a17b-e7f3a6fd75e8-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"6f828c92-3711-4acd-a17b-e7f3a6fd75e8\") " pod="openstack/alertmanager-metric-storage-0"
Jan 28 17:30:24 crc kubenswrapper[4811]: I0128 17:30:24.681277 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/6f828c92-3711-4acd-a17b-e7f3a6fd75e8-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"6f828c92-3711-4acd-a17b-e7f3a6fd75e8\") " pod="openstack/alertmanager-metric-storage-0"
Jan 28 17:30:24 crc kubenswrapper[4811]: I0128 17:30:24.689288 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/6f828c92-3711-4acd-a17b-e7f3a6fd75e8-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"6f828c92-3711-4acd-a17b-e7f3a6fd75e8\") " pod="openstack/alertmanager-metric-storage-0"
Jan 28 17:30:24 crc kubenswrapper[4811]: I0128 17:30:24.706268 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xd275\" (UniqueName: \"kubernetes.io/projected/6f828c92-3711-4acd-a17b-e7f3a6fd75e8-kube-api-access-xd275\") pod \"alertmanager-metric-storage-0\" (UID: \"6f828c92-3711-4acd-a17b-e7f3a6fd75e8\") " pod="openstack/alertmanager-metric-storage-0"
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xd275\" (UniqueName: \"kubernetes.io/projected/6f828c92-3711-4acd-a17b-e7f3a6fd75e8-kube-api-access-xd275\") pod \"alertmanager-metric-storage-0\" (UID: \"6f828c92-3711-4acd-a17b-e7f3a6fd75e8\") " pod="openstack/alertmanager-metric-storage-0" Jan 28 17:30:24 crc kubenswrapper[4811]: I0128 17:30:24.709007 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/6f828c92-3711-4acd-a17b-e7f3a6fd75e8-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"6f828c92-3711-4acd-a17b-e7f3a6fd75e8\") " pod="openstack/alertmanager-metric-storage-0" Jan 28 17:30:24 crc kubenswrapper[4811]: I0128 17:30:24.764128 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:24.998034 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.002409 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.015878 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.016225 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.016380 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-qtl8h" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.016560 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.016754 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.017627 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-1" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.017780 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.017917 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-2" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.026108 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.120120 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.176332 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/02f75eea-3b22-4f21-8f2c-c3f93423f33f-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"02f75eea-3b22-4f21-8f2c-c3f93423f33f\") " pod="openstack/prometheus-metric-storage-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.176944 4811 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfx2w\" (UniqueName: \"kubernetes.io/projected/02f75eea-3b22-4f21-8f2c-c3f93423f33f-kube-api-access-jfx2w\") pod \"prometheus-metric-storage-0\" (UID: \"02f75eea-3b22-4f21-8f2c-c3f93423f33f\") " pod="openstack/prometheus-metric-storage-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.177021 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/02f75eea-3b22-4f21-8f2c-c3f93423f33f-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"02f75eea-3b22-4f21-8f2c-c3f93423f33f\") " pod="openstack/prometheus-metric-storage-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.177094 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/02f75eea-3b22-4f21-8f2c-c3f93423f33f-config\") pod \"prometheus-metric-storage-0\" (UID: \"02f75eea-3b22-4f21-8f2c-c3f93423f33f\") " pod="openstack/prometheus-metric-storage-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.177129 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/02f75eea-3b22-4f21-8f2c-c3f93423f33f-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"02f75eea-3b22-4f21-8f2c-c3f93423f33f\") " pod="openstack/prometheus-metric-storage-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.177159 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/02f75eea-3b22-4f21-8f2c-c3f93423f33f-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"02f75eea-3b22-4f21-8f2c-c3f93423f33f\") " pod="openstack/prometheus-metric-storage-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.177227 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/02f75eea-3b22-4f21-8f2c-c3f93423f33f-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"02f75eea-3b22-4f21-8f2c-c3f93423f33f\") " pod="openstack/prometheus-metric-storage-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.177311 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/02f75eea-3b22-4f21-8f2c-c3f93423f33f-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"02f75eea-3b22-4f21-8f2c-c3f93423f33f\") " pod="openstack/prometheus-metric-storage-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.177394 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-39e9fa5d-3f85-4efa-a01c-f3c767bc2c05\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39e9fa5d-3f85-4efa-a01c-f3c767bc2c05\") pod \"prometheus-metric-storage-0\" (UID: \"02f75eea-3b22-4f21-8f2c-c3f93423f33f\") " pod="openstack/prometheus-metric-storage-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.177486 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: 
\"kubernetes.io/configmap/02f75eea-3b22-4f21-8f2c-c3f93423f33f-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"02f75eea-3b22-4f21-8f2c-c3f93423f33f\") " pod="openstack/prometheus-metric-storage-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.286155 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfx2w\" (UniqueName: \"kubernetes.io/projected/02f75eea-3b22-4f21-8f2c-c3f93423f33f-kube-api-access-jfx2w\") pod \"prometheus-metric-storage-0\" (UID: \"02f75eea-3b22-4f21-8f2c-c3f93423f33f\") " pod="openstack/prometheus-metric-storage-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.286560 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/02f75eea-3b22-4f21-8f2c-c3f93423f33f-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"02f75eea-3b22-4f21-8f2c-c3f93423f33f\") " pod="openstack/prometheus-metric-storage-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.286752 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/02f75eea-3b22-4f21-8f2c-c3f93423f33f-config\") pod \"prometheus-metric-storage-0\" (UID: \"02f75eea-3b22-4f21-8f2c-c3f93423f33f\") " pod="openstack/prometheus-metric-storage-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.286876 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/02f75eea-3b22-4f21-8f2c-c3f93423f33f-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"02f75eea-3b22-4f21-8f2c-c3f93423f33f\") " pod="openstack/prometheus-metric-storage-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.286977 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/02f75eea-3b22-4f21-8f2c-c3f93423f33f-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"02f75eea-3b22-4f21-8f2c-c3f93423f33f\") " pod="openstack/prometheus-metric-storage-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.287109 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/02f75eea-3b22-4f21-8f2c-c3f93423f33f-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"02f75eea-3b22-4f21-8f2c-c3f93423f33f\") " pod="openstack/prometheus-metric-storage-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.287269 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/02f75eea-3b22-4f21-8f2c-c3f93423f33f-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"02f75eea-3b22-4f21-8f2c-c3f93423f33f\") " pod="openstack/prometheus-metric-storage-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.287449 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-39e9fa5d-3f85-4efa-a01c-f3c767bc2c05\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39e9fa5d-3f85-4efa-a01c-f3c767bc2c05\") pod \"prometheus-metric-storage-0\" (UID: \"02f75eea-3b22-4f21-8f2c-c3f93423f33f\") " pod="openstack/prometheus-metric-storage-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.287569 4811 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/02f75eea-3b22-4f21-8f2c-c3f93423f33f-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"02f75eea-3b22-4f21-8f2c-c3f93423f33f\") " pod="openstack/prometheus-metric-storage-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.287797 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/02f75eea-3b22-4f21-8f2c-c3f93423f33f-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"02f75eea-3b22-4f21-8f2c-c3f93423f33f\") " pod="openstack/prometheus-metric-storage-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.288905 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/02f75eea-3b22-4f21-8f2c-c3f93423f33f-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"02f75eea-3b22-4f21-8f2c-c3f93423f33f\") " pod="openstack/prometheus-metric-storage-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.289704 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/02f75eea-3b22-4f21-8f2c-c3f93423f33f-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"02f75eea-3b22-4f21-8f2c-c3f93423f33f\") " pod="openstack/prometheus-metric-storage-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.302097 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/02f75eea-3b22-4f21-8f2c-c3f93423f33f-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"02f75eea-3b22-4f21-8f2c-c3f93423f33f\") " pod="openstack/prometheus-metric-storage-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.304408 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/02f75eea-3b22-4f21-8f2c-c3f93423f33f-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"02f75eea-3b22-4f21-8f2c-c3f93423f33f\") " pod="openstack/prometheus-metric-storage-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.315281 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/02f75eea-3b22-4f21-8f2c-c3f93423f33f-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"02f75eea-3b22-4f21-8f2c-c3f93423f33f\") " pod="openstack/prometheus-metric-storage-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.316073 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/02f75eea-3b22-4f21-8f2c-c3f93423f33f-config\") pod \"prometheus-metric-storage-0\" (UID: \"02f75eea-3b22-4f21-8f2c-c3f93423f33f\") " pod="openstack/prometheus-metric-storage-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.316768 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/02f75eea-3b22-4f21-8f2c-c3f93423f33f-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"02f75eea-3b22-4f21-8f2c-c3f93423f33f\") " pod="openstack/prometheus-metric-storage-0" Jan 28 17:30:25 crc 
kubenswrapper[4811]: I0128 17:30:25.322060 4811 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.322113 4811 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-39e9fa5d-3f85-4efa-a01c-f3c767bc2c05\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39e9fa5d-3f85-4efa-a01c-f3c767bc2c05\") pod \"prometheus-metric-storage-0\" (UID: \"02f75eea-3b22-4f21-8f2c-c3f93423f33f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/edbce70f544bccd2f86fdeb0a28eecb7e75ad004c46df3e0f07279eac7ecf551/globalmount\"" pod="openstack/prometheus-metric-storage-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.323255 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/02f75eea-3b22-4f21-8f2c-c3f93423f33f-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"02f75eea-3b22-4f21-8f2c-c3f93423f33f\") " pod="openstack/prometheus-metric-storage-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.334363 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfx2w\" (UniqueName: \"kubernetes.io/projected/02f75eea-3b22-4f21-8f2c-c3f93423f33f-kube-api-access-jfx2w\") pod \"prometheus-metric-storage-0\" (UID: \"02f75eea-3b22-4f21-8f2c-c3f93423f33f\") " pod="openstack/prometheus-metric-storage-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.454776 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-39e9fa5d-3f85-4efa-a01c-f3c767bc2c05\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39e9fa5d-3f85-4efa-a01c-f3c767bc2c05\") pod \"prometheus-metric-storage-0\" (UID: \"02f75eea-3b22-4f21-8f2c-c3f93423f33f\") " pod="openstack/prometheus-metric-storage-0" Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.651682 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"00e32ab2-c148-4f2b-b965-ea60e4d2abc4","Type":"ContainerStarted","Data":"f186f029215fb699e63abfe9e33662321a836785b681e102ab171a7501729797"} Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.651737 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"00e32ab2-c148-4f2b-b965-ea60e4d2abc4","Type":"ContainerStarted","Data":"4b17a57d8ea12c11490f38a2864e2616fed8c02b50c14a47f7d75266f1d5f33c"} Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.657070 4811 util.go:30] "No sandbox for pod can be found. 
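The csi_attacher.go:380 entry above records a capability check: the kubevirt.io.hostpath-provisioner driver does not advertise STAGE_UNSTAGE_VOLUME, so the NodeStageVolume step is skipped and MountDevice is recorded as trivially succeeded, leaving the real work to the per-pod SetUp (NodePublishVolume). A sketch of that decision, with hypothetical types; the CSI spec defines the capability, the rest is illustrative:

package main

import "fmt"

// nodeCapability mirrors the CSI NodeServiceCapability value relevant here.
type nodeCapability int

const capStageUnstage nodeCapability = iota

// mountDevice sketches the attacher's choice: NodeStageVolume only runs when
// the driver advertises STAGE_UNSTAGE_VOLUME; otherwise the device-mount step
// is a no-op and the volume proceeds straight to per-pod NodePublishVolume.
func mountDevice(driver string, caps map[nodeCapability]bool, globalMountPath string) {
	if !caps[capStageUnstage] {
		fmt.Printf("%s: STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...\n", driver)
		return
	}
	fmt.Printf("%s: NodeStageVolume at %s\n", driver, globalMountPath)
}

func main() {
	mountDevice("kubevirt.io.hostpath-provisioner", map[nodeCapability]bool{},
		"/var/lib/kubelet/plugins/kubernetes.io/csi/.../globalmount") // path elided for the sketch
}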
Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.668857 4811 generic.go:334] "Generic (PLEG): container finished" podID="08cfa5b2-641d-4b3e-89d2-81a5af8f3834" containerID="eac79f1b01bdef15ac09ff6ed35630e6acd06463265746d75bb314588a785b46" exitCode=137
Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.679440 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"4860d3b1-9c47-4438-9a23-1fb475ac7853","Type":"ContainerStarted","Data":"3bf356a4e4f2168759a0fb1dd07902b586b6e4c432148fb17460b2673c6b5500"}
Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.691786 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.691772166 podStartE2EDuration="2.691772166s" podCreationTimestamp="2026-01-28 17:30:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:30:25.690514772 +0000 UTC m=+6318.444878355" watchObservedRunningTime="2026-01-28 17:30:25.691772166 +0000 UTC m=+6318.446135749"
Jan 28 17:30:25 crc kubenswrapper[4811]: I0128 17:30:25.777322 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"]
Jan 28 17:30:26 crc kubenswrapper[4811]: I0128 17:30:26.192607 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Jan 28 17:30:26 crc kubenswrapper[4811]: I0128 17:30:26.316991 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/08cfa5b2-641d-4b3e-89d2-81a5af8f3834-openstack-config-secret\") pod \"08cfa5b2-641d-4b3e-89d2-81a5af8f3834\" (UID: \"08cfa5b2-641d-4b3e-89d2-81a5af8f3834\") "
Jan 28 17:30:26 crc kubenswrapper[4811]: I0128 17:30:26.317148 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f927p\" (UniqueName: \"kubernetes.io/projected/08cfa5b2-641d-4b3e-89d2-81a5af8f3834-kube-api-access-f927p\") pod \"08cfa5b2-641d-4b3e-89d2-81a5af8f3834\" (UID: \"08cfa5b2-641d-4b3e-89d2-81a5af8f3834\") "
Jan 28 17:30:26 crc kubenswrapper[4811]: I0128 17:30:26.317368 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/08cfa5b2-641d-4b3e-89d2-81a5af8f3834-openstack-config\") pod \"08cfa5b2-641d-4b3e-89d2-81a5af8f3834\" (UID: \"08cfa5b2-641d-4b3e-89d2-81a5af8f3834\") "
Jan 28 17:30:26 crc kubenswrapper[4811]: I0128 17:30:26.323745 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08cfa5b2-641d-4b3e-89d2-81a5af8f3834-kube-api-access-f927p" (OuterVolumeSpecName: "kube-api-access-f927p") pod "08cfa5b2-641d-4b3e-89d2-81a5af8f3834" (UID: "08cfa5b2-641d-4b3e-89d2-81a5af8f3834"). InnerVolumeSpecName "kube-api-access-f927p". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 17:30:26 crc kubenswrapper[4811]: I0128 17:30:26.373904 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08cfa5b2-641d-4b3e-89d2-81a5af8f3834-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "08cfa5b2-641d-4b3e-89d2-81a5af8f3834" (UID: "08cfa5b2-641d-4b3e-89d2-81a5af8f3834"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:30:26 crc kubenswrapper[4811]: I0128 17:30:26.419875 4811 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/08cfa5b2-641d-4b3e-89d2-81a5af8f3834-openstack-config-secret\") on node \"crc\" DevicePath \"\""
Jan 28 17:30:26 crc kubenswrapper[4811]: I0128 17:30:26.419905 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f927p\" (UniqueName: \"kubernetes.io/projected/08cfa5b2-641d-4b3e-89d2-81a5af8f3834-kube-api-access-f927p\") on node \"crc\" DevicePath \"\""
Jan 28 17:30:26 crc kubenswrapper[4811]: I0128 17:30:26.443209 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/08cfa5b2-641d-4b3e-89d2-81a5af8f3834-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "08cfa5b2-641d-4b3e-89d2-81a5af8f3834" (UID: "08cfa5b2-641d-4b3e-89d2-81a5af8f3834"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 17:30:26 crc kubenswrapper[4811]: I0128 17:30:26.484858 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Jan 28 17:30:26 crc kubenswrapper[4811]: W0128 17:30:26.490821 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod02f75eea_3b22_4f21_8f2c_c3f93423f33f.slice/crio-d2ef4df37864803d12961dbc4c33af3406fd7b333a408c074edf67b88e3411a4 WatchSource:0}: Error finding container d2ef4df37864803d12961dbc4c33af3406fd7b333a408c074edf67b88e3411a4: Status 404 returned error can't find the container with id d2ef4df37864803d12961dbc4c33af3406fd7b333a408c074edf67b88e3411a4
Jan 28 17:30:26 crc kubenswrapper[4811]: I0128 17:30:26.521793 4811 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/08cfa5b2-641d-4b3e-89d2-81a5af8f3834-openstack-config\") on node \"crc\" DevicePath \"\""
Jan 28 17:30:26 crc kubenswrapper[4811]: I0128 17:30:26.690712 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"02f75eea-3b22-4f21-8f2c-c3f93423f33f","Type":"ContainerStarted","Data":"d2ef4df37864803d12961dbc4c33af3406fd7b333a408c074edf67b88e3411a4"}
Jan 28 17:30:26 crc kubenswrapper[4811]: I0128 17:30:26.694054 4811 scope.go:117] "RemoveContainer" containerID="eac79f1b01bdef15ac09ff6ed35630e6acd06463265746d75bb314588a785b46"
Jan 28 17:30:26 crc kubenswrapper[4811]: I0128 17:30:26.694174 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Need to start a new one" pod="openstack/openstackclient" Jan 28 17:30:26 crc kubenswrapper[4811]: I0128 17:30:26.707157 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"6f828c92-3711-4acd-a17b-e7f3a6fd75e8","Type":"ContainerStarted","Data":"6b188e4b7318058a19bd19920e37bc1718f426e5d64a01c731f593f3a5cd6858"} Jan 28 17:30:27 crc kubenswrapper[4811]: I0128 17:30:27.718212 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"4860d3b1-9c47-4438-9a23-1fb475ac7853","Type":"ContainerStarted","Data":"6b163ba1ec493dbc903c2c092786ef43dfeb37b4402b4f87d4a7e1bc4fdc944f"} Jan 28 17:30:27 crc kubenswrapper[4811]: I0128 17:30:27.718839 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Jan 28 17:30:27 crc kubenswrapper[4811]: I0128 17:30:27.740054 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=3.20558672 podStartE2EDuration="4.740032271s" podCreationTimestamp="2026-01-28 17:30:23 +0000 UTC" firstStartedPulling="2026-01-28 17:30:25.135616889 +0000 UTC m=+6317.889980472" lastFinishedPulling="2026-01-28 17:30:26.67006244 +0000 UTC m=+6319.424426023" observedRunningTime="2026-01-28 17:30:27.732386633 +0000 UTC m=+6320.486750216" watchObservedRunningTime="2026-01-28 17:30:27.740032271 +0000 UTC m=+6320.494395854" Jan 28 17:30:28 crc kubenswrapper[4811]: I0128 17:30:28.353983 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08cfa5b2-641d-4b3e-89d2-81a5af8f3834" path="/var/lib/kubelet/pods/08cfa5b2-641d-4b3e-89d2-81a5af8f3834/volumes" Jan 28 17:30:32 crc kubenswrapper[4811]: I0128 17:30:32.723811 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nrgkn"] Jan 28 17:30:32 crc kubenswrapper[4811]: I0128 17:30:32.727127 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nrgkn" Jan 28 17:30:32 crc kubenswrapper[4811]: I0128 17:30:32.738675 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nrgkn"] Jan 28 17:30:32 crc kubenswrapper[4811]: I0128 17:30:32.784769 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"6f828c92-3711-4acd-a17b-e7f3a6fd75e8","Type":"ContainerStarted","Data":"1cda629b1a625c3b82a0da2ef457df2208831a34e8248407f61d08e40c30ff3b"} Jan 28 17:30:32 crc kubenswrapper[4811]: I0128 17:30:32.875636 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6wbc\" (UniqueName: \"kubernetes.io/projected/ff52afc6-bc59-4086-8826-afbc11b5985f-kube-api-access-x6wbc\") pod \"redhat-marketplace-nrgkn\" (UID: \"ff52afc6-bc59-4086-8826-afbc11b5985f\") " pod="openshift-marketplace/redhat-marketplace-nrgkn" Jan 28 17:30:32 crc kubenswrapper[4811]: I0128 17:30:32.875681 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff52afc6-bc59-4086-8826-afbc11b5985f-catalog-content\") pod \"redhat-marketplace-nrgkn\" (UID: \"ff52afc6-bc59-4086-8826-afbc11b5985f\") " pod="openshift-marketplace/redhat-marketplace-nrgkn" Jan 28 17:30:32 crc kubenswrapper[4811]: I0128 17:30:32.875839 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff52afc6-bc59-4086-8826-afbc11b5985f-utilities\") pod \"redhat-marketplace-nrgkn\" (UID: \"ff52afc6-bc59-4086-8826-afbc11b5985f\") " pod="openshift-marketplace/redhat-marketplace-nrgkn" Jan 28 17:30:32 crc kubenswrapper[4811]: I0128 17:30:32.978392 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6wbc\" (UniqueName: \"kubernetes.io/projected/ff52afc6-bc59-4086-8826-afbc11b5985f-kube-api-access-x6wbc\") pod \"redhat-marketplace-nrgkn\" (UID: \"ff52afc6-bc59-4086-8826-afbc11b5985f\") " pod="openshift-marketplace/redhat-marketplace-nrgkn" Jan 28 17:30:32 crc kubenswrapper[4811]: I0128 17:30:32.978686 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff52afc6-bc59-4086-8826-afbc11b5985f-catalog-content\") pod \"redhat-marketplace-nrgkn\" (UID: \"ff52afc6-bc59-4086-8826-afbc11b5985f\") " pod="openshift-marketplace/redhat-marketplace-nrgkn" Jan 28 17:30:32 crc kubenswrapper[4811]: I0128 17:30:32.978984 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff52afc6-bc59-4086-8826-afbc11b5985f-utilities\") pod \"redhat-marketplace-nrgkn\" (UID: \"ff52afc6-bc59-4086-8826-afbc11b5985f\") " pod="openshift-marketplace/redhat-marketplace-nrgkn" Jan 28 17:30:32 crc kubenswrapper[4811]: I0128 17:30:32.979298 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff52afc6-bc59-4086-8826-afbc11b5985f-catalog-content\") pod \"redhat-marketplace-nrgkn\" (UID: \"ff52afc6-bc59-4086-8826-afbc11b5985f\") " pod="openshift-marketplace/redhat-marketplace-nrgkn" Jan 28 17:30:32 crc kubenswrapper[4811]: I0128 17:30:32.979410 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/ff52afc6-bc59-4086-8826-afbc11b5985f-utilities\") pod \"redhat-marketplace-nrgkn\" (UID: \"ff52afc6-bc59-4086-8826-afbc11b5985f\") " pod="openshift-marketplace/redhat-marketplace-nrgkn" Jan 28 17:30:33 crc kubenswrapper[4811]: I0128 17:30:33.087543 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 17:30:33 crc kubenswrapper[4811]: I0128 17:30:33.087828 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 17:30:33 crc kubenswrapper[4811]: I0128 17:30:33.087983 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" Jan 28 17:30:33 crc kubenswrapper[4811]: I0128 17:30:33.088645 4811 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c8f31485a8ab10a9c157ae14e2cb2f7e512fa0e9a55fee158caf88525ec54b35"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 28 17:30:33 crc kubenswrapper[4811]: I0128 17:30:33.088812 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://c8f31485a8ab10a9c157ae14e2cb2f7e512fa0e9a55fee158caf88525ec54b35" gracePeriod=600 Jan 28 17:30:33 crc kubenswrapper[4811]: I0128 17:30:33.101380 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6wbc\" (UniqueName: \"kubernetes.io/projected/ff52afc6-bc59-4086-8826-afbc11b5985f-kube-api-access-x6wbc\") pod \"redhat-marketplace-nrgkn\" (UID: \"ff52afc6-bc59-4086-8826-afbc11b5985f\") " pod="openshift-marketplace/redhat-marketplace-nrgkn" Jan 28 17:30:33 crc kubenswrapper[4811]: I0128 17:30:33.157026 4811 scope.go:117] "RemoveContainer" containerID="37703212b593fd9ae93897220f10c170c22fb81bf9d081d708c345f6d43c5545" Jan 28 17:30:33 crc kubenswrapper[4811]: I0128 17:30:33.215411 4811 scope.go:117] "RemoveContainer" containerID="34dc44328ff7640bf0d1d622839ae48b0134339e8f707e6a0c34eba85e6ae89a" Jan 28 17:30:33 crc kubenswrapper[4811]: I0128 17:30:33.279316 4811 scope.go:117] "RemoveContainer" containerID="8b68c13448507fcffa8c820fd66bbb75e52b3b9f22b1191581477d98731338db" Jan 28 17:30:33 crc kubenswrapper[4811]: I0128 17:30:33.334543 4811 scope.go:117] "RemoveContainer" containerID="d0e2efe951dda41b33d59a9746a5bdeb2c3469772e8c06b657033b2b019dd94b" Jan 28 17:30:33 crc kubenswrapper[4811]: I0128 17:30:33.380636 4811 scope.go:117] "RemoveContainer" containerID="01d31856a129f95f65f92a1cadd8dd34a38305f752d7992560027cf5c164e3ab" Jan 28 17:30:33 crc kubenswrapper[4811]: I0128 17:30:33.388931 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nrgkn" Jan 28 17:30:33 crc kubenswrapper[4811]: I0128 17:30:33.407994 4811 scope.go:117] "RemoveContainer" containerID="8f9b11063f5240f101ecc491c17e4609c152174ccc2d7ffd20494b56a2bb4f67" Jan 28 17:30:33 crc kubenswrapper[4811]: I0128 17:30:33.443575 4811 scope.go:117] "RemoveContainer" containerID="1a70d8d63e884eab873fec90dbd70e786e8aa501847ddf8c245860a58e535d77" Jan 28 17:30:33 crc kubenswrapper[4811]: I0128 17:30:33.462995 4811 scope.go:117] "RemoveContainer" containerID="ef428f512bc211ec40f2a4a7c632a7c7929aedff48882995beb79cb7f80d9007" Jan 28 17:30:33 crc kubenswrapper[4811]: I0128 17:30:33.488055 4811 scope.go:117] "RemoveContainer" containerID="5283b1d61528f78cba45ccc77e3c2d3739cb9c697ef7448f7615cb23e66ed4a4" Jan 28 17:30:33 crc kubenswrapper[4811]: I0128 17:30:33.521609 4811 scope.go:117] "RemoveContainer" containerID="108f4df49cd263d45463b70d92fc0237e0bc008a8bf3edc50c3d6c14782422d7" Jan 28 17:30:33 crc kubenswrapper[4811]: I0128 17:30:33.800460 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="c8f31485a8ab10a9c157ae14e2cb2f7e512fa0e9a55fee158caf88525ec54b35" exitCode=0 Jan 28 17:30:33 crc kubenswrapper[4811]: I0128 17:30:33.800539 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"c8f31485a8ab10a9c157ae14e2cb2f7e512fa0e9a55fee158caf88525ec54b35"} Jan 28 17:30:33 crc kubenswrapper[4811]: I0128 17:30:33.801931 4811 scope.go:117] "RemoveContainer" containerID="3dea4adb7c8ad114e1ea2dd4f826ff466865b50d5f1c4a4806ff535fa6793f5b" Jan 28 17:30:33 crc kubenswrapper[4811]: E0128 17:30:33.828153 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:30:33 crc kubenswrapper[4811]: W0128 17:30:33.959405 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podff52afc6_bc59_4086_8826_afbc11b5985f.slice/crio-b239e0b478d23bc2502d89a5f8cc647e5305d80a1a1d111b3d51173d6f5c2efc WatchSource:0}: Error finding container b239e0b478d23bc2502d89a5f8cc647e5305d80a1a1d111b3d51173d6f5c2efc: Status 404 returned error can't find the container with id b239e0b478d23bc2502d89a5f8cc647e5305d80a1a1d111b3d51173d6f5c2efc Jan 28 17:30:33 crc kubenswrapper[4811]: I0128 17:30:33.960396 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nrgkn"] Jan 28 17:30:34 crc kubenswrapper[4811]: I0128 17:30:34.196937 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Jan 28 17:30:34 crc kubenswrapper[4811]: I0128 17:30:34.823540 4811 scope.go:117] "RemoveContainer" containerID="c8f31485a8ab10a9c157ae14e2cb2f7e512fa0e9a55fee158caf88525ec54b35" Jan 28 17:30:34 crc kubenswrapper[4811]: I0128 17:30:34.823891 4811 generic.go:334] "Generic (PLEG): container finished" podID="ff52afc6-bc59-4086-8826-afbc11b5985f" containerID="d45f5a23f89b788c8bf9c2d068db2eb3dab74c6e8c2b31079a023a136d637108" 
exitCode=0 Jan 28 17:30:34 crc kubenswrapper[4811]: I0128 17:30:34.823927 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nrgkn" event={"ID":"ff52afc6-bc59-4086-8826-afbc11b5985f","Type":"ContainerDied","Data":"d45f5a23f89b788c8bf9c2d068db2eb3dab74c6e8c2b31079a023a136d637108"} Jan 28 17:30:34 crc kubenswrapper[4811]: I0128 17:30:34.823953 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nrgkn" event={"ID":"ff52afc6-bc59-4086-8826-afbc11b5985f","Type":"ContainerStarted","Data":"b239e0b478d23bc2502d89a5f8cc647e5305d80a1a1d111b3d51173d6f5c2efc"} Jan 28 17:30:34 crc kubenswrapper[4811]: E0128 17:30:34.824770 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:30:35 crc kubenswrapper[4811]: I0128 17:30:35.833827 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"02f75eea-3b22-4f21-8f2c-c3f93423f33f","Type":"ContainerStarted","Data":"5ac914bb8a3e35603627f582f6564d74b97aafe63ef8ffbd39e7f9e6fd25f7da"} Jan 28 17:30:36 crc kubenswrapper[4811]: I0128 17:30:36.031711 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-8f868"] Jan 28 17:30:36 crc kubenswrapper[4811]: I0128 17:30:36.041313 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-8f868"] Jan 28 17:30:36 crc kubenswrapper[4811]: I0128 17:30:36.354132 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f89fe70-e439-44f2-9c8f-f665bb2b646d" path="/var/lib/kubelet/pods/5f89fe70-e439-44f2-9c8f-f665bb2b646d/volumes" Jan 28 17:30:36 crc kubenswrapper[4811]: I0128 17:30:36.847009 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nrgkn" event={"ID":"ff52afc6-bc59-4086-8826-afbc11b5985f","Type":"ContainerStarted","Data":"baf7b79dd745b45727df9c761b126391fb6a8f4d54642f3adee87f6a003ebef0"} Jan 28 17:30:37 crc kubenswrapper[4811]: I0128 17:30:37.861395 4811 generic.go:334] "Generic (PLEG): container finished" podID="ff52afc6-bc59-4086-8826-afbc11b5985f" containerID="baf7b79dd745b45727df9c761b126391fb6a8f4d54642f3adee87f6a003ebef0" exitCode=0 Jan 28 17:30:37 crc kubenswrapper[4811]: I0128 17:30:37.861926 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nrgkn" event={"ID":"ff52afc6-bc59-4086-8826-afbc11b5985f","Type":"ContainerDied","Data":"baf7b79dd745b45727df9c761b126391fb6a8f4d54642f3adee87f6a003ebef0"} Jan 28 17:30:39 crc kubenswrapper[4811]: I0128 17:30:39.970764 4811 generic.go:334] "Generic (PLEG): container finished" podID="6f828c92-3711-4acd-a17b-e7f3a6fd75e8" containerID="1cda629b1a625c3b82a0da2ef457df2208831a34e8248407f61d08e40c30ff3b" exitCode=0 Jan 28 17:30:39 crc kubenswrapper[4811]: I0128 17:30:39.971027 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"6f828c92-3711-4acd-a17b-e7f3a6fd75e8","Type":"ContainerDied","Data":"1cda629b1a625c3b82a0da2ef457df2208831a34e8248407f61d08e40c30ff3b"} Jan 28 17:30:39 crc kubenswrapper[4811]: 
I0128 17:30:39.977814 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nrgkn" event={"ID":"ff52afc6-bc59-4086-8826-afbc11b5985f","Type":"ContainerStarted","Data":"e23b76fb5e9126f8609bd57b8abfe3e6bd98585a9f96e8ed2da7f69a017088ba"} Jan 28 17:30:40 crc kubenswrapper[4811]: I0128 17:30:40.005555 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-nrgkn" podStartSLOduration=3.797883057 podStartE2EDuration="8.005536651s" podCreationTimestamp="2026-01-28 17:30:32 +0000 UTC" firstStartedPulling="2026-01-28 17:30:34.826290755 +0000 UTC m=+6327.580654338" lastFinishedPulling="2026-01-28 17:30:39.033944349 +0000 UTC m=+6331.788307932" observedRunningTime="2026-01-28 17:30:40.004947985 +0000 UTC m=+6332.759311568" watchObservedRunningTime="2026-01-28 17:30:40.005536651 +0000 UTC m=+6332.759900234" Jan 28 17:30:43 crc kubenswrapper[4811]: I0128 17:30:43.006590 4811 generic.go:334] "Generic (PLEG): container finished" podID="02f75eea-3b22-4f21-8f2c-c3f93423f33f" containerID="5ac914bb8a3e35603627f582f6564d74b97aafe63ef8ffbd39e7f9e6fd25f7da" exitCode=0 Jan 28 17:30:43 crc kubenswrapper[4811]: I0128 17:30:43.006653 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"02f75eea-3b22-4f21-8f2c-c3f93423f33f","Type":"ContainerDied","Data":"5ac914bb8a3e35603627f582f6564d74b97aafe63ef8ffbd39e7f9e6fd25f7da"} Jan 28 17:30:43 crc kubenswrapper[4811]: I0128 17:30:43.009620 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"6f828c92-3711-4acd-a17b-e7f3a6fd75e8","Type":"ContainerStarted","Data":"054e61425ead88f74df9d05f9402c65698d2fbfabcd51d55ffa690a021b602a8"} Jan 28 17:30:43 crc kubenswrapper[4811]: I0128 17:30:43.389232 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nrgkn" Jan 28 17:30:43 crc kubenswrapper[4811]: I0128 17:30:43.389648 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nrgkn" Jan 28 17:30:43 crc kubenswrapper[4811]: I0128 17:30:43.439211 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nrgkn" Jan 28 17:30:44 crc kubenswrapper[4811]: I0128 17:30:44.062977 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nrgkn" Jan 28 17:30:44 crc kubenswrapper[4811]: I0128 17:30:44.105803 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nrgkn"] Jan 28 17:30:46 crc kubenswrapper[4811]: I0128 17:30:46.038698 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-nrgkn" podUID="ff52afc6-bc59-4086-8826-afbc11b5985f" containerName="registry-server" containerID="cri-o://e23b76fb5e9126f8609bd57b8abfe3e6bd98585a9f96e8ed2da7f69a017088ba" gracePeriod=2 Jan 28 17:30:47 crc kubenswrapper[4811]: I0128 17:30:47.056624 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"6f828c92-3711-4acd-a17b-e7f3a6fd75e8","Type":"ContainerStarted","Data":"dba1162ec596b0aca33af986f750d2d42e5c9176f65db306d7086d9d37cb796a"} Jan 28 17:30:47 crc kubenswrapper[4811]: I0128 17:30:47.059698 4811 generic.go:334] "Generic (PLEG): container finished" 
podID="ff52afc6-bc59-4086-8826-afbc11b5985f" containerID="e23b76fb5e9126f8609bd57b8abfe3e6bd98585a9f96e8ed2da7f69a017088ba" exitCode=0 Jan 28 17:30:47 crc kubenswrapper[4811]: I0128 17:30:47.059737 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nrgkn" event={"ID":"ff52afc6-bc59-4086-8826-afbc11b5985f","Type":"ContainerDied","Data":"e23b76fb5e9126f8609bd57b8abfe3e6bd98585a9f96e8ed2da7f69a017088ba"} Jan 28 17:30:47 crc kubenswrapper[4811]: I0128 17:30:47.087109 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/alertmanager-metric-storage-0" podStartSLOduration=6.720090273 podStartE2EDuration="23.087074977s" podCreationTimestamp="2026-01-28 17:30:24 +0000 UTC" firstStartedPulling="2026-01-28 17:30:25.878839981 +0000 UTC m=+6318.633203564" lastFinishedPulling="2026-01-28 17:30:42.245824685 +0000 UTC m=+6335.000188268" observedRunningTime="2026-01-28 17:30:47.081819333 +0000 UTC m=+6339.836182916" watchObservedRunningTime="2026-01-28 17:30:47.087074977 +0000 UTC m=+6339.841438560" Jan 28 17:30:47 crc kubenswrapper[4811]: I0128 17:30:47.340566 4811 scope.go:117] "RemoveContainer" containerID="c8f31485a8ab10a9c157ae14e2cb2f7e512fa0e9a55fee158caf88525ec54b35" Jan 28 17:30:47 crc kubenswrapper[4811]: E0128 17:30:47.340895 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:30:48 crc kubenswrapper[4811]: I0128 17:30:48.073588 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/alertmanager-metric-storage-0" Jan 28 17:30:48 crc kubenswrapper[4811]: I0128 17:30:48.082083 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/alertmanager-metric-storage-0" Jan 28 17:30:48 crc kubenswrapper[4811]: I0128 17:30:48.754272 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nrgkn" Jan 28 17:30:48 crc kubenswrapper[4811]: I0128 17:30:48.862986 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff52afc6-bc59-4086-8826-afbc11b5985f-catalog-content\") pod \"ff52afc6-bc59-4086-8826-afbc11b5985f\" (UID: \"ff52afc6-bc59-4086-8826-afbc11b5985f\") " Jan 28 17:30:48 crc kubenswrapper[4811]: I0128 17:30:48.863213 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff52afc6-bc59-4086-8826-afbc11b5985f-utilities\") pod \"ff52afc6-bc59-4086-8826-afbc11b5985f\" (UID: \"ff52afc6-bc59-4086-8826-afbc11b5985f\") " Jan 28 17:30:48 crc kubenswrapper[4811]: I0128 17:30:48.863358 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6wbc\" (UniqueName: \"kubernetes.io/projected/ff52afc6-bc59-4086-8826-afbc11b5985f-kube-api-access-x6wbc\") pod \"ff52afc6-bc59-4086-8826-afbc11b5985f\" (UID: \"ff52afc6-bc59-4086-8826-afbc11b5985f\") " Jan 28 17:30:48 crc kubenswrapper[4811]: I0128 17:30:48.864190 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff52afc6-bc59-4086-8826-afbc11b5985f-utilities" (OuterVolumeSpecName: "utilities") pod "ff52afc6-bc59-4086-8826-afbc11b5985f" (UID: "ff52afc6-bc59-4086-8826-afbc11b5985f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:30:48 crc kubenswrapper[4811]: I0128 17:30:48.864480 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff52afc6-bc59-4086-8826-afbc11b5985f-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 17:30:48 crc kubenswrapper[4811]: I0128 17:30:48.867417 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff52afc6-bc59-4086-8826-afbc11b5985f-kube-api-access-x6wbc" (OuterVolumeSpecName: "kube-api-access-x6wbc") pod "ff52afc6-bc59-4086-8826-afbc11b5985f" (UID: "ff52afc6-bc59-4086-8826-afbc11b5985f"). InnerVolumeSpecName "kube-api-access-x6wbc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:30:48 crc kubenswrapper[4811]: I0128 17:30:48.882806 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff52afc6-bc59-4086-8826-afbc11b5985f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ff52afc6-bc59-4086-8826-afbc11b5985f" (UID: "ff52afc6-bc59-4086-8826-afbc11b5985f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:30:48 crc kubenswrapper[4811]: I0128 17:30:48.966568 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6wbc\" (UniqueName: \"kubernetes.io/projected/ff52afc6-bc59-4086-8826-afbc11b5985f-kube-api-access-x6wbc\") on node \"crc\" DevicePath \"\"" Jan 28 17:30:48 crc kubenswrapper[4811]: I0128 17:30:48.966984 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff52afc6-bc59-4086-8826-afbc11b5985f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 17:30:49 crc kubenswrapper[4811]: I0128 17:30:49.082410 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"02f75eea-3b22-4f21-8f2c-c3f93423f33f","Type":"ContainerStarted","Data":"7ee5414ca349572949dba65ec3e2b789026134e383202fb69de258f996d410ae"} Jan 28 17:30:49 crc kubenswrapper[4811]: I0128 17:30:49.084621 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nrgkn" event={"ID":"ff52afc6-bc59-4086-8826-afbc11b5985f","Type":"ContainerDied","Data":"b239e0b478d23bc2502d89a5f8cc647e5305d80a1a1d111b3d51173d6f5c2efc"} Jan 28 17:30:49 crc kubenswrapper[4811]: I0128 17:30:49.084689 4811 scope.go:117] "RemoveContainer" containerID="e23b76fb5e9126f8609bd57b8abfe3e6bd98585a9f96e8ed2da7f69a017088ba" Jan 28 17:30:49 crc kubenswrapper[4811]: I0128 17:30:49.084649 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nrgkn" Jan 28 17:30:49 crc kubenswrapper[4811]: I0128 17:30:49.127534 4811 scope.go:117] "RemoveContainer" containerID="baf7b79dd745b45727df9c761b126391fb6a8f4d54642f3adee87f6a003ebef0" Jan 28 17:30:49 crc kubenswrapper[4811]: I0128 17:30:49.133825 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nrgkn"] Jan 28 17:30:49 crc kubenswrapper[4811]: I0128 17:30:49.144044 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-nrgkn"] Jan 28 17:30:49 crc kubenswrapper[4811]: I0128 17:30:49.159702 4811 scope.go:117] "RemoveContainer" containerID="d45f5a23f89b788c8bf9c2d068db2eb3dab74c6e8c2b31079a023a136d637108" Jan 28 17:30:50 crc kubenswrapper[4811]: I0128 17:30:50.354846 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff52afc6-bc59-4086-8826-afbc11b5985f" path="/var/lib/kubelet/pods/ff52afc6-bc59-4086-8826-afbc11b5985f/volumes" Jan 28 17:30:53 crc kubenswrapper[4811]: I0128 17:30:53.133849 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"02f75eea-3b22-4f21-8f2c-c3f93423f33f","Type":"ContainerStarted","Data":"d324cdce4ede6fd2c2dd1c8262faae0f4dea8d871246eb420ea198e8ecb6db01"} Jan 28 17:30:56 crc kubenswrapper[4811]: I0128 17:30:56.162460 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"02f75eea-3b22-4f21-8f2c-c3f93423f33f","Type":"ContainerStarted","Data":"626bb6b73e28f3bb18b8333810d3e5c13fe7de907dae55a845d40d1172f4a1a7"} Jan 28 17:30:56 crc kubenswrapper[4811]: I0128 17:30:56.193119 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=3.973977526 podStartE2EDuration="33.193100161s" podCreationTimestamp="2026-01-28 17:30:23 +0000 UTC" firstStartedPulling="2026-01-28 17:30:26.496478912 
+0000 UTC m=+6319.250842495" lastFinishedPulling="2026-01-28 17:30:55.715601547 +0000 UTC m=+6348.469965130" observedRunningTime="2026-01-28 17:30:56.183907782 +0000 UTC m=+6348.938271375" watchObservedRunningTime="2026-01-28 17:30:56.193100161 +0000 UTC m=+6348.947463744" Jan 28 17:30:58 crc kubenswrapper[4811]: I0128 17:30:58.346234 4811 scope.go:117] "RemoveContainer" containerID="c8f31485a8ab10a9c157ae14e2cb2f7e512fa0e9a55fee158caf88525ec54b35" Jan 28 17:30:58 crc kubenswrapper[4811]: E0128 17:30:58.346821 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:31:00 crc kubenswrapper[4811]: I0128 17:31:00.657780 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Jan 28 17:31:01 crc kubenswrapper[4811]: I0128 17:31:01.913005 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 28 17:31:01 crc kubenswrapper[4811]: E0128 17:31:01.913693 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff52afc6-bc59-4086-8826-afbc11b5985f" containerName="extract-content" Jan 28 17:31:01 crc kubenswrapper[4811]: I0128 17:31:01.913709 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff52afc6-bc59-4086-8826-afbc11b5985f" containerName="extract-content" Jan 28 17:31:01 crc kubenswrapper[4811]: E0128 17:31:01.913727 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff52afc6-bc59-4086-8826-afbc11b5985f" containerName="registry-server" Jan 28 17:31:01 crc kubenswrapper[4811]: I0128 17:31:01.913733 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff52afc6-bc59-4086-8826-afbc11b5985f" containerName="registry-server" Jan 28 17:31:01 crc kubenswrapper[4811]: E0128 17:31:01.913743 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff52afc6-bc59-4086-8826-afbc11b5985f" containerName="extract-utilities" Jan 28 17:31:01 crc kubenswrapper[4811]: I0128 17:31:01.913748 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff52afc6-bc59-4086-8826-afbc11b5985f" containerName="extract-utilities" Jan 28 17:31:01 crc kubenswrapper[4811]: I0128 17:31:01.913963 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff52afc6-bc59-4086-8826-afbc11b5985f" containerName="registry-server" Jan 28 17:31:01 crc kubenswrapper[4811]: I0128 17:31:01.915995 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 28 17:31:01 crc kubenswrapper[4811]: I0128 17:31:01.918229 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 28 17:31:01 crc kubenswrapper[4811]: I0128 17:31:01.922248 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 28 17:31:01 crc kubenswrapper[4811]: I0128 17:31:01.928942 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 28 17:31:02 crc kubenswrapper[4811]: I0128 17:31:02.046856 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\") " pod="openstack/ceilometer-0" Jan 28 17:31:02 crc kubenswrapper[4811]: I0128 17:31:02.046897 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-config-data\") pod \"ceilometer-0\" (UID: \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\") " pod="openstack/ceilometer-0" Jan 28 17:31:02 crc kubenswrapper[4811]: I0128 17:31:02.046917 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\") " pod="openstack/ceilometer-0" Jan 28 17:31:02 crc kubenswrapper[4811]: I0128 17:31:02.046957 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79t76\" (UniqueName: \"kubernetes.io/projected/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-kube-api-access-79t76\") pod \"ceilometer-0\" (UID: \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\") " pod="openstack/ceilometer-0" Jan 28 17:31:02 crc kubenswrapper[4811]: I0128 17:31:02.047088 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-log-httpd\") pod \"ceilometer-0\" (UID: \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\") " pod="openstack/ceilometer-0" Jan 28 17:31:02 crc kubenswrapper[4811]: I0128 17:31:02.047144 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-scripts\") pod \"ceilometer-0\" (UID: \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\") " pod="openstack/ceilometer-0" Jan 28 17:31:02 crc kubenswrapper[4811]: I0128 17:31:02.047327 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-run-httpd\") pod \"ceilometer-0\" (UID: \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\") " pod="openstack/ceilometer-0" Jan 28 17:31:02 crc kubenswrapper[4811]: I0128 17:31:02.149936 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\") " pod="openstack/ceilometer-0" Jan 28 17:31:02 crc kubenswrapper[4811]: I0128 
17:31:02.149998 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-config-data\") pod \"ceilometer-0\" (UID: \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\") " pod="openstack/ceilometer-0" Jan 28 17:31:02 crc kubenswrapper[4811]: I0128 17:31:02.150019 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\") " pod="openstack/ceilometer-0" Jan 28 17:31:02 crc kubenswrapper[4811]: I0128 17:31:02.150056 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79t76\" (UniqueName: \"kubernetes.io/projected/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-kube-api-access-79t76\") pod \"ceilometer-0\" (UID: \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\") " pod="openstack/ceilometer-0" Jan 28 17:31:02 crc kubenswrapper[4811]: I0128 17:31:02.150080 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-log-httpd\") pod \"ceilometer-0\" (UID: \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\") " pod="openstack/ceilometer-0" Jan 28 17:31:02 crc kubenswrapper[4811]: I0128 17:31:02.150097 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-scripts\") pod \"ceilometer-0\" (UID: \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\") " pod="openstack/ceilometer-0" Jan 28 17:31:02 crc kubenswrapper[4811]: I0128 17:31:02.150131 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-run-httpd\") pod \"ceilometer-0\" (UID: \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\") " pod="openstack/ceilometer-0" Jan 28 17:31:02 crc kubenswrapper[4811]: I0128 17:31:02.150661 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-run-httpd\") pod \"ceilometer-0\" (UID: \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\") " pod="openstack/ceilometer-0" Jan 28 17:31:02 crc kubenswrapper[4811]: I0128 17:31:02.151792 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-log-httpd\") pod \"ceilometer-0\" (UID: \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\") " pod="openstack/ceilometer-0" Jan 28 17:31:02 crc kubenswrapper[4811]: I0128 17:31:02.162150 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\") " pod="openstack/ceilometer-0" Jan 28 17:31:02 crc kubenswrapper[4811]: I0128 17:31:02.162456 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\") " pod="openstack/ceilometer-0" Jan 28 17:31:02 crc kubenswrapper[4811]: I0128 17:31:02.162819 4811 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-config-data\") pod \"ceilometer-0\" (UID: \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\") " pod="openstack/ceilometer-0" Jan 28 17:31:02 crc kubenswrapper[4811]: I0128 17:31:02.170449 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-scripts\") pod \"ceilometer-0\" (UID: \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\") " pod="openstack/ceilometer-0" Jan 28 17:31:02 crc kubenswrapper[4811]: I0128 17:31:02.173323 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79t76\" (UniqueName: \"kubernetes.io/projected/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-kube-api-access-79t76\") pod \"ceilometer-0\" (UID: \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\") " pod="openstack/ceilometer-0" Jan 28 17:31:02 crc kubenswrapper[4811]: I0128 17:31:02.235112 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 28 17:31:02 crc kubenswrapper[4811]: I0128 17:31:02.825689 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 28 17:31:03 crc kubenswrapper[4811]: I0128 17:31:03.227070 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25","Type":"ContainerStarted","Data":"d2487c28e61fb6cbbcda577c356b59993e50de04a20ce0f01f896e30e9c729bd"} Jan 28 17:31:04 crc kubenswrapper[4811]: I0128 17:31:04.238379 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25","Type":"ContainerStarted","Data":"7f8b727b211f4b1030739bdaefbd71e777e3c25f32f473ce0f695bdac20919a8"} Jan 28 17:31:05 crc kubenswrapper[4811]: I0128 17:31:05.265375 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25","Type":"ContainerStarted","Data":"bff06f2a49b9d8fe70a0838153b97789a11773e132044576fdfae34d235a3387"} Jan 28 17:31:06 crc kubenswrapper[4811]: I0128 17:31:06.275406 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25","Type":"ContainerStarted","Data":"a2096624b8c3aebdc1fb588a98243bcb0c6d0f39fd0ea07308919cf568c20dc3"} Jan 28 17:31:08 crc kubenswrapper[4811]: I0128 17:31:08.296131 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25","Type":"ContainerStarted","Data":"310d9fdd21fc68cf42e05e528bd00d4ed863640860b8b14357bddc5b3c4f6458"} Jan 28 17:31:08 crc kubenswrapper[4811]: I0128 17:31:08.296709 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 28 17:31:08 crc kubenswrapper[4811]: I0128 17:31:08.325712 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.588759554 podStartE2EDuration="7.325691051s" podCreationTimestamp="2026-01-28 17:31:01 +0000 UTC" firstStartedPulling="2026-01-28 17:31:02.8251825 +0000 UTC m=+6355.579546083" lastFinishedPulling="2026-01-28 17:31:07.562113997 +0000 UTC m=+6360.316477580" observedRunningTime="2026-01-28 17:31:08.312062453 +0000 UTC m=+6361.066426036" watchObservedRunningTime="2026-01-28 17:31:08.325691051 +0000 UTC m=+6361.080054634" Jan 28 17:31:10 crc 
kubenswrapper[4811]: I0128 17:31:10.340061 4811 scope.go:117] "RemoveContainer" containerID="c8f31485a8ab10a9c157ae14e2cb2f7e512fa0e9a55fee158caf88525ec54b35" Jan 28 17:31:10 crc kubenswrapper[4811]: E0128 17:31:10.340595 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:31:10 crc kubenswrapper[4811]: I0128 17:31:10.657726 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Jan 28 17:31:10 crc kubenswrapper[4811]: I0128 17:31:10.660556 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Jan 28 17:31:11 crc kubenswrapper[4811]: I0128 17:31:11.327322 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Jan 28 17:31:13 crc kubenswrapper[4811]: I0128 17:31:13.992299 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-create-h89sv"] Jan 28 17:31:13 crc kubenswrapper[4811]: I0128 17:31:13.994236 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-h89sv" Jan 28 17:31:14 crc kubenswrapper[4811]: I0128 17:31:14.010570 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-h89sv"] Jan 28 17:31:14 crc kubenswrapper[4811]: I0128 17:31:14.192956 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2jkh\" (UniqueName: \"kubernetes.io/projected/5091e50e-aea2-4b79-aadc-275cd468319d-kube-api-access-c2jkh\") pod \"aodh-db-create-h89sv\" (UID: \"5091e50e-aea2-4b79-aadc-275cd468319d\") " pod="openstack/aodh-db-create-h89sv" Jan 28 17:31:14 crc kubenswrapper[4811]: I0128 17:31:14.193367 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5091e50e-aea2-4b79-aadc-275cd468319d-operator-scripts\") pod \"aodh-db-create-h89sv\" (UID: \"5091e50e-aea2-4b79-aadc-275cd468319d\") " pod="openstack/aodh-db-create-h89sv" Jan 28 17:31:14 crc kubenswrapper[4811]: I0128 17:31:14.295914 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5091e50e-aea2-4b79-aadc-275cd468319d-operator-scripts\") pod \"aodh-db-create-h89sv\" (UID: \"5091e50e-aea2-4b79-aadc-275cd468319d\") " pod="openstack/aodh-db-create-h89sv" Jan 28 17:31:14 crc kubenswrapper[4811]: I0128 17:31:14.296118 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2jkh\" (UniqueName: \"kubernetes.io/projected/5091e50e-aea2-4b79-aadc-275cd468319d-kube-api-access-c2jkh\") pod \"aodh-db-create-h89sv\" (UID: \"5091e50e-aea2-4b79-aadc-275cd468319d\") " pod="openstack/aodh-db-create-h89sv" Jan 28 17:31:14 crc kubenswrapper[4811]: I0128 17:31:14.296995 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5091e50e-aea2-4b79-aadc-275cd468319d-operator-scripts\") pod \"aodh-db-create-h89sv\" (UID: 
\"5091e50e-aea2-4b79-aadc-275cd468319d\") " pod="openstack/aodh-db-create-h89sv" Jan 28 17:31:14 crc kubenswrapper[4811]: I0128 17:31:14.320154 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2jkh\" (UniqueName: \"kubernetes.io/projected/5091e50e-aea2-4b79-aadc-275cd468319d-kube-api-access-c2jkh\") pod \"aodh-db-create-h89sv\" (UID: \"5091e50e-aea2-4b79-aadc-275cd468319d\") " pod="openstack/aodh-db-create-h89sv" Jan 28 17:31:14 crc kubenswrapper[4811]: I0128 17:31:14.568588 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-845d-account-create-update-wml6w"] Jan 28 17:31:14 crc kubenswrapper[4811]: I0128 17:31:14.569916 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-845d-account-create-update-wml6w" Jan 28 17:31:14 crc kubenswrapper[4811]: I0128 17:31:14.573078 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-db-secret" Jan 28 17:31:14 crc kubenswrapper[4811]: I0128 17:31:14.582816 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-845d-account-create-update-wml6w"] Jan 28 17:31:14 crc kubenswrapper[4811]: I0128 17:31:14.604722 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kf9wq\" (UniqueName: \"kubernetes.io/projected/9fb696c6-0e6d-4bc8-9da2-67876886c2a7-kube-api-access-kf9wq\") pod \"aodh-845d-account-create-update-wml6w\" (UID: \"9fb696c6-0e6d-4bc8-9da2-67876886c2a7\") " pod="openstack/aodh-845d-account-create-update-wml6w" Jan 28 17:31:14 crc kubenswrapper[4811]: I0128 17:31:14.604903 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9fb696c6-0e6d-4bc8-9da2-67876886c2a7-operator-scripts\") pod \"aodh-845d-account-create-update-wml6w\" (UID: \"9fb696c6-0e6d-4bc8-9da2-67876886c2a7\") " pod="openstack/aodh-845d-account-create-update-wml6w" Jan 28 17:31:14 crc kubenswrapper[4811]: I0128 17:31:14.616789 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-create-h89sv" Jan 28 17:31:14 crc kubenswrapper[4811]: I0128 17:31:14.707017 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9fb696c6-0e6d-4bc8-9da2-67876886c2a7-operator-scripts\") pod \"aodh-845d-account-create-update-wml6w\" (UID: \"9fb696c6-0e6d-4bc8-9da2-67876886c2a7\") " pod="openstack/aodh-845d-account-create-update-wml6w" Jan 28 17:31:14 crc kubenswrapper[4811]: I0128 17:31:14.707397 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kf9wq\" (UniqueName: \"kubernetes.io/projected/9fb696c6-0e6d-4bc8-9da2-67876886c2a7-kube-api-access-kf9wq\") pod \"aodh-845d-account-create-update-wml6w\" (UID: \"9fb696c6-0e6d-4bc8-9da2-67876886c2a7\") " pod="openstack/aodh-845d-account-create-update-wml6w" Jan 28 17:31:14 crc kubenswrapper[4811]: I0128 17:31:14.707960 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9fb696c6-0e6d-4bc8-9da2-67876886c2a7-operator-scripts\") pod \"aodh-845d-account-create-update-wml6w\" (UID: \"9fb696c6-0e6d-4bc8-9da2-67876886c2a7\") " pod="openstack/aodh-845d-account-create-update-wml6w" Jan 28 17:31:14 crc kubenswrapper[4811]: I0128 17:31:14.731134 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kf9wq\" (UniqueName: \"kubernetes.io/projected/9fb696c6-0e6d-4bc8-9da2-67876886c2a7-kube-api-access-kf9wq\") pod \"aodh-845d-account-create-update-wml6w\" (UID: \"9fb696c6-0e6d-4bc8-9da2-67876886c2a7\") " pod="openstack/aodh-845d-account-create-update-wml6w" Jan 28 17:31:14 crc kubenswrapper[4811]: I0128 17:31:14.890278 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-845d-account-create-update-wml6w" Jan 28 17:31:15 crc kubenswrapper[4811]: I0128 17:31:15.123463 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-h89sv"] Jan 28 17:31:15 crc kubenswrapper[4811]: I0128 17:31:15.371264 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-h89sv" event={"ID":"5091e50e-aea2-4b79-aadc-275cd468319d","Type":"ContainerStarted","Data":"06af11448ccdae6ae772862180ab6ca7b05ee3c967e0ddea2d88038ba6230ea1"} Jan 28 17:31:15 crc kubenswrapper[4811]: I0128 17:31:15.371564 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-h89sv" event={"ID":"5091e50e-aea2-4b79-aadc-275cd468319d","Type":"ContainerStarted","Data":"3e148d6841a1dbaf0d137859a15cac65c1d46cd2ff2fcff97f3e8c86602410e0"} Jan 28 17:31:15 crc kubenswrapper[4811]: I0128 17:31:15.386807 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-db-create-h89sv" podStartSLOduration=2.386785593 podStartE2EDuration="2.386785593s" podCreationTimestamp="2026-01-28 17:31:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:31:15.386023323 +0000 UTC m=+6368.140386926" watchObservedRunningTime="2026-01-28 17:31:15.386785593 +0000 UTC m=+6368.141149176" Jan 28 17:31:16 crc kubenswrapper[4811]: I0128 17:31:16.052971 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-845d-account-create-update-wml6w"] Jan 28 17:31:16 crc kubenswrapper[4811]: I0128 17:31:16.381793 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-845d-account-create-update-wml6w" event={"ID":"9fb696c6-0e6d-4bc8-9da2-67876886c2a7","Type":"ContainerStarted","Data":"6ed14d1d7d2a52efddac8669a018fb568046d82002466f957b945bed9b69205c"} Jan 28 17:31:16 crc kubenswrapper[4811]: I0128 17:31:16.382120 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-845d-account-create-update-wml6w" event={"ID":"9fb696c6-0e6d-4bc8-9da2-67876886c2a7","Type":"ContainerStarted","Data":"6f27b55990fbaebc7f1b37ab4a2de1d6769a024d8fec4616c484a9fab3571a11"} Jan 28 17:31:16 crc kubenswrapper[4811]: I0128 17:31:16.384003 4811 generic.go:334] "Generic (PLEG): container finished" podID="5091e50e-aea2-4b79-aadc-275cd468319d" containerID="06af11448ccdae6ae772862180ab6ca7b05ee3c967e0ddea2d88038ba6230ea1" exitCode=0 Jan 28 17:31:16 crc kubenswrapper[4811]: I0128 17:31:16.384046 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-h89sv" event={"ID":"5091e50e-aea2-4b79-aadc-275cd468319d","Type":"ContainerDied","Data":"06af11448ccdae6ae772862180ab6ca7b05ee3c967e0ddea2d88038ba6230ea1"} Jan 28 17:31:16 crc kubenswrapper[4811]: I0128 17:31:16.416667 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-845d-account-create-update-wml6w" podStartSLOduration=2.416647483 podStartE2EDuration="2.416647483s" podCreationTimestamp="2026-01-28 17:31:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:31:16.395973354 +0000 UTC m=+6369.150336937" watchObservedRunningTime="2026-01-28 17:31:16.416647483 +0000 UTC m=+6369.171011056" Jan 28 17:31:17 crc kubenswrapper[4811]: I0128 17:31:17.394143 4811 generic.go:334] "Generic (PLEG): container finished" podID="9fb696c6-0e6d-4bc8-9da2-67876886c2a7" 
containerID="6ed14d1d7d2a52efddac8669a018fb568046d82002466f957b945bed9b69205c" exitCode=0 Jan 28 17:31:17 crc kubenswrapper[4811]: I0128 17:31:17.394193 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-845d-account-create-update-wml6w" event={"ID":"9fb696c6-0e6d-4bc8-9da2-67876886c2a7","Type":"ContainerDied","Data":"6ed14d1d7d2a52efddac8669a018fb568046d82002466f957b945bed9b69205c"} Jan 28 17:31:17 crc kubenswrapper[4811]: I0128 17:31:17.812600 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-h89sv" Jan 28 17:31:17 crc kubenswrapper[4811]: I0128 17:31:17.973252 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c2jkh\" (UniqueName: \"kubernetes.io/projected/5091e50e-aea2-4b79-aadc-275cd468319d-kube-api-access-c2jkh\") pod \"5091e50e-aea2-4b79-aadc-275cd468319d\" (UID: \"5091e50e-aea2-4b79-aadc-275cd468319d\") " Jan 28 17:31:17 crc kubenswrapper[4811]: I0128 17:31:17.973588 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5091e50e-aea2-4b79-aadc-275cd468319d-operator-scripts\") pod \"5091e50e-aea2-4b79-aadc-275cd468319d\" (UID: \"5091e50e-aea2-4b79-aadc-275cd468319d\") " Jan 28 17:31:17 crc kubenswrapper[4811]: I0128 17:31:17.974734 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5091e50e-aea2-4b79-aadc-275cd468319d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5091e50e-aea2-4b79-aadc-275cd468319d" (UID: "5091e50e-aea2-4b79-aadc-275cd468319d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:31:17 crc kubenswrapper[4811]: I0128 17:31:17.979234 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5091e50e-aea2-4b79-aadc-275cd468319d-kube-api-access-c2jkh" (OuterVolumeSpecName: "kube-api-access-c2jkh") pod "5091e50e-aea2-4b79-aadc-275cd468319d" (UID: "5091e50e-aea2-4b79-aadc-275cd468319d"). InnerVolumeSpecName "kube-api-access-c2jkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:31:18 crc kubenswrapper[4811]: I0128 17:31:18.077032 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c2jkh\" (UniqueName: \"kubernetes.io/projected/5091e50e-aea2-4b79-aadc-275cd468319d-kube-api-access-c2jkh\") on node \"crc\" DevicePath \"\"" Jan 28 17:31:18 crc kubenswrapper[4811]: I0128 17:31:18.077212 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5091e50e-aea2-4b79-aadc-275cd468319d-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:31:18 crc kubenswrapper[4811]: I0128 17:31:18.406454 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-h89sv" event={"ID":"5091e50e-aea2-4b79-aadc-275cd468319d","Type":"ContainerDied","Data":"3e148d6841a1dbaf0d137859a15cac65c1d46cd2ff2fcff97f3e8c86602410e0"} Jan 28 17:31:18 crc kubenswrapper[4811]: I0128 17:31:18.406507 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3e148d6841a1dbaf0d137859a15cac65c1d46cd2ff2fcff97f3e8c86602410e0" Jan 28 17:31:18 crc kubenswrapper[4811]: I0128 17:31:18.406471 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-create-h89sv" Jan 28 17:31:18 crc kubenswrapper[4811]: I0128 17:31:18.889282 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-845d-account-create-update-wml6w" Jan 28 17:31:18 crc kubenswrapper[4811]: I0128 17:31:18.992606 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9fb696c6-0e6d-4bc8-9da2-67876886c2a7-operator-scripts\") pod \"9fb696c6-0e6d-4bc8-9da2-67876886c2a7\" (UID: \"9fb696c6-0e6d-4bc8-9da2-67876886c2a7\") " Jan 28 17:31:18 crc kubenswrapper[4811]: I0128 17:31:18.994310 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9fb696c6-0e6d-4bc8-9da2-67876886c2a7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9fb696c6-0e6d-4bc8-9da2-67876886c2a7" (UID: "9fb696c6-0e6d-4bc8-9da2-67876886c2a7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:31:18 crc kubenswrapper[4811]: I0128 17:31:18.994546 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kf9wq\" (UniqueName: \"kubernetes.io/projected/9fb696c6-0e6d-4bc8-9da2-67876886c2a7-kube-api-access-kf9wq\") pod \"9fb696c6-0e6d-4bc8-9da2-67876886c2a7\" (UID: \"9fb696c6-0e6d-4bc8-9da2-67876886c2a7\") " Jan 28 17:31:18 crc kubenswrapper[4811]: I0128 17:31:18.995689 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9fb696c6-0e6d-4bc8-9da2-67876886c2a7-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:31:19 crc kubenswrapper[4811]: I0128 17:31:19.002892 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9fb696c6-0e6d-4bc8-9da2-67876886c2a7-kube-api-access-kf9wq" (OuterVolumeSpecName: "kube-api-access-kf9wq") pod "9fb696c6-0e6d-4bc8-9da2-67876886c2a7" (UID: "9fb696c6-0e6d-4bc8-9da2-67876886c2a7"). InnerVolumeSpecName "kube-api-access-kf9wq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:31:19 crc kubenswrapper[4811]: I0128 17:31:19.043560 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-cfmpw"] Jan 28 17:31:19 crc kubenswrapper[4811]: I0128 17:31:19.054158 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-13c9-account-create-update-mstz8"] Jan 28 17:31:19 crc kubenswrapper[4811]: I0128 17:31:19.064163 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-cfmpw"] Jan 28 17:31:19 crc kubenswrapper[4811]: I0128 17:31:19.075522 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-13c9-account-create-update-mstz8"] Jan 28 17:31:19 crc kubenswrapper[4811]: I0128 17:31:19.096609 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kf9wq\" (UniqueName: \"kubernetes.io/projected/9fb696c6-0e6d-4bc8-9da2-67876886c2a7-kube-api-access-kf9wq\") on node \"crc\" DevicePath \"\"" Jan 28 17:31:19 crc kubenswrapper[4811]: I0128 17:31:19.430147 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-845d-account-create-update-wml6w" event={"ID":"9fb696c6-0e6d-4bc8-9da2-67876886c2a7","Type":"ContainerDied","Data":"6f27b55990fbaebc7f1b37ab4a2de1d6769a024d8fec4616c484a9fab3571a11"} Jan 28 17:31:19 crc kubenswrapper[4811]: I0128 17:31:19.430203 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f27b55990fbaebc7f1b37ab4a2de1d6769a024d8fec4616c484a9fab3571a11" Jan 28 17:31:19 crc kubenswrapper[4811]: I0128 17:31:19.430278 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-845d-account-create-update-wml6w" Jan 28 17:31:19 crc kubenswrapper[4811]: I0128 17:31:19.907673 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-sync-x45v9"] Jan 28 17:31:19 crc kubenswrapper[4811]: E0128 17:31:19.908268 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5091e50e-aea2-4b79-aadc-275cd468319d" containerName="mariadb-database-create" Jan 28 17:31:19 crc kubenswrapper[4811]: I0128 17:31:19.908290 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="5091e50e-aea2-4b79-aadc-275cd468319d" containerName="mariadb-database-create" Jan 28 17:31:19 crc kubenswrapper[4811]: E0128 17:31:19.908326 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fb696c6-0e6d-4bc8-9da2-67876886c2a7" containerName="mariadb-account-create-update" Jan 28 17:31:19 crc kubenswrapper[4811]: I0128 17:31:19.908334 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fb696c6-0e6d-4bc8-9da2-67876886c2a7" containerName="mariadb-account-create-update" Jan 28 17:31:19 crc kubenswrapper[4811]: I0128 17:31:19.908612 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="5091e50e-aea2-4b79-aadc-275cd468319d" containerName="mariadb-database-create" Jan 28 17:31:19 crc kubenswrapper[4811]: I0128 17:31:19.908635 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="9fb696c6-0e6d-4bc8-9da2-67876886c2a7" containerName="mariadb-account-create-update" Jan 28 17:31:19 crc kubenswrapper[4811]: I0128 17:31:19.909582 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-x45v9" Jan 28 17:31:19 crc kubenswrapper[4811]: I0128 17:31:19.920135 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Jan 28 17:31:19 crc kubenswrapper[4811]: I0128 17:31:19.920909 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-7lpw6" Jan 28 17:31:19 crc kubenswrapper[4811]: I0128 17:31:19.921261 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Jan 28 17:31:19 crc kubenswrapper[4811]: I0128 17:31:19.921566 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Jan 28 17:31:19 crc kubenswrapper[4811]: I0128 17:31:19.936214 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-x45v9"] Jan 28 17:31:20 crc kubenswrapper[4811]: I0128 17:31:20.014760 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmtkt\" (UniqueName: \"kubernetes.io/projected/e4432726-194b-43c5-8403-e1ecb8cda9c5-kube-api-access-bmtkt\") pod \"aodh-db-sync-x45v9\" (UID: \"e4432726-194b-43c5-8403-e1ecb8cda9c5\") " pod="openstack/aodh-db-sync-x45v9" Jan 28 17:31:20 crc kubenswrapper[4811]: I0128 17:31:20.016060 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4432726-194b-43c5-8403-e1ecb8cda9c5-config-data\") pod \"aodh-db-sync-x45v9\" (UID: \"e4432726-194b-43c5-8403-e1ecb8cda9c5\") " pod="openstack/aodh-db-sync-x45v9" Jan 28 17:31:20 crc kubenswrapper[4811]: I0128 17:31:20.016097 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e4432726-194b-43c5-8403-e1ecb8cda9c5-scripts\") pod \"aodh-db-sync-x45v9\" (UID: \"e4432726-194b-43c5-8403-e1ecb8cda9c5\") " pod="openstack/aodh-db-sync-x45v9" Jan 28 17:31:20 crc kubenswrapper[4811]: I0128 17:31:20.016138 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4432726-194b-43c5-8403-e1ecb8cda9c5-combined-ca-bundle\") pod \"aodh-db-sync-x45v9\" (UID: \"e4432726-194b-43c5-8403-e1ecb8cda9c5\") " pod="openstack/aodh-db-sync-x45v9" Jan 28 17:31:20 crc kubenswrapper[4811]: I0128 17:31:20.117917 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmtkt\" (UniqueName: \"kubernetes.io/projected/e4432726-194b-43c5-8403-e1ecb8cda9c5-kube-api-access-bmtkt\") pod \"aodh-db-sync-x45v9\" (UID: \"e4432726-194b-43c5-8403-e1ecb8cda9c5\") " pod="openstack/aodh-db-sync-x45v9" Jan 28 17:31:20 crc kubenswrapper[4811]: I0128 17:31:20.118028 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4432726-194b-43c5-8403-e1ecb8cda9c5-config-data\") pod \"aodh-db-sync-x45v9\" (UID: \"e4432726-194b-43c5-8403-e1ecb8cda9c5\") " pod="openstack/aodh-db-sync-x45v9" Jan 28 17:31:20 crc kubenswrapper[4811]: I0128 17:31:20.118062 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e4432726-194b-43c5-8403-e1ecb8cda9c5-scripts\") pod \"aodh-db-sync-x45v9\" (UID: \"e4432726-194b-43c5-8403-e1ecb8cda9c5\") " pod="openstack/aodh-db-sync-x45v9" Jan 28 17:31:20 crc kubenswrapper[4811]: 
Jan 28 17:31:20 crc kubenswrapper[4811]: I0128 17:31:20.118117 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4432726-194b-43c5-8403-e1ecb8cda9c5-combined-ca-bundle\") pod \"aodh-db-sync-x45v9\" (UID: \"e4432726-194b-43c5-8403-e1ecb8cda9c5\") " pod="openstack/aodh-db-sync-x45v9"
Jan 28 17:31:20 crc kubenswrapper[4811]: I0128 17:31:20.123179 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4432726-194b-43c5-8403-e1ecb8cda9c5-combined-ca-bundle\") pod \"aodh-db-sync-x45v9\" (UID: \"e4432726-194b-43c5-8403-e1ecb8cda9c5\") " pod="openstack/aodh-db-sync-x45v9"
Jan 28 17:31:20 crc kubenswrapper[4811]: I0128 17:31:20.123324 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4432726-194b-43c5-8403-e1ecb8cda9c5-config-data\") pod \"aodh-db-sync-x45v9\" (UID: \"e4432726-194b-43c5-8403-e1ecb8cda9c5\") " pod="openstack/aodh-db-sync-x45v9"
Jan 28 17:31:20 crc kubenswrapper[4811]: I0128 17:31:20.124085 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e4432726-194b-43c5-8403-e1ecb8cda9c5-scripts\") pod \"aodh-db-sync-x45v9\" (UID: \"e4432726-194b-43c5-8403-e1ecb8cda9c5\") " pod="openstack/aodh-db-sync-x45v9"
Jan 28 17:31:20 crc kubenswrapper[4811]: I0128 17:31:20.141216 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmtkt\" (UniqueName: \"kubernetes.io/projected/e4432726-194b-43c5-8403-e1ecb8cda9c5-kube-api-access-bmtkt\") pod \"aodh-db-sync-x45v9\" (UID: \"e4432726-194b-43c5-8403-e1ecb8cda9c5\") " pod="openstack/aodh-db-sync-x45v9"
Jan 28 17:31:20 crc kubenswrapper[4811]: I0128 17:31:20.239341 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-x45v9"
Need to start a new one" pod="openstack/aodh-db-sync-x45v9" Jan 28 17:31:20 crc kubenswrapper[4811]: I0128 17:31:20.358602 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d9f16ac-879b-4d4e-885d-60725aa32ff0" path="/var/lib/kubelet/pods/3d9f16ac-879b-4d4e-885d-60725aa32ff0/volumes" Jan 28 17:31:20 crc kubenswrapper[4811]: I0128 17:31:20.360569 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="edeedc0e-2bfa-41da-917a-3e4c0a476dce" path="/var/lib/kubelet/pods/edeedc0e-2bfa-41da-917a-3e4c0a476dce/volumes" Jan 28 17:31:20 crc kubenswrapper[4811]: I0128 17:31:20.749001 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-x45v9"] Jan 28 17:31:21 crc kubenswrapper[4811]: I0128 17:31:21.484639 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-x45v9" event={"ID":"e4432726-194b-43c5-8403-e1ecb8cda9c5","Type":"ContainerStarted","Data":"d4c723ac0e84360bec6bfb1c6c59275d787038fdbdb39357551327203b5310e9"} Jan 28 17:31:25 crc kubenswrapper[4811]: I0128 17:31:25.339760 4811 scope.go:117] "RemoveContainer" containerID="c8f31485a8ab10a9c157ae14e2cb2f7e512fa0e9a55fee158caf88525ec54b35" Jan 28 17:31:25 crc kubenswrapper[4811]: E0128 17:31:25.340386 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:31:25 crc kubenswrapper[4811]: I0128 17:31:25.529278 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-x45v9" event={"ID":"e4432726-194b-43c5-8403-e1ecb8cda9c5","Type":"ContainerStarted","Data":"e1287edb0431b6d37fc62e015bc34f1437896e472fd002e5b10f6e64bc516d01"} Jan 28 17:31:25 crc kubenswrapper[4811]: I0128 17:31:25.552370 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-db-sync-x45v9" podStartSLOduration=2.364129997 podStartE2EDuration="6.552351147s" podCreationTimestamp="2026-01-28 17:31:19 +0000 UTC" firstStartedPulling="2026-01-28 17:31:20.760946608 +0000 UTC m=+6373.515310191" lastFinishedPulling="2026-01-28 17:31:24.949167758 +0000 UTC m=+6377.703531341" observedRunningTime="2026-01-28 17:31:25.548838713 +0000 UTC m=+6378.303202306" watchObservedRunningTime="2026-01-28 17:31:25.552351147 +0000 UTC m=+6378.306714720" Jan 28 17:31:27 crc kubenswrapper[4811]: I0128 17:31:27.045136 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-5mqx7"] Jan 28 17:31:27 crc kubenswrapper[4811]: I0128 17:31:27.057222 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-5mqx7"] Jan 28 17:31:28 crc kubenswrapper[4811]: I0128 17:31:28.359158 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce15f0de-cdee-4305-a526-f8c9e146aa60" path="/var/lib/kubelet/pods/ce15f0de-cdee-4305-a526-f8c9e146aa60/volumes" Jan 28 17:31:28 crc kubenswrapper[4811]: I0128 17:31:28.559575 4811 generic.go:334] "Generic (PLEG): container finished" podID="e4432726-194b-43c5-8403-e1ecb8cda9c5" containerID="e1287edb0431b6d37fc62e015bc34f1437896e472fd002e5b10f6e64bc516d01" exitCode=0 Jan 28 17:31:28 crc kubenswrapper[4811]: I0128 17:31:28.559622 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/aodh-db-sync-x45v9" event={"ID":"e4432726-194b-43c5-8403-e1ecb8cda9c5","Type":"ContainerDied","Data":"e1287edb0431b6d37fc62e015bc34f1437896e472fd002e5b10f6e64bc516d01"} Jan 28 17:31:30 crc kubenswrapper[4811]: I0128 17:31:30.007546 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-x45v9" Jan 28 17:31:30 crc kubenswrapper[4811]: I0128 17:31:30.060880 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4432726-194b-43c5-8403-e1ecb8cda9c5-combined-ca-bundle\") pod \"e4432726-194b-43c5-8403-e1ecb8cda9c5\" (UID: \"e4432726-194b-43c5-8403-e1ecb8cda9c5\") " Jan 28 17:31:30 crc kubenswrapper[4811]: I0128 17:31:30.061302 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bmtkt\" (UniqueName: \"kubernetes.io/projected/e4432726-194b-43c5-8403-e1ecb8cda9c5-kube-api-access-bmtkt\") pod \"e4432726-194b-43c5-8403-e1ecb8cda9c5\" (UID: \"e4432726-194b-43c5-8403-e1ecb8cda9c5\") " Jan 28 17:31:30 crc kubenswrapper[4811]: I0128 17:31:30.061490 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4432726-194b-43c5-8403-e1ecb8cda9c5-config-data\") pod \"e4432726-194b-43c5-8403-e1ecb8cda9c5\" (UID: \"e4432726-194b-43c5-8403-e1ecb8cda9c5\") " Jan 28 17:31:30 crc kubenswrapper[4811]: I0128 17:31:30.061611 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e4432726-194b-43c5-8403-e1ecb8cda9c5-scripts\") pod \"e4432726-194b-43c5-8403-e1ecb8cda9c5\" (UID: \"e4432726-194b-43c5-8403-e1ecb8cda9c5\") " Jan 28 17:31:30 crc kubenswrapper[4811]: I0128 17:31:30.068506 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4432726-194b-43c5-8403-e1ecb8cda9c5-kube-api-access-bmtkt" (OuterVolumeSpecName: "kube-api-access-bmtkt") pod "e4432726-194b-43c5-8403-e1ecb8cda9c5" (UID: "e4432726-194b-43c5-8403-e1ecb8cda9c5"). InnerVolumeSpecName "kube-api-access-bmtkt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:31:30 crc kubenswrapper[4811]: I0128 17:31:30.069540 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4432726-194b-43c5-8403-e1ecb8cda9c5-scripts" (OuterVolumeSpecName: "scripts") pod "e4432726-194b-43c5-8403-e1ecb8cda9c5" (UID: "e4432726-194b-43c5-8403-e1ecb8cda9c5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:31:30 crc kubenswrapper[4811]: I0128 17:31:30.098700 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4432726-194b-43c5-8403-e1ecb8cda9c5-config-data" (OuterVolumeSpecName: "config-data") pod "e4432726-194b-43c5-8403-e1ecb8cda9c5" (UID: "e4432726-194b-43c5-8403-e1ecb8cda9c5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:31:30 crc kubenswrapper[4811]: I0128 17:31:30.103675 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4432726-194b-43c5-8403-e1ecb8cda9c5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e4432726-194b-43c5-8403-e1ecb8cda9c5" (UID: "e4432726-194b-43c5-8403-e1ecb8cda9c5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:31:30 crc kubenswrapper[4811]: I0128 17:31:30.164609 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4432726-194b-43c5-8403-e1ecb8cda9c5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 17:31:30 crc kubenswrapper[4811]: I0128 17:31:30.165128 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bmtkt\" (UniqueName: \"kubernetes.io/projected/e4432726-194b-43c5-8403-e1ecb8cda9c5-kube-api-access-bmtkt\") on node \"crc\" DevicePath \"\"" Jan 28 17:31:30 crc kubenswrapper[4811]: I0128 17:31:30.165184 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4432726-194b-43c5-8403-e1ecb8cda9c5-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 17:31:30 crc kubenswrapper[4811]: I0128 17:31:30.165232 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e4432726-194b-43c5-8403-e1ecb8cda9c5-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:31:30 crc kubenswrapper[4811]: I0128 17:31:30.581895 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-x45v9" event={"ID":"e4432726-194b-43c5-8403-e1ecb8cda9c5","Type":"ContainerDied","Data":"d4c723ac0e84360bec6bfb1c6c59275d787038fdbdb39357551327203b5310e9"} Jan 28 17:31:30 crc kubenswrapper[4811]: I0128 17:31:30.581934 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d4c723ac0e84360bec6bfb1c6c59275d787038fdbdb39357551327203b5310e9" Jan 28 17:31:30 crc kubenswrapper[4811]: I0128 17:31:30.581990 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-x45v9" Jan 28 17:31:32 crc kubenswrapper[4811]: I0128 17:31:32.241580 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Jan 28 17:31:33 crc kubenswrapper[4811]: I0128 17:31:33.894009 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-0"] Jan 28 17:31:33 crc kubenswrapper[4811]: E0128 17:31:33.895003 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4432726-194b-43c5-8403-e1ecb8cda9c5" containerName="aodh-db-sync" Jan 28 17:31:33 crc kubenswrapper[4811]: I0128 17:31:33.895020 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4432726-194b-43c5-8403-e1ecb8cda9c5" containerName="aodh-db-sync" Jan 28 17:31:33 crc kubenswrapper[4811]: I0128 17:31:33.895298 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4432726-194b-43c5-8403-e1ecb8cda9c5" containerName="aodh-db-sync" Jan 28 17:31:33 crc kubenswrapper[4811]: I0128 17:31:33.897406 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Jan 28 17:31:33 crc kubenswrapper[4811]: I0128 17:31:33.901132 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Jan 28 17:31:33 crc kubenswrapper[4811]: I0128 17:31:33.901258 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Jan 28 17:31:33 crc kubenswrapper[4811]: I0128 17:31:33.901530 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-7lpw6" Jan 28 17:31:33 crc kubenswrapper[4811]: I0128 17:31:33.907829 4811 scope.go:117] "RemoveContainer" containerID="f7b0f301f999bd32dd4cb871a158852f7a0d128b505f16a0edbd22dfa1209757" Jan 28 17:31:33 crc kubenswrapper[4811]: I0128 17:31:33.908303 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Jan 28 17:31:33 crc kubenswrapper[4811]: I0128 17:31:33.962527 4811 scope.go:117] "RemoveContainer" containerID="c82ff5392b67c12645ec2497ef567d229bf549428cce643b04be5cd087d861d7" Jan 28 17:31:34 crc kubenswrapper[4811]: I0128 17:31:34.041000 4811 scope.go:117] "RemoveContainer" containerID="b9c690e72a48f4814450f69b4e8231d998072f21fb9eabf4df7c871f0031c1f4" Jan 28 17:31:34 crc kubenswrapper[4811]: I0128 17:31:34.069374 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fd0af80-eaab-4937-b501-6b8f67cdce10-combined-ca-bundle\") pod \"aodh-0\" (UID: \"0fd0af80-eaab-4937-b501-6b8f67cdce10\") " pod="openstack/aodh-0" Jan 28 17:31:34 crc kubenswrapper[4811]: I0128 17:31:34.069534 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0fd0af80-eaab-4937-b501-6b8f67cdce10-scripts\") pod \"aodh-0\" (UID: \"0fd0af80-eaab-4937-b501-6b8f67cdce10\") " pod="openstack/aodh-0" Jan 28 17:31:34 crc kubenswrapper[4811]: I0128 17:31:34.069577 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fd0af80-eaab-4937-b501-6b8f67cdce10-config-data\") pod \"aodh-0\" (UID: \"0fd0af80-eaab-4937-b501-6b8f67cdce10\") " pod="openstack/aodh-0" Jan 28 17:31:34 crc kubenswrapper[4811]: I0128 17:31:34.069638 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgfzn\" (UniqueName: \"kubernetes.io/projected/0fd0af80-eaab-4937-b501-6b8f67cdce10-kube-api-access-wgfzn\") pod \"aodh-0\" (UID: \"0fd0af80-eaab-4937-b501-6b8f67cdce10\") " pod="openstack/aodh-0" Jan 28 17:31:34 crc kubenswrapper[4811]: I0128 17:31:34.094547 4811 scope.go:117] "RemoveContainer" containerID="e5be4970d675ea3a1b30f54d59a28b25c5eba929d4ca769f28d06abc1a9bbce6" Jan 28 17:31:34 crc kubenswrapper[4811]: I0128 17:31:34.132567 4811 scope.go:117] "RemoveContainer" containerID="42ea12e57d1b458c7b857839c782e307c67ae0ed120a8ba54f6d34339c29e6d9" Jan 28 17:31:34 crc kubenswrapper[4811]: I0128 17:31:34.171089 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0fd0af80-eaab-4937-b501-6b8f67cdce10-scripts\") pod \"aodh-0\" (UID: \"0fd0af80-eaab-4937-b501-6b8f67cdce10\") " pod="openstack/aodh-0" Jan 28 17:31:34 crc kubenswrapper[4811]: I0128 17:31:34.171166 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/0fd0af80-eaab-4937-b501-6b8f67cdce10-config-data\") pod \"aodh-0\" (UID: \"0fd0af80-eaab-4937-b501-6b8f67cdce10\") " pod="openstack/aodh-0" Jan 28 17:31:34 crc kubenswrapper[4811]: I0128 17:31:34.171260 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgfzn\" (UniqueName: \"kubernetes.io/projected/0fd0af80-eaab-4937-b501-6b8f67cdce10-kube-api-access-wgfzn\") pod \"aodh-0\" (UID: \"0fd0af80-eaab-4937-b501-6b8f67cdce10\") " pod="openstack/aodh-0" Jan 28 17:31:34 crc kubenswrapper[4811]: I0128 17:31:34.171330 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fd0af80-eaab-4937-b501-6b8f67cdce10-combined-ca-bundle\") pod \"aodh-0\" (UID: \"0fd0af80-eaab-4937-b501-6b8f67cdce10\") " pod="openstack/aodh-0" Jan 28 17:31:34 crc kubenswrapper[4811]: I0128 17:31:34.177570 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fd0af80-eaab-4937-b501-6b8f67cdce10-combined-ca-bundle\") pod \"aodh-0\" (UID: \"0fd0af80-eaab-4937-b501-6b8f67cdce10\") " pod="openstack/aodh-0" Jan 28 17:31:34 crc kubenswrapper[4811]: I0128 17:31:34.179259 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0fd0af80-eaab-4937-b501-6b8f67cdce10-scripts\") pod \"aodh-0\" (UID: \"0fd0af80-eaab-4937-b501-6b8f67cdce10\") " pod="openstack/aodh-0" Jan 28 17:31:34 crc kubenswrapper[4811]: I0128 17:31:34.179760 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fd0af80-eaab-4937-b501-6b8f67cdce10-config-data\") pod \"aodh-0\" (UID: \"0fd0af80-eaab-4937-b501-6b8f67cdce10\") " pod="openstack/aodh-0" Jan 28 17:31:34 crc kubenswrapper[4811]: I0128 17:31:34.192964 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgfzn\" (UniqueName: \"kubernetes.io/projected/0fd0af80-eaab-4937-b501-6b8f67cdce10-kube-api-access-wgfzn\") pod \"aodh-0\" (UID: \"0fd0af80-eaab-4937-b501-6b8f67cdce10\") " pod="openstack/aodh-0" Jan 28 17:31:34 crc kubenswrapper[4811]: I0128 17:31:34.226968 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Jan 28 17:31:34 crc kubenswrapper[4811]: I0128 17:31:34.800070 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Jan 28 17:31:35 crc kubenswrapper[4811]: I0128 17:31:35.558502 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 28 17:31:35 crc kubenswrapper[4811]: I0128 17:31:35.559160 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8d7f752e-c4de-48b3-89d3-77d2b0f0dc25" containerName="ceilometer-central-agent" containerID="cri-o://7f8b727b211f4b1030739bdaefbd71e777e3c25f32f473ce0f695bdac20919a8" gracePeriod=30 Jan 28 17:31:35 crc kubenswrapper[4811]: I0128 17:31:35.559259 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8d7f752e-c4de-48b3-89d3-77d2b0f0dc25" containerName="sg-core" containerID="cri-o://a2096624b8c3aebdc1fb588a98243bcb0c6d0f39fd0ea07308919cf568c20dc3" gracePeriod=30 Jan 28 17:31:35 crc kubenswrapper[4811]: I0128 17:31:35.559328 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8d7f752e-c4de-48b3-89d3-77d2b0f0dc25" containerName="proxy-httpd" containerID="cri-o://310d9fdd21fc68cf42e05e528bd00d4ed863640860b8b14357bddc5b3c4f6458" gracePeriod=30 Jan 28 17:31:35 crc kubenswrapper[4811]: I0128 17:31:35.559260 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8d7f752e-c4de-48b3-89d3-77d2b0f0dc25" containerName="ceilometer-notification-agent" containerID="cri-o://bff06f2a49b9d8fe70a0838153b97789a11773e132044576fdfae34d235a3387" gracePeriod=30 Jan 28 17:31:35 crc kubenswrapper[4811]: I0128 17:31:35.632889 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"0fd0af80-eaab-4937-b501-6b8f67cdce10","Type":"ContainerStarted","Data":"98997ff2e3bd757bacb6feb019e2f8d857726ed0abee6a770e6b934384b63922"} Jan 28 17:31:35 crc kubenswrapper[4811]: I0128 17:31:35.633208 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"0fd0af80-eaab-4937-b501-6b8f67cdce10","Type":"ContainerStarted","Data":"76b284ccc043c34ace5065d6cecec9240441f722e348c021509aa3e8d67a2f40"} Jan 28 17:31:36 crc kubenswrapper[4811]: I0128 17:31:36.650654 4811 generic.go:334] "Generic (PLEG): container finished" podID="8d7f752e-c4de-48b3-89d3-77d2b0f0dc25" containerID="310d9fdd21fc68cf42e05e528bd00d4ed863640860b8b14357bddc5b3c4f6458" exitCode=0 Jan 28 17:31:36 crc kubenswrapper[4811]: I0128 17:31:36.650972 4811 generic.go:334] "Generic (PLEG): container finished" podID="8d7f752e-c4de-48b3-89d3-77d2b0f0dc25" containerID="a2096624b8c3aebdc1fb588a98243bcb0c6d0f39fd0ea07308919cf568c20dc3" exitCode=2 Jan 28 17:31:36 crc kubenswrapper[4811]: I0128 17:31:36.650981 4811 generic.go:334] "Generic (PLEG): container finished" podID="8d7f752e-c4de-48b3-89d3-77d2b0f0dc25" containerID="7f8b727b211f4b1030739bdaefbd71e777e3c25f32f473ce0f695bdac20919a8" exitCode=0 Jan 28 17:31:36 crc kubenswrapper[4811]: I0128 17:31:36.650729 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25","Type":"ContainerDied","Data":"310d9fdd21fc68cf42e05e528bd00d4ed863640860b8b14357bddc5b3c4f6458"} Jan 28 17:31:36 crc kubenswrapper[4811]: I0128 17:31:36.651014 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25","Type":"ContainerDied","Data":"a2096624b8c3aebdc1fb588a98243bcb0c6d0f39fd0ea07308919cf568c20dc3"} Jan 28 17:31:36 crc kubenswrapper[4811]: I0128 17:31:36.651025 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25","Type":"ContainerDied","Data":"7f8b727b211f4b1030739bdaefbd71e777e3c25f32f473ce0f695bdac20919a8"} Jan 28 17:31:37 crc kubenswrapper[4811]: I0128 17:31:37.661194 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"0fd0af80-eaab-4937-b501-6b8f67cdce10","Type":"ContainerStarted","Data":"9ce64f2fc88d0647e1413bfd6d01cd54c40e90b66f51562ec8ffc8df423d7e2f"} Jan 28 17:31:39 crc kubenswrapper[4811]: I0128 17:31:39.681383 4811 generic.go:334] "Generic (PLEG): container finished" podID="8d7f752e-c4de-48b3-89d3-77d2b0f0dc25" containerID="bff06f2a49b9d8fe70a0838153b97789a11773e132044576fdfae34d235a3387" exitCode=0 Jan 28 17:31:39 crc kubenswrapper[4811]: I0128 17:31:39.681535 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25","Type":"ContainerDied","Data":"bff06f2a49b9d8fe70a0838153b97789a11773e132044576fdfae34d235a3387"} Jan 28 17:31:39 crc kubenswrapper[4811]: I0128 17:31:39.684876 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"0fd0af80-eaab-4937-b501-6b8f67cdce10","Type":"ContainerStarted","Data":"c1f8a3db158c680ceb472c7e96bda26ad654ad67efce3d46c755247c4b826a43"} Jan 28 17:31:39 crc kubenswrapper[4811]: I0128 17:31:39.763628 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 28 17:31:39 crc kubenswrapper[4811]: I0128 17:31:39.815206 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-scripts\") pod \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\" (UID: \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\") " Jan 28 17:31:39 crc kubenswrapper[4811]: I0128 17:31:39.815323 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-sg-core-conf-yaml\") pod \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\" (UID: \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\") " Jan 28 17:31:39 crc kubenswrapper[4811]: I0128 17:31:39.815349 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-config-data\") pod \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\" (UID: \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\") " Jan 28 17:31:39 crc kubenswrapper[4811]: I0128 17:31:39.815392 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-combined-ca-bundle\") pod \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\" (UID: \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\") " Jan 28 17:31:39 crc kubenswrapper[4811]: I0128 17:31:39.815600 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-run-httpd\") pod \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\" (UID: \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\") " Jan 28 17:31:39 crc kubenswrapper[4811]: I0128 
Jan 28 17:31:39 crc kubenswrapper[4811]: I0128 17:31:39.815638 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-79t76\" (UniqueName: \"kubernetes.io/projected/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-kube-api-access-79t76\") pod \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\" (UID: \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\") "
Jan 28 17:31:39 crc kubenswrapper[4811]: I0128 17:31:39.815662 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-log-httpd\") pod \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\" (UID: \"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25\") "
Jan 28 17:31:39 crc kubenswrapper[4811]: I0128 17:31:39.816915 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "8d7f752e-c4de-48b3-89d3-77d2b0f0dc25" (UID: "8d7f752e-c4de-48b3-89d3-77d2b0f0dc25"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 17:31:39 crc kubenswrapper[4811]: I0128 17:31:39.817205 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "8d7f752e-c4de-48b3-89d3-77d2b0f0dc25" (UID: "8d7f752e-c4de-48b3-89d3-77d2b0f0dc25"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 17:31:39 crc kubenswrapper[4811]: I0128 17:31:39.823103 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-kube-api-access-79t76" (OuterVolumeSpecName: "kube-api-access-79t76") pod "8d7f752e-c4de-48b3-89d3-77d2b0f0dc25" (UID: "8d7f752e-c4de-48b3-89d3-77d2b0f0dc25"). InnerVolumeSpecName "kube-api-access-79t76". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 17:31:39 crc kubenswrapper[4811]: I0128 17:31:39.822878 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-scripts" (OuterVolumeSpecName: "scripts") pod "8d7f752e-c4de-48b3-89d3-77d2b0f0dc25" (UID: "8d7f752e-c4de-48b3-89d3-77d2b0f0dc25"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:31:39 crc kubenswrapper[4811]: I0128 17:31:39.848605 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "8d7f752e-c4de-48b3-89d3-77d2b0f0dc25" (UID: "8d7f752e-c4de-48b3-89d3-77d2b0f0dc25"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:31:39 crc kubenswrapper[4811]: I0128 17:31:39.914516 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8d7f752e-c4de-48b3-89d3-77d2b0f0dc25" (UID: "8d7f752e-c4de-48b3-89d3-77d2b0f0dc25"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:31:39 crc kubenswrapper[4811]: I0128 17:31:39.918744 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:31:39 crc kubenswrapper[4811]: I0128 17:31:39.918786 4811 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 28 17:31:39 crc kubenswrapper[4811]: I0128 17:31:39.918800 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 17:31:39 crc kubenswrapper[4811]: I0128 17:31:39.918813 4811 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 28 17:31:39 crc kubenswrapper[4811]: I0128 17:31:39.918856 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-79t76\" (UniqueName: \"kubernetes.io/projected/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-kube-api-access-79t76\") on node \"crc\" DevicePath \"\"" Jan 28 17:31:39 crc kubenswrapper[4811]: I0128 17:31:39.918870 4811 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 28 17:31:39 crc kubenswrapper[4811]: I0128 17:31:39.934048 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-config-data" (OuterVolumeSpecName: "config-data") pod "8d7f752e-c4de-48b3-89d3-77d2b0f0dc25" (UID: "8d7f752e-c4de-48b3-89d3-77d2b0f0dc25"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.021719 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.340419 4811 scope.go:117] "RemoveContainer" containerID="c8f31485a8ab10a9c157ae14e2cb2f7e512fa0e9a55fee158caf88525ec54b35" Jan 28 17:31:40 crc kubenswrapper[4811]: E0128 17:31:40.340994 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.699505 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d7f752e-c4de-48b3-89d3-77d2b0f0dc25","Type":"ContainerDied","Data":"d2487c28e61fb6cbbcda577c356b59993e50de04a20ce0f01f896e30e9c729bd"} Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.699567 4811 scope.go:117] "RemoveContainer" containerID="310d9fdd21fc68cf42e05e528bd00d4ed863640860b8b14357bddc5b3c4f6458" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.699769 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.725944 4811 scope.go:117] "RemoveContainer" containerID="a2096624b8c3aebdc1fb588a98243bcb0c6d0f39fd0ea07308919cf568c20dc3" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.732967 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.750964 4811 scope.go:117] "RemoveContainer" containerID="bff06f2a49b9d8fe70a0838153b97789a11773e132044576fdfae34d235a3387" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.763554 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.777158 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 28 17:31:40 crc kubenswrapper[4811]: E0128 17:31:40.777717 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d7f752e-c4de-48b3-89d3-77d2b0f0dc25" containerName="ceilometer-central-agent" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.777743 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d7f752e-c4de-48b3-89d3-77d2b0f0dc25" containerName="ceilometer-central-agent" Jan 28 17:31:40 crc kubenswrapper[4811]: E0128 17:31:40.777762 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d7f752e-c4de-48b3-89d3-77d2b0f0dc25" containerName="ceilometer-notification-agent" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.777772 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d7f752e-c4de-48b3-89d3-77d2b0f0dc25" containerName="ceilometer-notification-agent" Jan 28 17:31:40 crc kubenswrapper[4811]: E0128 17:31:40.777788 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d7f752e-c4de-48b3-89d3-77d2b0f0dc25" containerName="sg-core" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.777796 4811 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="8d7f752e-c4de-48b3-89d3-77d2b0f0dc25" containerName="sg-core" Jan 28 17:31:40 crc kubenswrapper[4811]: E0128 17:31:40.777835 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d7f752e-c4de-48b3-89d3-77d2b0f0dc25" containerName="proxy-httpd" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.777847 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d7f752e-c4de-48b3-89d3-77d2b0f0dc25" containerName="proxy-httpd" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.778089 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d7f752e-c4de-48b3-89d3-77d2b0f0dc25" containerName="sg-core" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.778116 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d7f752e-c4de-48b3-89d3-77d2b0f0dc25" containerName="ceilometer-central-agent" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.778138 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d7f752e-c4de-48b3-89d3-77d2b0f0dc25" containerName="proxy-httpd" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.778149 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d7f752e-c4de-48b3-89d3-77d2b0f0dc25" containerName="ceilometer-notification-agent" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.780363 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.782526 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.783115 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.786560 4811 scope.go:117] "RemoveContainer" containerID="7f8b727b211f4b1030739bdaefbd71e777e3c25f32f473ce0f695bdac20919a8" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.789125 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.849725 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0605a185-82b4-481b-bbd7-55f0f8be336a-scripts\") pod \"ceilometer-0\" (UID: \"0605a185-82b4-481b-bbd7-55f0f8be336a\") " pod="openstack/ceilometer-0" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.850090 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0605a185-82b4-481b-bbd7-55f0f8be336a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0605a185-82b4-481b-bbd7-55f0f8be336a\") " pod="openstack/ceilometer-0" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.850138 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0605a185-82b4-481b-bbd7-55f0f8be336a-config-data\") pod \"ceilometer-0\" (UID: \"0605a185-82b4-481b-bbd7-55f0f8be336a\") " pod="openstack/ceilometer-0" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.850182 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0605a185-82b4-481b-bbd7-55f0f8be336a-log-httpd\") pod \"ceilometer-0\" (UID: 
\"0605a185-82b4-481b-bbd7-55f0f8be336a\") " pod="openstack/ceilometer-0" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.850222 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0605a185-82b4-481b-bbd7-55f0f8be336a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0605a185-82b4-481b-bbd7-55f0f8be336a\") " pod="openstack/ceilometer-0" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.850252 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6mnj\" (UniqueName: \"kubernetes.io/projected/0605a185-82b4-481b-bbd7-55f0f8be336a-kube-api-access-b6mnj\") pod \"ceilometer-0\" (UID: \"0605a185-82b4-481b-bbd7-55f0f8be336a\") " pod="openstack/ceilometer-0" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.850271 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0605a185-82b4-481b-bbd7-55f0f8be336a-run-httpd\") pod \"ceilometer-0\" (UID: \"0605a185-82b4-481b-bbd7-55f0f8be336a\") " pod="openstack/ceilometer-0" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.951794 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0605a185-82b4-481b-bbd7-55f0f8be336a-config-data\") pod \"ceilometer-0\" (UID: \"0605a185-82b4-481b-bbd7-55f0f8be336a\") " pod="openstack/ceilometer-0" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.952058 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0605a185-82b4-481b-bbd7-55f0f8be336a-log-httpd\") pod \"ceilometer-0\" (UID: \"0605a185-82b4-481b-bbd7-55f0f8be336a\") " pod="openstack/ceilometer-0" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.952188 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0605a185-82b4-481b-bbd7-55f0f8be336a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0605a185-82b4-481b-bbd7-55f0f8be336a\") " pod="openstack/ceilometer-0" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.952303 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6mnj\" (UniqueName: \"kubernetes.io/projected/0605a185-82b4-481b-bbd7-55f0f8be336a-kube-api-access-b6mnj\") pod \"ceilometer-0\" (UID: \"0605a185-82b4-481b-bbd7-55f0f8be336a\") " pod="openstack/ceilometer-0" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.952399 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0605a185-82b4-481b-bbd7-55f0f8be336a-run-httpd\") pod \"ceilometer-0\" (UID: \"0605a185-82b4-481b-bbd7-55f0f8be336a\") " pod="openstack/ceilometer-0" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.952502 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0605a185-82b4-481b-bbd7-55f0f8be336a-log-httpd\") pod \"ceilometer-0\" (UID: \"0605a185-82b4-481b-bbd7-55f0f8be336a\") " pod="openstack/ceilometer-0" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.952647 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/0605a185-82b4-481b-bbd7-55f0f8be336a-scripts\") pod \"ceilometer-0\" (UID: \"0605a185-82b4-481b-bbd7-55f0f8be336a\") " pod="openstack/ceilometer-0" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.952777 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0605a185-82b4-481b-bbd7-55f0f8be336a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0605a185-82b4-481b-bbd7-55f0f8be336a\") " pod="openstack/ceilometer-0" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.952904 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0605a185-82b4-481b-bbd7-55f0f8be336a-run-httpd\") pod \"ceilometer-0\" (UID: \"0605a185-82b4-481b-bbd7-55f0f8be336a\") " pod="openstack/ceilometer-0" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.959346 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0605a185-82b4-481b-bbd7-55f0f8be336a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0605a185-82b4-481b-bbd7-55f0f8be336a\") " pod="openstack/ceilometer-0" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.959751 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0605a185-82b4-481b-bbd7-55f0f8be336a-scripts\") pod \"ceilometer-0\" (UID: \"0605a185-82b4-481b-bbd7-55f0f8be336a\") " pod="openstack/ceilometer-0" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.959827 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0605a185-82b4-481b-bbd7-55f0f8be336a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0605a185-82b4-481b-bbd7-55f0f8be336a\") " pod="openstack/ceilometer-0" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.972601 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0605a185-82b4-481b-bbd7-55f0f8be336a-config-data\") pod \"ceilometer-0\" (UID: \"0605a185-82b4-481b-bbd7-55f0f8be336a\") " pod="openstack/ceilometer-0" Jan 28 17:31:40 crc kubenswrapper[4811]: I0128 17:31:40.982596 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6mnj\" (UniqueName: \"kubernetes.io/projected/0605a185-82b4-481b-bbd7-55f0f8be336a-kube-api-access-b6mnj\") pod \"ceilometer-0\" (UID: \"0605a185-82b4-481b-bbd7-55f0f8be336a\") " pod="openstack/ceilometer-0" Jan 28 17:31:41 crc kubenswrapper[4811]: I0128 17:31:41.103070 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 28 17:31:41 crc kubenswrapper[4811]: I0128 17:31:41.666008 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 28 17:31:41 crc kubenswrapper[4811]: W0128 17:31:41.679209 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0605a185_82b4_481b_bbd7_55f0f8be336a.slice/crio-604d9ed0bfaff3f1176e82108c67fa24228cd0f87019e8e2b2cf5298ec11c47f WatchSource:0}: Error finding container 604d9ed0bfaff3f1176e82108c67fa24228cd0f87019e8e2b2cf5298ec11c47f: Status 404 returned error can't find the container with id 604d9ed0bfaff3f1176e82108c67fa24228cd0f87019e8e2b2cf5298ec11c47f Jan 28 17:31:41 crc kubenswrapper[4811]: I0128 17:31:41.710603 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0605a185-82b4-481b-bbd7-55f0f8be336a","Type":"ContainerStarted","Data":"604d9ed0bfaff3f1176e82108c67fa24228cd0f87019e8e2b2cf5298ec11c47f"} Jan 28 17:31:41 crc kubenswrapper[4811]: I0128 17:31:41.714591 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"0fd0af80-eaab-4937-b501-6b8f67cdce10","Type":"ContainerStarted","Data":"a977b11560e63ac5b23296df628ac7d23113c532f346ea17765c220270dba0dd"} Jan 28 17:31:41 crc kubenswrapper[4811]: I0128 17:31:41.744868 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-0" podStartSLOduration=3.078510737 podStartE2EDuration="8.744842259s" podCreationTimestamp="2026-01-28 17:31:33 +0000 UTC" firstStartedPulling="2026-01-28 17:31:34.808475947 +0000 UTC m=+6387.562839530" lastFinishedPulling="2026-01-28 17:31:40.474807459 +0000 UTC m=+6393.229171052" observedRunningTime="2026-01-28 17:31:41.740568544 +0000 UTC m=+6394.494932127" watchObservedRunningTime="2026-01-28 17:31:41.744842259 +0000 UTC m=+6394.499205842" Jan 28 17:31:42 crc kubenswrapper[4811]: I0128 17:31:42.352598 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d7f752e-c4de-48b3-89d3-77d2b0f0dc25" path="/var/lib/kubelet/pods/8d7f752e-c4de-48b3-89d3-77d2b0f0dc25/volumes" Jan 28 17:31:42 crc kubenswrapper[4811]: I0128 17:31:42.728377 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0605a185-82b4-481b-bbd7-55f0f8be336a","Type":"ContainerStarted","Data":"21a2faa2df26e1d62d280372aca8995472dcd1fe20923eed71d0d6056abbc8d3"} Jan 28 17:31:44 crc kubenswrapper[4811]: I0128 17:31:44.758302 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0605a185-82b4-481b-bbd7-55f0f8be336a","Type":"ContainerStarted","Data":"7bb168727e7fe79a7fc1401a09e96906f79980de59145bbf79c707fced87f1ff"} Jan 28 17:31:44 crc kubenswrapper[4811]: I0128 17:31:44.758869 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0605a185-82b4-481b-bbd7-55f0f8be336a","Type":"ContainerStarted","Data":"e48b88cbe30770ee1396310ed59d3a59cc30ffaf637c5737521cb527a19a37b1"} Jan 28 17:31:47 crc kubenswrapper[4811]: I0128 17:31:47.788125 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0605a185-82b4-481b-bbd7-55f0f8be336a","Type":"ContainerStarted","Data":"93019c3d06784ba07e48a879a5ba7af774e83c32b2aca767f30ede7417600ac0"} Jan 28 17:31:47 crc kubenswrapper[4811]: I0128 17:31:47.789877 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 28 
Jan 28 17:31:47 crc kubenswrapper[4811]: I0128 17:31:47.812888 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.91349198 podStartE2EDuration="7.812866086s" podCreationTimestamp="2026-01-28 17:31:40 +0000 UTC" firstStartedPulling="2026-01-28 17:31:41.682490215 +0000 UTC m=+6394.436853798" lastFinishedPulling="2026-01-28 17:31:46.581864321 +0000 UTC m=+6399.336227904" observedRunningTime="2026-01-28 17:31:47.81003582 +0000 UTC m=+6400.564399403" watchObservedRunningTime="2026-01-28 17:31:47.812866086 +0000 UTC m=+6400.567229669"
Jan 28 17:31:49 crc kubenswrapper[4811]: I0128 17:31:49.459657 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-create-9vf2j"]
Jan 28 17:31:49 crc kubenswrapper[4811]: I0128 17:31:49.461300 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-9vf2j"
Jan 28 17:31:49 crc kubenswrapper[4811]: I0128 17:31:49.474066 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-9vf2j"]
Jan 28 17:31:49 crc kubenswrapper[4811]: I0128 17:31:49.543856 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wv89x\" (UniqueName: \"kubernetes.io/projected/af23e46d-fc06-409c-ab89-9da29e7c7eae-kube-api-access-wv89x\") pod \"manila-db-create-9vf2j\" (UID: \"af23e46d-fc06-409c-ab89-9da29e7c7eae\") " pod="openstack/manila-db-create-9vf2j"
Jan 28 17:31:49 crc kubenswrapper[4811]: I0128 17:31:49.544021 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af23e46d-fc06-409c-ab89-9da29e7c7eae-operator-scripts\") pod \"manila-db-create-9vf2j\" (UID: \"af23e46d-fc06-409c-ab89-9da29e7c7eae\") " pod="openstack/manila-db-create-9vf2j"
Jan 28 17:31:49 crc kubenswrapper[4811]: I0128 17:31:49.564591 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-d4f7-account-create-update-lnp5z"]
Jan 28 17:31:49 crc kubenswrapper[4811]: I0128 17:31:49.566771 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-d4f7-account-create-update-lnp5z"
Need to start a new one" pod="openstack/manila-d4f7-account-create-update-lnp5z" Jan 28 17:31:49 crc kubenswrapper[4811]: I0128 17:31:49.570925 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-db-secret" Jan 28 17:31:49 crc kubenswrapper[4811]: I0128 17:31:49.581159 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-d4f7-account-create-update-lnp5z"] Jan 28 17:31:49 crc kubenswrapper[4811]: I0128 17:31:49.646145 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wv89x\" (UniqueName: \"kubernetes.io/projected/af23e46d-fc06-409c-ab89-9da29e7c7eae-kube-api-access-wv89x\") pod \"manila-db-create-9vf2j\" (UID: \"af23e46d-fc06-409c-ab89-9da29e7c7eae\") " pod="openstack/manila-db-create-9vf2j" Jan 28 17:31:49 crc kubenswrapper[4811]: I0128 17:31:49.646362 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af23e46d-fc06-409c-ab89-9da29e7c7eae-operator-scripts\") pod \"manila-db-create-9vf2j\" (UID: \"af23e46d-fc06-409c-ab89-9da29e7c7eae\") " pod="openstack/manila-db-create-9vf2j" Jan 28 17:31:49 crc kubenswrapper[4811]: I0128 17:31:49.647322 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af23e46d-fc06-409c-ab89-9da29e7c7eae-operator-scripts\") pod \"manila-db-create-9vf2j\" (UID: \"af23e46d-fc06-409c-ab89-9da29e7c7eae\") " pod="openstack/manila-db-create-9vf2j" Jan 28 17:31:49 crc kubenswrapper[4811]: I0128 17:31:49.664704 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wv89x\" (UniqueName: \"kubernetes.io/projected/af23e46d-fc06-409c-ab89-9da29e7c7eae-kube-api-access-wv89x\") pod \"manila-db-create-9vf2j\" (UID: \"af23e46d-fc06-409c-ab89-9da29e7c7eae\") " pod="openstack/manila-db-create-9vf2j" Jan 28 17:31:49 crc kubenswrapper[4811]: I0128 17:31:49.748130 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f548650-0d4f-4bb2-87e5-24f2e4291eeb-operator-scripts\") pod \"manila-d4f7-account-create-update-lnp5z\" (UID: \"4f548650-0d4f-4bb2-87e5-24f2e4291eeb\") " pod="openstack/manila-d4f7-account-create-update-lnp5z" Jan 28 17:31:49 crc kubenswrapper[4811]: I0128 17:31:49.748334 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4j6cv\" (UniqueName: \"kubernetes.io/projected/4f548650-0d4f-4bb2-87e5-24f2e4291eeb-kube-api-access-4j6cv\") pod \"manila-d4f7-account-create-update-lnp5z\" (UID: \"4f548650-0d4f-4bb2-87e5-24f2e4291eeb\") " pod="openstack/manila-d4f7-account-create-update-lnp5z" Jan 28 17:31:49 crc kubenswrapper[4811]: I0128 17:31:49.780307 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-create-9vf2j" Jan 28 17:31:49 crc kubenswrapper[4811]: I0128 17:31:49.850640 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4j6cv\" (UniqueName: \"kubernetes.io/projected/4f548650-0d4f-4bb2-87e5-24f2e4291eeb-kube-api-access-4j6cv\") pod \"manila-d4f7-account-create-update-lnp5z\" (UID: \"4f548650-0d4f-4bb2-87e5-24f2e4291eeb\") " pod="openstack/manila-d4f7-account-create-update-lnp5z" Jan 28 17:31:49 crc kubenswrapper[4811]: I0128 17:31:49.850877 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f548650-0d4f-4bb2-87e5-24f2e4291eeb-operator-scripts\") pod \"manila-d4f7-account-create-update-lnp5z\" (UID: \"4f548650-0d4f-4bb2-87e5-24f2e4291eeb\") " pod="openstack/manila-d4f7-account-create-update-lnp5z" Jan 28 17:31:49 crc kubenswrapper[4811]: I0128 17:31:49.851917 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f548650-0d4f-4bb2-87e5-24f2e4291eeb-operator-scripts\") pod \"manila-d4f7-account-create-update-lnp5z\" (UID: \"4f548650-0d4f-4bb2-87e5-24f2e4291eeb\") " pod="openstack/manila-d4f7-account-create-update-lnp5z" Jan 28 17:31:49 crc kubenswrapper[4811]: I0128 17:31:49.875299 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4j6cv\" (UniqueName: \"kubernetes.io/projected/4f548650-0d4f-4bb2-87e5-24f2e4291eeb-kube-api-access-4j6cv\") pod \"manila-d4f7-account-create-update-lnp5z\" (UID: \"4f548650-0d4f-4bb2-87e5-24f2e4291eeb\") " pod="openstack/manila-d4f7-account-create-update-lnp5z" Jan 28 17:31:49 crc kubenswrapper[4811]: I0128 17:31:49.887024 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-d4f7-account-create-update-lnp5z" Jan 28 17:31:50 crc kubenswrapper[4811]: I0128 17:31:50.448134 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-d4f7-account-create-update-lnp5z"] Jan 28 17:31:50 crc kubenswrapper[4811]: W0128 17:31:50.452566 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4f548650_0d4f_4bb2_87e5_24f2e4291eeb.slice/crio-14a1f21e7ceaae6ff4991344dcdac7099ed30958c4b97df0b5f3ddb10075a593 WatchSource:0}: Error finding container 14a1f21e7ceaae6ff4991344dcdac7099ed30958c4b97df0b5f3ddb10075a593: Status 404 returned error can't find the container with id 14a1f21e7ceaae6ff4991344dcdac7099ed30958c4b97df0b5f3ddb10075a593 Jan 28 17:31:50 crc kubenswrapper[4811]: I0128 17:31:50.489775 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-9vf2j"] Jan 28 17:31:50 crc kubenswrapper[4811]: I0128 17:31:50.827103 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-9vf2j" event={"ID":"af23e46d-fc06-409c-ab89-9da29e7c7eae","Type":"ContainerStarted","Data":"eb7e9bc5c819711b5ae24de1b9c8d76ad77a47a050e17813ff1235c17b2c8b83"} Jan 28 17:31:50 crc kubenswrapper[4811]: I0128 17:31:50.827462 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-9vf2j" event={"ID":"af23e46d-fc06-409c-ab89-9da29e7c7eae","Type":"ContainerStarted","Data":"0027c0e253b3416cfd2d5123c04e7f82ab8dd65734b5e38a1f8171695883411c"} Jan 28 17:31:50 crc kubenswrapper[4811]: I0128 17:31:50.830651 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-d4f7-account-create-update-lnp5z" event={"ID":"4f548650-0d4f-4bb2-87e5-24f2e4291eeb","Type":"ContainerStarted","Data":"0d1ea42c2c9771661ba140247df0064e66e8e14ac8ac180f2f157e89c7e01d5f"} Jan 28 17:31:50 crc kubenswrapper[4811]: I0128 17:31:50.830692 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-d4f7-account-create-update-lnp5z" event={"ID":"4f548650-0d4f-4bb2-87e5-24f2e4291eeb","Type":"ContainerStarted","Data":"14a1f21e7ceaae6ff4991344dcdac7099ed30958c4b97df0b5f3ddb10075a593"} Jan 28 17:31:50 crc kubenswrapper[4811]: I0128 17:31:50.849847 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-create-9vf2j" podStartSLOduration=1.849826514 podStartE2EDuration="1.849826514s" podCreationTimestamp="2026-01-28 17:31:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:31:50.847888362 +0000 UTC m=+6403.602251935" watchObservedRunningTime="2026-01-28 17:31:50.849826514 +0000 UTC m=+6403.604190097" Jan 28 17:31:50 crc kubenswrapper[4811]: I0128 17:31:50.890782 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-d4f7-account-create-update-lnp5z" podStartSLOduration=1.89076166 podStartE2EDuration="1.89076166s" podCreationTimestamp="2026-01-28 17:31:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:31:50.879923928 +0000 UTC m=+6403.634287511" watchObservedRunningTime="2026-01-28 17:31:50.89076166 +0000 UTC m=+6403.645125243" Jan 28 17:31:51 crc kubenswrapper[4811]: I0128 17:31:51.840461 4811 generic.go:334] "Generic (PLEG): container finished" podID="af23e46d-fc06-409c-ab89-9da29e7c7eae" 
containerID="eb7e9bc5c819711b5ae24de1b9c8d76ad77a47a050e17813ff1235c17b2c8b83" exitCode=0 Jan 28 17:31:51 crc kubenswrapper[4811]: I0128 17:31:51.840766 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-9vf2j" event={"ID":"af23e46d-fc06-409c-ab89-9da29e7c7eae","Type":"ContainerDied","Data":"eb7e9bc5c819711b5ae24de1b9c8d76ad77a47a050e17813ff1235c17b2c8b83"} Jan 28 17:31:51 crc kubenswrapper[4811]: I0128 17:31:51.843468 4811 generic.go:334] "Generic (PLEG): container finished" podID="4f548650-0d4f-4bb2-87e5-24f2e4291eeb" containerID="0d1ea42c2c9771661ba140247df0064e66e8e14ac8ac180f2f157e89c7e01d5f" exitCode=0 Jan 28 17:31:51 crc kubenswrapper[4811]: I0128 17:31:51.843519 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-d4f7-account-create-update-lnp5z" event={"ID":"4f548650-0d4f-4bb2-87e5-24f2e4291eeb","Type":"ContainerDied","Data":"0d1ea42c2c9771661ba140247df0064e66e8e14ac8ac180f2f157e89c7e01d5f"} Jan 28 17:31:53 crc kubenswrapper[4811]: I0128 17:31:53.336414 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-d4f7-account-create-update-lnp5z" Jan 28 17:31:53 crc kubenswrapper[4811]: I0128 17:31:53.444747 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f548650-0d4f-4bb2-87e5-24f2e4291eeb-operator-scripts\") pod \"4f548650-0d4f-4bb2-87e5-24f2e4291eeb\" (UID: \"4f548650-0d4f-4bb2-87e5-24f2e4291eeb\") " Jan 28 17:31:53 crc kubenswrapper[4811]: I0128 17:31:53.445882 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4j6cv\" (UniqueName: \"kubernetes.io/projected/4f548650-0d4f-4bb2-87e5-24f2e4291eeb-kube-api-access-4j6cv\") pod \"4f548650-0d4f-4bb2-87e5-24f2e4291eeb\" (UID: \"4f548650-0d4f-4bb2-87e5-24f2e4291eeb\") " Jan 28 17:31:53 crc kubenswrapper[4811]: I0128 17:31:53.446132 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f548650-0d4f-4bb2-87e5-24f2e4291eeb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4f548650-0d4f-4bb2-87e5-24f2e4291eeb" (UID: "4f548650-0d4f-4bb2-87e5-24f2e4291eeb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:31:53 crc kubenswrapper[4811]: I0128 17:31:53.446574 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f548650-0d4f-4bb2-87e5-24f2e4291eeb-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:31:53 crc kubenswrapper[4811]: I0128 17:31:53.452131 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f548650-0d4f-4bb2-87e5-24f2e4291eeb-kube-api-access-4j6cv" (OuterVolumeSpecName: "kube-api-access-4j6cv") pod "4f548650-0d4f-4bb2-87e5-24f2e4291eeb" (UID: "4f548650-0d4f-4bb2-87e5-24f2e4291eeb"). InnerVolumeSpecName "kube-api-access-4j6cv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:31:53 crc kubenswrapper[4811]: I0128 17:31:53.548734 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4j6cv\" (UniqueName: \"kubernetes.io/projected/4f548650-0d4f-4bb2-87e5-24f2e4291eeb-kube-api-access-4j6cv\") on node \"crc\" DevicePath \"\"" Jan 28 17:31:53 crc kubenswrapper[4811]: I0128 17:31:53.563145 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-create-9vf2j" Jan 28 17:31:53 crc kubenswrapper[4811]: I0128 17:31:53.751470 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wv89x\" (UniqueName: \"kubernetes.io/projected/af23e46d-fc06-409c-ab89-9da29e7c7eae-kube-api-access-wv89x\") pod \"af23e46d-fc06-409c-ab89-9da29e7c7eae\" (UID: \"af23e46d-fc06-409c-ab89-9da29e7c7eae\") " Jan 28 17:31:53 crc kubenswrapper[4811]: I0128 17:31:53.751598 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af23e46d-fc06-409c-ab89-9da29e7c7eae-operator-scripts\") pod \"af23e46d-fc06-409c-ab89-9da29e7c7eae\" (UID: \"af23e46d-fc06-409c-ab89-9da29e7c7eae\") " Jan 28 17:31:53 crc kubenswrapper[4811]: I0128 17:31:53.752980 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af23e46d-fc06-409c-ab89-9da29e7c7eae-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "af23e46d-fc06-409c-ab89-9da29e7c7eae" (UID: "af23e46d-fc06-409c-ab89-9da29e7c7eae"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:31:53 crc kubenswrapper[4811]: I0128 17:31:53.768975 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af23e46d-fc06-409c-ab89-9da29e7c7eae-kube-api-access-wv89x" (OuterVolumeSpecName: "kube-api-access-wv89x") pod "af23e46d-fc06-409c-ab89-9da29e7c7eae" (UID: "af23e46d-fc06-409c-ab89-9da29e7c7eae"). InnerVolumeSpecName "kube-api-access-wv89x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:31:53 crc kubenswrapper[4811]: I0128 17:31:53.854060 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wv89x\" (UniqueName: \"kubernetes.io/projected/af23e46d-fc06-409c-ab89-9da29e7c7eae-kube-api-access-wv89x\") on node \"crc\" DevicePath \"\"" Jan 28 17:31:53 crc kubenswrapper[4811]: I0128 17:31:53.854126 4811 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af23e46d-fc06-409c-ab89-9da29e7c7eae-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:31:53 crc kubenswrapper[4811]: I0128 17:31:53.865520 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-9vf2j" event={"ID":"af23e46d-fc06-409c-ab89-9da29e7c7eae","Type":"ContainerDied","Data":"0027c0e253b3416cfd2d5123c04e7f82ab8dd65734b5e38a1f8171695883411c"} Jan 28 17:31:53 crc kubenswrapper[4811]: I0128 17:31:53.865577 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0027c0e253b3416cfd2d5123c04e7f82ab8dd65734b5e38a1f8171695883411c" Jan 28 17:31:53 crc kubenswrapper[4811]: I0128 17:31:53.865544 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-create-9vf2j" Jan 28 17:31:53 crc kubenswrapper[4811]: I0128 17:31:53.867522 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-d4f7-account-create-update-lnp5z" event={"ID":"4f548650-0d4f-4bb2-87e5-24f2e4291eeb","Type":"ContainerDied","Data":"14a1f21e7ceaae6ff4991344dcdac7099ed30958c4b97df0b5f3ddb10075a593"} Jan 28 17:31:53 crc kubenswrapper[4811]: I0128 17:31:53.867566 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="14a1f21e7ceaae6ff4991344dcdac7099ed30958c4b97df0b5f3ddb10075a593" Jan 28 17:31:53 crc kubenswrapper[4811]: I0128 17:31:53.867623 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-d4f7-account-create-update-lnp5z" Jan 28 17:31:53 crc kubenswrapper[4811]: E0128 17:31:53.982248 4811 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaf23e46d_fc06_409c_ab89_9da29e7c7eae.slice\": RecentStats: unable to find data in memory cache]" Jan 28 17:31:54 crc kubenswrapper[4811]: I0128 17:31:54.341614 4811 scope.go:117] "RemoveContainer" containerID="c8f31485a8ab10a9c157ae14e2cb2f7e512fa0e9a55fee158caf88525ec54b35" Jan 28 17:31:54 crc kubenswrapper[4811]: E0128 17:31:54.342497 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:32:03 crc kubenswrapper[4811]: I0128 17:32:03.002280 4811 trace.go:236] Trace[1334531995]: "Calculate volume metrics of ovndbcluster-nb-etc-ovn for pod openstack/ovsdbserver-nb-0" (28-Jan-2026 17:31:58.753) (total time: 4249ms): Jan 28 17:32:03 crc kubenswrapper[4811]: Trace[1334531995]: [4.249090524s] [4.249090524s] END Jan 28 17:32:03 crc kubenswrapper[4811]: I0128 17:32:03.108033 4811 trace.go:236] Trace[1275170551]: "Calculate volume metrics of ovn-data for pod openstack/ovn-copy-data" (28-Jan-2026 17:32:00.892) (total time: 2215ms): Jan 28 17:32:03 crc kubenswrapper[4811]: Trace[1275170551]: [2.215401877s] [2.215401877s] END Jan 28 17:32:03 crc kubenswrapper[4811]: I0128 17:32:03.432155 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-sync-wbztr"] Jan 28 17:32:03 crc kubenswrapper[4811]: E0128 17:32:03.432867 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f548650-0d4f-4bb2-87e5-24f2e4291eeb" containerName="mariadb-account-create-update" Jan 28 17:32:03 crc kubenswrapper[4811]: I0128 17:32:03.432895 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f548650-0d4f-4bb2-87e5-24f2e4291eeb" containerName="mariadb-account-create-update" Jan 28 17:32:03 crc kubenswrapper[4811]: E0128 17:32:03.432933 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af23e46d-fc06-409c-ab89-9da29e7c7eae" containerName="mariadb-database-create" Jan 28 17:32:03 crc kubenswrapper[4811]: I0128 17:32:03.432943 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="af23e46d-fc06-409c-ab89-9da29e7c7eae" containerName="mariadb-database-create" Jan 28 17:32:03 crc kubenswrapper[4811]: I0128 17:32:03.433162 4811 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="4f548650-0d4f-4bb2-87e5-24f2e4291eeb" containerName="mariadb-account-create-update" Jan 28 17:32:03 crc kubenswrapper[4811]: I0128 17:32:03.433205 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="af23e46d-fc06-409c-ab89-9da29e7c7eae" containerName="mariadb-database-create" Jan 28 17:32:03 crc kubenswrapper[4811]: I0128 17:32:03.434102 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-wbztr" Jan 28 17:32:03 crc kubenswrapper[4811]: I0128 17:32:03.437074 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-rcfcx" Jan 28 17:32:03 crc kubenswrapper[4811]: I0128 17:32:03.437263 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Jan 28 17:32:03 crc kubenswrapper[4811]: I0128 17:32:03.454175 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-wbztr"] Jan 28 17:32:03 crc kubenswrapper[4811]: I0128 17:32:03.562967 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d47a9735-0b67-4c06-812c-07bd37e9002d-config-data\") pod \"manila-db-sync-wbztr\" (UID: \"d47a9735-0b67-4c06-812c-07bd37e9002d\") " pod="openstack/manila-db-sync-wbztr" Jan 28 17:32:03 crc kubenswrapper[4811]: I0128 17:32:03.563089 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfpwr\" (UniqueName: \"kubernetes.io/projected/d47a9735-0b67-4c06-812c-07bd37e9002d-kube-api-access-qfpwr\") pod \"manila-db-sync-wbztr\" (UID: \"d47a9735-0b67-4c06-812c-07bd37e9002d\") " pod="openstack/manila-db-sync-wbztr" Jan 28 17:32:03 crc kubenswrapper[4811]: I0128 17:32:03.563114 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d47a9735-0b67-4c06-812c-07bd37e9002d-combined-ca-bundle\") pod \"manila-db-sync-wbztr\" (UID: \"d47a9735-0b67-4c06-812c-07bd37e9002d\") " pod="openstack/manila-db-sync-wbztr" Jan 28 17:32:03 crc kubenswrapper[4811]: I0128 17:32:03.563153 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/d47a9735-0b67-4c06-812c-07bd37e9002d-job-config-data\") pod \"manila-db-sync-wbztr\" (UID: \"d47a9735-0b67-4c06-812c-07bd37e9002d\") " pod="openstack/manila-db-sync-wbztr" Jan 28 17:32:03 crc kubenswrapper[4811]: I0128 17:32:03.664864 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d47a9735-0b67-4c06-812c-07bd37e9002d-config-data\") pod \"manila-db-sync-wbztr\" (UID: \"d47a9735-0b67-4c06-812c-07bd37e9002d\") " pod="openstack/manila-db-sync-wbztr" Jan 28 17:32:03 crc kubenswrapper[4811]: I0128 17:32:03.665073 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfpwr\" (UniqueName: \"kubernetes.io/projected/d47a9735-0b67-4c06-812c-07bd37e9002d-kube-api-access-qfpwr\") pod \"manila-db-sync-wbztr\" (UID: \"d47a9735-0b67-4c06-812c-07bd37e9002d\") " pod="openstack/manila-db-sync-wbztr" Jan 28 17:32:03 crc kubenswrapper[4811]: I0128 17:32:03.665411 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/d47a9735-0b67-4c06-812c-07bd37e9002d-combined-ca-bundle\") pod \"manila-db-sync-wbztr\" (UID: \"d47a9735-0b67-4c06-812c-07bd37e9002d\") " pod="openstack/manila-db-sync-wbztr" Jan 28 17:32:03 crc kubenswrapper[4811]: I0128 17:32:03.666410 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/d47a9735-0b67-4c06-812c-07bd37e9002d-job-config-data\") pod \"manila-db-sync-wbztr\" (UID: \"d47a9735-0b67-4c06-812c-07bd37e9002d\") " pod="openstack/manila-db-sync-wbztr" Jan 28 17:32:03 crc kubenswrapper[4811]: I0128 17:32:03.671690 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/d47a9735-0b67-4c06-812c-07bd37e9002d-job-config-data\") pod \"manila-db-sync-wbztr\" (UID: \"d47a9735-0b67-4c06-812c-07bd37e9002d\") " pod="openstack/manila-db-sync-wbztr" Jan 28 17:32:03 crc kubenswrapper[4811]: I0128 17:32:03.672062 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d47a9735-0b67-4c06-812c-07bd37e9002d-combined-ca-bundle\") pod \"manila-db-sync-wbztr\" (UID: \"d47a9735-0b67-4c06-812c-07bd37e9002d\") " pod="openstack/manila-db-sync-wbztr" Jan 28 17:32:03 crc kubenswrapper[4811]: I0128 17:32:03.673318 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d47a9735-0b67-4c06-812c-07bd37e9002d-config-data\") pod \"manila-db-sync-wbztr\" (UID: \"d47a9735-0b67-4c06-812c-07bd37e9002d\") " pod="openstack/manila-db-sync-wbztr" Jan 28 17:32:03 crc kubenswrapper[4811]: I0128 17:32:03.686185 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfpwr\" (UniqueName: \"kubernetes.io/projected/d47a9735-0b67-4c06-812c-07bd37e9002d-kube-api-access-qfpwr\") pod \"manila-db-sync-wbztr\" (UID: \"d47a9735-0b67-4c06-812c-07bd37e9002d\") " pod="openstack/manila-db-sync-wbztr" Jan 28 17:32:03 crc kubenswrapper[4811]: I0128 17:32:03.753245 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-wbztr" Jan 28 17:32:04 crc kubenswrapper[4811]: I0128 17:32:04.886782 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-wbztr"] Jan 28 17:32:04 crc kubenswrapper[4811]: W0128 17:32:04.894408 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd47a9735_0b67_4c06_812c_07bd37e9002d.slice/crio-382ef690218aac25bf8a0f34b7af6e0b1123266b4496603bb9d877c291c84ba0 WatchSource:0}: Error finding container 382ef690218aac25bf8a0f34b7af6e0b1123266b4496603bb9d877c291c84ba0: Status 404 returned error can't find the container with id 382ef690218aac25bf8a0f34b7af6e0b1123266b4496603bb9d877c291c84ba0 Jan 28 17:32:04 crc kubenswrapper[4811]: I0128 17:32:04.976525 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-wbztr" event={"ID":"d47a9735-0b67-4c06-812c-07bd37e9002d","Type":"ContainerStarted","Data":"382ef690218aac25bf8a0f34b7af6e0b1123266b4496603bb9d877c291c84ba0"} Jan 28 17:32:07 crc kubenswrapper[4811]: I0128 17:32:07.339074 4811 scope.go:117] "RemoveContainer" containerID="c8f31485a8ab10a9c157ae14e2cb2f7e512fa0e9a55fee158caf88525ec54b35" Jan 28 17:32:07 crc kubenswrapper[4811]: E0128 17:32:07.339599 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:32:08 crc kubenswrapper[4811]: I0128 17:32:08.330562 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-26j9c"] Jan 28 17:32:08 crc kubenswrapper[4811]: I0128 17:32:08.336687 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-26j9c" Jan 28 17:32:08 crc kubenswrapper[4811]: I0128 17:32:08.354289 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-26j9c"] Jan 28 17:32:08 crc kubenswrapper[4811]: I0128 17:32:08.469122 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c66b660-0464-4966-b9e7-9002f3af9163-utilities\") pod \"redhat-operators-26j9c\" (UID: \"7c66b660-0464-4966-b9e7-9002f3af9163\") " pod="openshift-marketplace/redhat-operators-26j9c" Jan 28 17:32:08 crc kubenswrapper[4811]: I0128 17:32:08.469345 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c66b660-0464-4966-b9e7-9002f3af9163-catalog-content\") pod \"redhat-operators-26j9c\" (UID: \"7c66b660-0464-4966-b9e7-9002f3af9163\") " pod="openshift-marketplace/redhat-operators-26j9c" Jan 28 17:32:08 crc kubenswrapper[4811]: I0128 17:32:08.469386 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kk69l\" (UniqueName: \"kubernetes.io/projected/7c66b660-0464-4966-b9e7-9002f3af9163-kube-api-access-kk69l\") pod \"redhat-operators-26j9c\" (UID: \"7c66b660-0464-4966-b9e7-9002f3af9163\") " pod="openshift-marketplace/redhat-operators-26j9c" Jan 28 17:32:08 crc kubenswrapper[4811]: I0128 17:32:08.571236 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c66b660-0464-4966-b9e7-9002f3af9163-catalog-content\") pod \"redhat-operators-26j9c\" (UID: \"7c66b660-0464-4966-b9e7-9002f3af9163\") " pod="openshift-marketplace/redhat-operators-26j9c" Jan 28 17:32:08 crc kubenswrapper[4811]: I0128 17:32:08.571291 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kk69l\" (UniqueName: \"kubernetes.io/projected/7c66b660-0464-4966-b9e7-9002f3af9163-kube-api-access-kk69l\") pod \"redhat-operators-26j9c\" (UID: \"7c66b660-0464-4966-b9e7-9002f3af9163\") " pod="openshift-marketplace/redhat-operators-26j9c" Jan 28 17:32:08 crc kubenswrapper[4811]: I0128 17:32:08.571425 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c66b660-0464-4966-b9e7-9002f3af9163-utilities\") pod \"redhat-operators-26j9c\" (UID: \"7c66b660-0464-4966-b9e7-9002f3af9163\") " pod="openshift-marketplace/redhat-operators-26j9c" Jan 28 17:32:08 crc kubenswrapper[4811]: I0128 17:32:08.571926 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c66b660-0464-4966-b9e7-9002f3af9163-utilities\") pod \"redhat-operators-26j9c\" (UID: \"7c66b660-0464-4966-b9e7-9002f3af9163\") " pod="openshift-marketplace/redhat-operators-26j9c" Jan 28 17:32:08 crc kubenswrapper[4811]: I0128 17:32:08.572187 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c66b660-0464-4966-b9e7-9002f3af9163-catalog-content\") pod \"redhat-operators-26j9c\" (UID: \"7c66b660-0464-4966-b9e7-9002f3af9163\") " pod="openshift-marketplace/redhat-operators-26j9c" Jan 28 17:32:08 crc kubenswrapper[4811]: I0128 17:32:08.594817 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-kk69l\" (UniqueName: \"kubernetes.io/projected/7c66b660-0464-4966-b9e7-9002f3af9163-kube-api-access-kk69l\") pod \"redhat-operators-26j9c\" (UID: \"7c66b660-0464-4966-b9e7-9002f3af9163\") " pod="openshift-marketplace/redhat-operators-26j9c" Jan 28 17:32:08 crc kubenswrapper[4811]: I0128 17:32:08.674059 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-26j9c" Jan 28 17:32:10 crc kubenswrapper[4811]: I0128 17:32:10.830994 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-26j9c"] Jan 28 17:32:10 crc kubenswrapper[4811]: W0128 17:32:10.835800 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7c66b660_0464_4966_b9e7_9002f3af9163.slice/crio-4da762ac20e3b8d5804ab9b6ca2a7ccd6357bc428ac92ddcb1956b41dbbd5dad WatchSource:0}: Error finding container 4da762ac20e3b8d5804ab9b6ca2a7ccd6357bc428ac92ddcb1956b41dbbd5dad: Status 404 returned error can't find the container with id 4da762ac20e3b8d5804ab9b6ca2a7ccd6357bc428ac92ddcb1956b41dbbd5dad Jan 28 17:32:11 crc kubenswrapper[4811]: I0128 17:32:11.040294 4811 generic.go:334] "Generic (PLEG): container finished" podID="7c66b660-0464-4966-b9e7-9002f3af9163" containerID="4d3d6c41768033b4931eda844fdcdf735110d55f699ad68c2a5b8b1853e4d712" exitCode=0 Jan 28 17:32:11 crc kubenswrapper[4811]: I0128 17:32:11.040491 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-26j9c" event={"ID":"7c66b660-0464-4966-b9e7-9002f3af9163","Type":"ContainerDied","Data":"4d3d6c41768033b4931eda844fdcdf735110d55f699ad68c2a5b8b1853e4d712"} Jan 28 17:32:11 crc kubenswrapper[4811]: I0128 17:32:11.040601 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-26j9c" event={"ID":"7c66b660-0464-4966-b9e7-9002f3af9163","Type":"ContainerStarted","Data":"4da762ac20e3b8d5804ab9b6ca2a7ccd6357bc428ac92ddcb1956b41dbbd5dad"} Jan 28 17:32:11 crc kubenswrapper[4811]: I0128 17:32:11.110779 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Jan 28 17:32:12 crc kubenswrapper[4811]: I0128 17:32:12.051930 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-wbztr" event={"ID":"d47a9735-0b67-4c06-812c-07bd37e9002d","Type":"ContainerStarted","Data":"651b973e5990a6b7476d0762d53a2a169d8a1e830c576151f159af22bb494fe1"} Jan 28 17:32:12 crc kubenswrapper[4811]: I0128 17:32:12.080342 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-sync-wbztr" podStartSLOduration=3.657849757 podStartE2EDuration="9.080324779s" podCreationTimestamp="2026-01-28 17:32:03 +0000 UTC" firstStartedPulling="2026-01-28 17:32:04.896486889 +0000 UTC m=+6417.650850462" lastFinishedPulling="2026-01-28 17:32:10.318961901 +0000 UTC m=+6423.073325484" observedRunningTime="2026-01-28 17:32:12.070822242 +0000 UTC m=+6424.825185825" watchObservedRunningTime="2026-01-28 17:32:12.080324779 +0000 UTC m=+6424.834688362" Jan 28 17:32:13 crc kubenswrapper[4811]: I0128 17:32:13.064112 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-26j9c" event={"ID":"7c66b660-0464-4966-b9e7-9002f3af9163","Type":"ContainerStarted","Data":"5414853f490e8a87c10c3d89a154ca1570c654860c27b517cfb1c4c4d29a80b1"} Jan 28 17:32:15 crc kubenswrapper[4811]: I0128 17:32:15.089477 4811 generic.go:334] 
"Generic (PLEG): container finished" podID="d47a9735-0b67-4c06-812c-07bd37e9002d" containerID="651b973e5990a6b7476d0762d53a2a169d8a1e830c576151f159af22bb494fe1" exitCode=0 Jan 28 17:32:15 crc kubenswrapper[4811]: I0128 17:32:15.089565 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-wbztr" event={"ID":"d47a9735-0b67-4c06-812c-07bd37e9002d","Type":"ContainerDied","Data":"651b973e5990a6b7476d0762d53a2a169d8a1e830c576151f159af22bb494fe1"} Jan 28 17:32:16 crc kubenswrapper[4811]: I0128 17:32:16.601044 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-wbztr" Jan 28 17:32:16 crc kubenswrapper[4811]: I0128 17:32:16.650855 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qfpwr\" (UniqueName: \"kubernetes.io/projected/d47a9735-0b67-4c06-812c-07bd37e9002d-kube-api-access-qfpwr\") pod \"d47a9735-0b67-4c06-812c-07bd37e9002d\" (UID: \"d47a9735-0b67-4c06-812c-07bd37e9002d\") " Jan 28 17:32:16 crc kubenswrapper[4811]: I0128 17:32:16.650973 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d47a9735-0b67-4c06-812c-07bd37e9002d-combined-ca-bundle\") pod \"d47a9735-0b67-4c06-812c-07bd37e9002d\" (UID: \"d47a9735-0b67-4c06-812c-07bd37e9002d\") " Jan 28 17:32:16 crc kubenswrapper[4811]: I0128 17:32:16.651005 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d47a9735-0b67-4c06-812c-07bd37e9002d-config-data\") pod \"d47a9735-0b67-4c06-812c-07bd37e9002d\" (UID: \"d47a9735-0b67-4c06-812c-07bd37e9002d\") " Jan 28 17:32:16 crc kubenswrapper[4811]: I0128 17:32:16.651227 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/d47a9735-0b67-4c06-812c-07bd37e9002d-job-config-data\") pod \"d47a9735-0b67-4c06-812c-07bd37e9002d\" (UID: \"d47a9735-0b67-4c06-812c-07bd37e9002d\") " Jan 28 17:32:16 crc kubenswrapper[4811]: I0128 17:32:16.661748 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d47a9735-0b67-4c06-812c-07bd37e9002d-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "d47a9735-0b67-4c06-812c-07bd37e9002d" (UID: "d47a9735-0b67-4c06-812c-07bd37e9002d"). InnerVolumeSpecName "job-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:32:16 crc kubenswrapper[4811]: I0128 17:32:16.661871 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d47a9735-0b67-4c06-812c-07bd37e9002d-kube-api-access-qfpwr" (OuterVolumeSpecName: "kube-api-access-qfpwr") pod "d47a9735-0b67-4c06-812c-07bd37e9002d" (UID: "d47a9735-0b67-4c06-812c-07bd37e9002d"). InnerVolumeSpecName "kube-api-access-qfpwr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:32:16 crc kubenswrapper[4811]: I0128 17:32:16.680758 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d47a9735-0b67-4c06-812c-07bd37e9002d-config-data" (OuterVolumeSpecName: "config-data") pod "d47a9735-0b67-4c06-812c-07bd37e9002d" (UID: "d47a9735-0b67-4c06-812c-07bd37e9002d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:32:16 crc kubenswrapper[4811]: I0128 17:32:16.684766 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d47a9735-0b67-4c06-812c-07bd37e9002d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d47a9735-0b67-4c06-812c-07bd37e9002d" (UID: "d47a9735-0b67-4c06-812c-07bd37e9002d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:32:16 crc kubenswrapper[4811]: I0128 17:32:16.754551 4811 reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/d47a9735-0b67-4c06-812c-07bd37e9002d-job-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 17:32:16 crc kubenswrapper[4811]: I0128 17:32:16.754605 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qfpwr\" (UniqueName: \"kubernetes.io/projected/d47a9735-0b67-4c06-812c-07bd37e9002d-kube-api-access-qfpwr\") on node \"crc\" DevicePath \"\"" Jan 28 17:32:16 crc kubenswrapper[4811]: I0128 17:32:16.754615 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d47a9735-0b67-4c06-812c-07bd37e9002d-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 17:32:16 crc kubenswrapper[4811]: I0128 17:32:16.754623 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d47a9735-0b67-4c06-812c-07bd37e9002d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.109078 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-wbztr" event={"ID":"d47a9735-0b67-4c06-812c-07bd37e9002d","Type":"ContainerDied","Data":"382ef690218aac25bf8a0f34b7af6e0b1123266b4496603bb9d877c291c84ba0"} Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.109125 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="382ef690218aac25bf8a0f34b7af6e0b1123266b4496603bb9d877c291c84ba0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.109136 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-wbztr" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.488739 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Jan 28 17:32:17 crc kubenswrapper[4811]: E0128 17:32:17.489218 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d47a9735-0b67-4c06-812c-07bd37e9002d" containerName="manila-db-sync" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.489232 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d47a9735-0b67-4c06-812c-07bd37e9002d" containerName="manila-db-sync" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.489427 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d47a9735-0b67-4c06-812c-07bd37e9002d" containerName="manila-db-sync" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.490551 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.498191 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scripts" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.498380 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.498531 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-rcfcx" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.498666 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.527060 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.529273 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.542636 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.571343 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e98f0d31-d277-4641-af76-90f345e51dba-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"e98f0d31-d277-4641-af76-90f345e51dba\") " pod="openstack/manila-share-share1-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.571428 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e98f0d31-d277-4641-af76-90f345e51dba-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"e98f0d31-d277-4641-af76-90f345e51dba\") " pod="openstack/manila-share-share1-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.571503 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e98f0d31-d277-4641-af76-90f345e51dba-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"e98f0d31-d277-4641-af76-90f345e51dba\") " pod="openstack/manila-share-share1-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.571545 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/e98f0d31-d277-4641-af76-90f345e51dba-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"e98f0d31-d277-4641-af76-90f345e51dba\") " pod="openstack/manila-share-share1-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.571618 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e98f0d31-d277-4641-af76-90f345e51dba-config-data\") pod \"manila-share-share1-0\" (UID: \"e98f0d31-d277-4641-af76-90f345e51dba\") " pod="openstack/manila-share-share1-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.571672 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bjwv\" (UniqueName: \"kubernetes.io/projected/e98f0d31-d277-4641-af76-90f345e51dba-kube-api-access-2bjwv\") pod \"manila-share-share1-0\" (UID: 
\"e98f0d31-d277-4641-af76-90f345e51dba\") " pod="openstack/manila-share-share1-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.571705 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e98f0d31-d277-4641-af76-90f345e51dba-scripts\") pod \"manila-share-share1-0\" (UID: \"e98f0d31-d277-4641-af76-90f345e51dba\") " pod="openstack/manila-share-share1-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.571751 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e98f0d31-d277-4641-af76-90f345e51dba-ceph\") pod \"manila-share-share1-0\" (UID: \"e98f0d31-d277-4641-af76-90f345e51dba\") " pod="openstack/manila-share-share1-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.581757 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.611517 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.664537 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6f9f967d5-82cqp"] Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.667962 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6f9f967d5-82cqp" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.683991 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c60281fa-f2b2-4062-a046-61f47a004598-scripts\") pod \"manila-scheduler-0\" (UID: \"c60281fa-f2b2-4062-a046-61f47a004598\") " pod="openstack/manila-scheduler-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.684057 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c60281fa-f2b2-4062-a046-61f47a004598-config-data\") pod \"manila-scheduler-0\" (UID: \"c60281fa-f2b2-4062-a046-61f47a004598\") " pod="openstack/manila-scheduler-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.684116 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e98f0d31-d277-4641-af76-90f345e51dba-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"e98f0d31-d277-4641-af76-90f345e51dba\") " pod="openstack/manila-share-share1-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.684162 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c60281fa-f2b2-4062-a046-61f47a004598-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"c60281fa-f2b2-4062-a046-61f47a004598\") " pod="openstack/manila-scheduler-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.684212 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c60281fa-f2b2-4062-a046-61f47a004598-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"c60281fa-f2b2-4062-a046-61f47a004598\") " pod="openstack/manila-scheduler-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.684291 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e98f0d31-d277-4641-af76-90f345e51dba-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"e98f0d31-d277-4641-af76-90f345e51dba\") " pod="openstack/manila-share-share1-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.684340 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c60281fa-f2b2-4062-a046-61f47a004598-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"c60281fa-f2b2-4062-a046-61f47a004598\") " pod="openstack/manila-scheduler-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.684388 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e98f0d31-d277-4641-af76-90f345e51dba-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"e98f0d31-d277-4641-af76-90f345e51dba\") " pod="openstack/manila-share-share1-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.684452 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/e98f0d31-d277-4641-af76-90f345e51dba-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"e98f0d31-d277-4641-af76-90f345e51dba\") " pod="openstack/manila-share-share1-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.684565 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b45tv\" (UniqueName: \"kubernetes.io/projected/c60281fa-f2b2-4062-a046-61f47a004598-kube-api-access-b45tv\") pod \"manila-scheduler-0\" (UID: \"c60281fa-f2b2-4062-a046-61f47a004598\") " pod="openstack/manila-scheduler-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.684668 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/e98f0d31-d277-4641-af76-90f345e51dba-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"e98f0d31-d277-4641-af76-90f345e51dba\") " pod="openstack/manila-share-share1-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.684685 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e98f0d31-d277-4641-af76-90f345e51dba-config-data\") pod \"manila-share-share1-0\" (UID: \"e98f0d31-d277-4641-af76-90f345e51dba\") " pod="openstack/manila-share-share1-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.684728 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e98f0d31-d277-4641-af76-90f345e51dba-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"e98f0d31-d277-4641-af76-90f345e51dba\") " pod="openstack/manila-share-share1-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.684914 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bjwv\" (UniqueName: \"kubernetes.io/projected/e98f0d31-d277-4641-af76-90f345e51dba-kube-api-access-2bjwv\") pod \"manila-share-share1-0\" (UID: \"e98f0d31-d277-4641-af76-90f345e51dba\") " pod="openstack/manila-share-share1-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.684967 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e98f0d31-d277-4641-af76-90f345e51dba-scripts\") pod \"manila-share-share1-0\" (UID: 
\"e98f0d31-d277-4641-af76-90f345e51dba\") " pod="openstack/manila-share-share1-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.685044 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e98f0d31-d277-4641-af76-90f345e51dba-ceph\") pod \"manila-share-share1-0\" (UID: \"e98f0d31-d277-4641-af76-90f345e51dba\") " pod="openstack/manila-share-share1-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.691139 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e98f0d31-d277-4641-af76-90f345e51dba-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"e98f0d31-d277-4641-af76-90f345e51dba\") " pod="openstack/manila-share-share1-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.695099 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e98f0d31-d277-4641-af76-90f345e51dba-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"e98f0d31-d277-4641-af76-90f345e51dba\") " pod="openstack/manila-share-share1-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.695609 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e98f0d31-d277-4641-af76-90f345e51dba-ceph\") pod \"manila-share-share1-0\" (UID: \"e98f0d31-d277-4641-af76-90f345e51dba\") " pod="openstack/manila-share-share1-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.696674 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e98f0d31-d277-4641-af76-90f345e51dba-config-data\") pod \"manila-share-share1-0\" (UID: \"e98f0d31-d277-4641-af76-90f345e51dba\") " pod="openstack/manila-share-share1-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.706017 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e98f0d31-d277-4641-af76-90f345e51dba-scripts\") pod \"manila-share-share1-0\" (UID: \"e98f0d31-d277-4641-af76-90f345e51dba\") " pod="openstack/manila-share-share1-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.792413 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c60281fa-f2b2-4062-a046-61f47a004598-scripts\") pod \"manila-scheduler-0\" (UID: \"c60281fa-f2b2-4062-a046-61f47a004598\") " pod="openstack/manila-scheduler-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.836655 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c60281fa-f2b2-4062-a046-61f47a004598-config-data\") pod \"manila-scheduler-0\" (UID: \"c60281fa-f2b2-4062-a046-61f47a004598\") " pod="openstack/manila-scheduler-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.792572 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6f9f967d5-82cqp"] Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.836809 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c60281fa-f2b2-4062-a046-61f47a004598-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"c60281fa-f2b2-4062-a046-61f47a004598\") " pod="openstack/manila-scheduler-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.836890 4811 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c60281fa-f2b2-4062-a046-61f47a004598-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"c60281fa-f2b2-4062-a046-61f47a004598\") " pod="openstack/manila-scheduler-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.837113 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c60281fa-f2b2-4062-a046-61f47a004598-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"c60281fa-f2b2-4062-a046-61f47a004598\") " pod="openstack/manila-scheduler-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.837253 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/23837f74-0222-4fe3-bc66-0df3b1026530-ovsdbserver-nb\") pod \"dnsmasq-dns-6f9f967d5-82cqp\" (UID: \"23837f74-0222-4fe3-bc66-0df3b1026530\") " pod="openstack/dnsmasq-dns-6f9f967d5-82cqp" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.837303 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/23837f74-0222-4fe3-bc66-0df3b1026530-dns-svc\") pod \"dnsmasq-dns-6f9f967d5-82cqp\" (UID: \"23837f74-0222-4fe3-bc66-0df3b1026530\") " pod="openstack/dnsmasq-dns-6f9f967d5-82cqp" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.837328 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/23837f74-0222-4fe3-bc66-0df3b1026530-ovsdbserver-sb\") pod \"dnsmasq-dns-6f9f967d5-82cqp\" (UID: \"23837f74-0222-4fe3-bc66-0df3b1026530\") " pod="openstack/dnsmasq-dns-6f9f967d5-82cqp" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.837366 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b45tv\" (UniqueName: \"kubernetes.io/projected/c60281fa-f2b2-4062-a046-61f47a004598-kube-api-access-b45tv\") pod \"manila-scheduler-0\" (UID: \"c60281fa-f2b2-4062-a046-61f47a004598\") " pod="openstack/manila-scheduler-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.837507 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wfk2x\" (UniqueName: \"kubernetes.io/projected/23837f74-0222-4fe3-bc66-0df3b1026530-kube-api-access-wfk2x\") pod \"dnsmasq-dns-6f9f967d5-82cqp\" (UID: \"23837f74-0222-4fe3-bc66-0df3b1026530\") " pod="openstack/dnsmasq-dns-6f9f967d5-82cqp" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.837564 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/23837f74-0222-4fe3-bc66-0df3b1026530-config\") pod \"dnsmasq-dns-6f9f967d5-82cqp\" (UID: \"23837f74-0222-4fe3-bc66-0df3b1026530\") " pod="openstack/dnsmasq-dns-6f9f967d5-82cqp" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.826704 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bjwv\" (UniqueName: \"kubernetes.io/projected/e98f0d31-d277-4641-af76-90f345e51dba-kube-api-access-2bjwv\") pod \"manila-share-share1-0\" (UID: \"e98f0d31-d277-4641-af76-90f345e51dba\") " pod="openstack/manila-share-share1-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.837901 4811 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c60281fa-f2b2-4062-a046-61f47a004598-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"c60281fa-f2b2-4062-a046-61f47a004598\") " pod="openstack/manila-scheduler-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.846806 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c60281fa-f2b2-4062-a046-61f47a004598-scripts\") pod \"manila-scheduler-0\" (UID: \"c60281fa-f2b2-4062-a046-61f47a004598\") " pod="openstack/manila-scheduler-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.847519 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c60281fa-f2b2-4062-a046-61f47a004598-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"c60281fa-f2b2-4062-a046-61f47a004598\") " pod="openstack/manila-scheduler-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.850471 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.857067 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c60281fa-f2b2-4062-a046-61f47a004598-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"c60281fa-f2b2-4062-a046-61f47a004598\") " pod="openstack/manila-scheduler-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.896357 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c60281fa-f2b2-4062-a046-61f47a004598-config-data\") pod \"manila-scheduler-0\" (UID: \"c60281fa-f2b2-4062-a046-61f47a004598\") " pod="openstack/manila-scheduler-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.907145 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b45tv\" (UniqueName: \"kubernetes.io/projected/c60281fa-f2b2-4062-a046-61f47a004598-kube-api-access-b45tv\") pod \"manila-scheduler-0\" (UID: \"c60281fa-f2b2-4062-a046-61f47a004598\") " pod="openstack/manila-scheduler-0" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.944510 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/23837f74-0222-4fe3-bc66-0df3b1026530-ovsdbserver-nb\") pod \"dnsmasq-dns-6f9f967d5-82cqp\" (UID: \"23837f74-0222-4fe3-bc66-0df3b1026530\") " pod="openstack/dnsmasq-dns-6f9f967d5-82cqp" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.944556 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/23837f74-0222-4fe3-bc66-0df3b1026530-dns-svc\") pod \"dnsmasq-dns-6f9f967d5-82cqp\" (UID: \"23837f74-0222-4fe3-bc66-0df3b1026530\") " pod="openstack/dnsmasq-dns-6f9f967d5-82cqp" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.944588 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/23837f74-0222-4fe3-bc66-0df3b1026530-ovsdbserver-sb\") pod \"dnsmasq-dns-6f9f967d5-82cqp\" (UID: \"23837f74-0222-4fe3-bc66-0df3b1026530\") " pod="openstack/dnsmasq-dns-6f9f967d5-82cqp" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.944644 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-wfk2x\" (UniqueName: \"kubernetes.io/projected/23837f74-0222-4fe3-bc66-0df3b1026530-kube-api-access-wfk2x\") pod \"dnsmasq-dns-6f9f967d5-82cqp\" (UID: \"23837f74-0222-4fe3-bc66-0df3b1026530\") " pod="openstack/dnsmasq-dns-6f9f967d5-82cqp" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.944669 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/23837f74-0222-4fe3-bc66-0df3b1026530-config\") pod \"dnsmasq-dns-6f9f967d5-82cqp\" (UID: \"23837f74-0222-4fe3-bc66-0df3b1026530\") " pod="openstack/dnsmasq-dns-6f9f967d5-82cqp" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.945788 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/23837f74-0222-4fe3-bc66-0df3b1026530-config\") pod \"dnsmasq-dns-6f9f967d5-82cqp\" (UID: \"23837f74-0222-4fe3-bc66-0df3b1026530\") " pod="openstack/dnsmasq-dns-6f9f967d5-82cqp" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.946301 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/23837f74-0222-4fe3-bc66-0df3b1026530-dns-svc\") pod \"dnsmasq-dns-6f9f967d5-82cqp\" (UID: \"23837f74-0222-4fe3-bc66-0df3b1026530\") " pod="openstack/dnsmasq-dns-6f9f967d5-82cqp" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.946827 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/23837f74-0222-4fe3-bc66-0df3b1026530-ovsdbserver-nb\") pod \"dnsmasq-dns-6f9f967d5-82cqp\" (UID: \"23837f74-0222-4fe3-bc66-0df3b1026530\") " pod="openstack/dnsmasq-dns-6f9f967d5-82cqp" Jan 28 17:32:17 crc kubenswrapper[4811]: I0128 17:32:17.947544 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/23837f74-0222-4fe3-bc66-0df3b1026530-ovsdbserver-sb\") pod \"dnsmasq-dns-6f9f967d5-82cqp\" (UID: \"23837f74-0222-4fe3-bc66-0df3b1026530\") " pod="openstack/dnsmasq-dns-6f9f967d5-82cqp" Jan 28 17:32:18 crc kubenswrapper[4811]: I0128 17:32:17.987848 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wfk2x\" (UniqueName: \"kubernetes.io/projected/23837f74-0222-4fe3-bc66-0df3b1026530-kube-api-access-wfk2x\") pod \"dnsmasq-dns-6f9f967d5-82cqp\" (UID: \"23837f74-0222-4fe3-bc66-0df3b1026530\") " pod="openstack/dnsmasq-dns-6f9f967d5-82cqp" Jan 28 17:32:18 crc kubenswrapper[4811]: I0128 17:32:18.164600 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"] Jan 28 17:32:18 crc kubenswrapper[4811]: I0128 17:32:18.167809 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Jan 28 17:32:18 crc kubenswrapper[4811]: I0128 17:32:18.178127 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Jan 28 17:32:18 crc kubenswrapper[4811]: I0128 17:32:18.179263 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data" Jan 28 17:32:18 crc kubenswrapper[4811]: I0128 17:32:18.188954 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Jan 28 17:32:18 crc kubenswrapper[4811]: I0128 17:32:18.265962 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be0c37b7-c1d7-415e-9dc2-2351411340ca-logs\") pod \"manila-api-0\" (UID: \"be0c37b7-c1d7-415e-9dc2-2351411340ca\") " pod="openstack/manila-api-0" Jan 28 17:32:18 crc kubenswrapper[4811]: I0128 17:32:18.266070 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be0c37b7-c1d7-415e-9dc2-2351411340ca-config-data\") pod \"manila-api-0\" (UID: \"be0c37b7-c1d7-415e-9dc2-2351411340ca\") " pod="openstack/manila-api-0" Jan 28 17:32:18 crc kubenswrapper[4811]: I0128 17:32:18.266106 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be0c37b7-c1d7-415e-9dc2-2351411340ca-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"be0c37b7-c1d7-415e-9dc2-2351411340ca\") " pod="openstack/manila-api-0" Jan 28 17:32:18 crc kubenswrapper[4811]: I0128 17:32:18.266136 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8pzqp\" (UniqueName: \"kubernetes.io/projected/be0c37b7-c1d7-415e-9dc2-2351411340ca-kube-api-access-8pzqp\") pod \"manila-api-0\" (UID: \"be0c37b7-c1d7-415e-9dc2-2351411340ca\") " pod="openstack/manila-api-0" Jan 28 17:32:18 crc kubenswrapper[4811]: I0128 17:32:18.266155 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/be0c37b7-c1d7-415e-9dc2-2351411340ca-config-data-custom\") pod \"manila-api-0\" (UID: \"be0c37b7-c1d7-415e-9dc2-2351411340ca\") " pod="openstack/manila-api-0" Jan 28 17:32:18 crc kubenswrapper[4811]: I0128 17:32:18.266181 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be0c37b7-c1d7-415e-9dc2-2351411340ca-scripts\") pod \"manila-api-0\" (UID: \"be0c37b7-c1d7-415e-9dc2-2351411340ca\") " pod="openstack/manila-api-0" Jan 28 17:32:18 crc kubenswrapper[4811]: I0128 17:32:18.266218 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/be0c37b7-c1d7-415e-9dc2-2351411340ca-etc-machine-id\") pod \"manila-api-0\" (UID: \"be0c37b7-c1d7-415e-9dc2-2351411340ca\") " pod="openstack/manila-api-0" Jan 28 17:32:18 crc kubenswrapper[4811]: I0128 17:32:18.286057 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6f9f967d5-82cqp" Jan 28 17:32:18 crc kubenswrapper[4811]: I0128 17:32:18.368858 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/be0c37b7-c1d7-415e-9dc2-2351411340ca-etc-machine-id\") pod \"manila-api-0\" (UID: \"be0c37b7-c1d7-415e-9dc2-2351411340ca\") " pod="openstack/manila-api-0" Jan 28 17:32:18 crc kubenswrapper[4811]: I0128 17:32:18.369039 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be0c37b7-c1d7-415e-9dc2-2351411340ca-logs\") pod \"manila-api-0\" (UID: \"be0c37b7-c1d7-415e-9dc2-2351411340ca\") " pod="openstack/manila-api-0" Jan 28 17:32:18 crc kubenswrapper[4811]: I0128 17:32:18.369172 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be0c37b7-c1d7-415e-9dc2-2351411340ca-config-data\") pod \"manila-api-0\" (UID: \"be0c37b7-c1d7-415e-9dc2-2351411340ca\") " pod="openstack/manila-api-0" Jan 28 17:32:18 crc kubenswrapper[4811]: I0128 17:32:18.369208 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be0c37b7-c1d7-415e-9dc2-2351411340ca-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"be0c37b7-c1d7-415e-9dc2-2351411340ca\") " pod="openstack/manila-api-0" Jan 28 17:32:18 crc kubenswrapper[4811]: I0128 17:32:18.369237 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8pzqp\" (UniqueName: \"kubernetes.io/projected/be0c37b7-c1d7-415e-9dc2-2351411340ca-kube-api-access-8pzqp\") pod \"manila-api-0\" (UID: \"be0c37b7-c1d7-415e-9dc2-2351411340ca\") " pod="openstack/manila-api-0" Jan 28 17:32:18 crc kubenswrapper[4811]: I0128 17:32:18.369263 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/be0c37b7-c1d7-415e-9dc2-2351411340ca-config-data-custom\") pod \"manila-api-0\" (UID: \"be0c37b7-c1d7-415e-9dc2-2351411340ca\") " pod="openstack/manila-api-0" Jan 28 17:32:18 crc kubenswrapper[4811]: I0128 17:32:18.369291 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be0c37b7-c1d7-415e-9dc2-2351411340ca-scripts\") pod \"manila-api-0\" (UID: \"be0c37b7-c1d7-415e-9dc2-2351411340ca\") " pod="openstack/manila-api-0" Jan 28 17:32:18 crc kubenswrapper[4811]: I0128 17:32:18.373104 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/be0c37b7-c1d7-415e-9dc2-2351411340ca-etc-machine-id\") pod \"manila-api-0\" (UID: \"be0c37b7-c1d7-415e-9dc2-2351411340ca\") " pod="openstack/manila-api-0" Jan 28 17:32:18 crc kubenswrapper[4811]: I0128 17:32:18.374308 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be0c37b7-c1d7-415e-9dc2-2351411340ca-logs\") pod \"manila-api-0\" (UID: \"be0c37b7-c1d7-415e-9dc2-2351411340ca\") " pod="openstack/manila-api-0" Jan 28 17:32:18 crc kubenswrapper[4811]: I0128 17:32:18.381056 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be0c37b7-c1d7-415e-9dc2-2351411340ca-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"be0c37b7-c1d7-415e-9dc2-2351411340ca\") " 
pod="openstack/manila-api-0" Jan 28 17:32:18 crc kubenswrapper[4811]: I0128 17:32:18.381839 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be0c37b7-c1d7-415e-9dc2-2351411340ca-config-data\") pod \"manila-api-0\" (UID: \"be0c37b7-c1d7-415e-9dc2-2351411340ca\") " pod="openstack/manila-api-0" Jan 28 17:32:18 crc kubenswrapper[4811]: I0128 17:32:18.382051 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/be0c37b7-c1d7-415e-9dc2-2351411340ca-config-data-custom\") pod \"manila-api-0\" (UID: \"be0c37b7-c1d7-415e-9dc2-2351411340ca\") " pod="openstack/manila-api-0" Jan 28 17:32:18 crc kubenswrapper[4811]: I0128 17:32:18.389357 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be0c37b7-c1d7-415e-9dc2-2351411340ca-scripts\") pod \"manila-api-0\" (UID: \"be0c37b7-c1d7-415e-9dc2-2351411340ca\") " pod="openstack/manila-api-0" Jan 28 17:32:18 crc kubenswrapper[4811]: I0128 17:32:18.405492 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8pzqp\" (UniqueName: \"kubernetes.io/projected/be0c37b7-c1d7-415e-9dc2-2351411340ca-kube-api-access-8pzqp\") pod \"manila-api-0\" (UID: \"be0c37b7-c1d7-415e-9dc2-2351411340ca\") " pod="openstack/manila-api-0" Jan 28 17:32:18 crc kubenswrapper[4811]: I0128 17:32:18.594069 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Jan 28 17:32:19 crc kubenswrapper[4811]: I0128 17:32:19.117523 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6f9f967d5-82cqp"] Jan 28 17:32:19 crc kubenswrapper[4811]: I0128 17:32:19.124649 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Jan 28 17:32:19 crc kubenswrapper[4811]: I0128 17:32:19.151498 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f9f967d5-82cqp" event={"ID":"23837f74-0222-4fe3-bc66-0df3b1026530","Type":"ContainerStarted","Data":"59fd988928963643d32df5358fa5877b54decd8fdc77070dbf153e0341b979fa"} Jan 28 17:32:20 crc kubenswrapper[4811]: I0128 17:32:20.161123 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"c60281fa-f2b2-4062-a046-61f47a004598","Type":"ContainerStarted","Data":"89cb2944d03d52a39c1bf269f765b4573bd9129ef1b8efeff78b1999dd2ed2dd"} Jan 28 17:32:20 crc kubenswrapper[4811]: I0128 17:32:20.162951 4811 generic.go:334] "Generic (PLEG): container finished" podID="23837f74-0222-4fe3-bc66-0df3b1026530" containerID="7e1fac28c2dfc8c08102e6edcb9ef5127659ba0f0154377e07d61ff5d1485af6" exitCode=0 Jan 28 17:32:20 crc kubenswrapper[4811]: I0128 17:32:20.163033 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f9f967d5-82cqp" event={"ID":"23837f74-0222-4fe3-bc66-0df3b1026530","Type":"ContainerDied","Data":"7e1fac28c2dfc8c08102e6edcb9ef5127659ba0f0154377e07d61ff5d1485af6"} Jan 28 17:32:20 crc kubenswrapper[4811]: I0128 17:32:20.209273 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Jan 28 17:32:20 crc kubenswrapper[4811]: I0128 17:32:20.340521 4811 scope.go:117] "RemoveContainer" containerID="c8f31485a8ab10a9c157ae14e2cb2f7e512fa0e9a55fee158caf88525ec54b35" Jan 28 17:32:20 crc kubenswrapper[4811]: E0128 17:32:20.341068 4811 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:32:21 crc kubenswrapper[4811]: W0128 17:32:21.121755 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbe0c37b7_c1d7_415e_9dc2_2351411340ca.slice/crio-b06df688bc8509ed740b8c112974552f52cb41fdd30b7b293e71e33b957422e1 WatchSource:0}: Error finding container b06df688bc8509ed740b8c112974552f52cb41fdd30b7b293e71e33b957422e1: Status 404 returned error can't find the container with id b06df688bc8509ed740b8c112974552f52cb41fdd30b7b293e71e33b957422e1 Jan 28 17:32:21 crc kubenswrapper[4811]: I0128 17:32:21.123108 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Jan 28 17:32:21 crc kubenswrapper[4811]: I0128 17:32:21.197699 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f9f967d5-82cqp" event={"ID":"23837f74-0222-4fe3-bc66-0df3b1026530","Type":"ContainerStarted","Data":"89ca4d0a0886e46907dde1eb000a5e6d1dff2921820dae44a4f44073178f8a31"} Jan 28 17:32:21 crc kubenswrapper[4811]: I0128 17:32:21.198179 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6f9f967d5-82cqp" Jan 28 17:32:21 crc kubenswrapper[4811]: I0128 17:32:21.209463 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"be0c37b7-c1d7-415e-9dc2-2351411340ca","Type":"ContainerStarted","Data":"b06df688bc8509ed740b8c112974552f52cb41fdd30b7b293e71e33b957422e1"} Jan 28 17:32:21 crc kubenswrapper[4811]: I0128 17:32:21.214093 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"e98f0d31-d277-4641-af76-90f345e51dba","Type":"ContainerStarted","Data":"b9a93cb64b6262a36f23763a3099d58812ec5f2313e228afdd4a9020334c8c2f"} Jan 28 17:32:21 crc kubenswrapper[4811]: I0128 17:32:21.230191 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6f9f967d5-82cqp" podStartSLOduration=4.230168136 podStartE2EDuration="4.230168136s" podCreationTimestamp="2026-01-28 17:32:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:32:21.221645706 +0000 UTC m=+6433.976009299" watchObservedRunningTime="2026-01-28 17:32:21.230168136 +0000 UTC m=+6433.984531739" Jan 28 17:32:22 crc kubenswrapper[4811]: I0128 17:32:22.225160 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"c60281fa-f2b2-4062-a046-61f47a004598","Type":"ContainerStarted","Data":"bec93b5f8b2ba23a733b3e91f44ddf391276bce5e5e70e341af6da4ed875fb87"} Jan 28 17:32:22 crc kubenswrapper[4811]: I0128 17:32:22.228299 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"be0c37b7-c1d7-415e-9dc2-2351411340ca","Type":"ContainerStarted","Data":"c954588d5a876d269d0a60586c86fc1584a3a077d02211fa8bb4bb1fe96a05f6"} Jan 28 17:32:23 crc kubenswrapper[4811]: I0128 17:32:23.253252 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" 
event={"ID":"be0c37b7-c1d7-415e-9dc2-2351411340ca","Type":"ContainerStarted","Data":"5070be5c4f4ec67a787912f8b404a30a1fec083541eb69076801b8c57a1a52a5"} Jan 28 17:32:23 crc kubenswrapper[4811]: I0128 17:32:23.254626 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0" Jan 28 17:32:23 crc kubenswrapper[4811]: I0128 17:32:23.258746 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"c60281fa-f2b2-4062-a046-61f47a004598","Type":"ContainerStarted","Data":"0a39381d2d92666c974218ff3eab5c2752605b8f5804b4fbfb64f9dfad8d57f5"} Jan 28 17:32:23 crc kubenswrapper[4811]: I0128 17:32:23.291469 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=5.291446188 podStartE2EDuration="5.291446188s" podCreationTimestamp="2026-01-28 17:32:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:32:23.272299332 +0000 UTC m=+6436.026662915" watchObservedRunningTime="2026-01-28 17:32:23.291446188 +0000 UTC m=+6436.045809771" Jan 28 17:32:23 crc kubenswrapper[4811]: I0128 17:32:23.306618 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=4.604103392 podStartE2EDuration="6.306590548s" podCreationTimestamp="2026-01-28 17:32:17 +0000 UTC" firstStartedPulling="2026-01-28 17:32:19.144342361 +0000 UTC m=+6431.898705944" lastFinishedPulling="2026-01-28 17:32:20.846829527 +0000 UTC m=+6433.601193100" observedRunningTime="2026-01-28 17:32:23.295696813 +0000 UTC m=+6436.050060406" watchObservedRunningTime="2026-01-28 17:32:23.306590548 +0000 UTC m=+6436.060954131" Jan 28 17:32:26 crc kubenswrapper[4811]: I0128 17:32:26.293139 4811 generic.go:334] "Generic (PLEG): container finished" podID="7c66b660-0464-4966-b9e7-9002f3af9163" containerID="5414853f490e8a87c10c3d89a154ca1570c654860c27b517cfb1c4c4d29a80b1" exitCode=0 Jan 28 17:32:26 crc kubenswrapper[4811]: I0128 17:32:26.293221 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-26j9c" event={"ID":"7c66b660-0464-4966-b9e7-9002f3af9163","Type":"ContainerDied","Data":"5414853f490e8a87c10c3d89a154ca1570c654860c27b517cfb1c4c4d29a80b1"} Jan 28 17:32:28 crc kubenswrapper[4811]: I0128 17:32:28.179685 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0" Jan 28 17:32:28 crc kubenswrapper[4811]: I0128 17:32:28.287940 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6f9f967d5-82cqp" Jan 28 17:32:28 crc kubenswrapper[4811]: I0128 17:32:28.627246 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86bcb6d7c9-hdsrp"] Jan 28 17:32:28 crc kubenswrapper[4811]: I0128 17:32:28.627562 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86bcb6d7c9-hdsrp" podUID="c71148c2-9145-4207-841f-f4931e513205" containerName="dnsmasq-dns" containerID="cri-o://60cc5aaaf3f8c97762ef3c84c3fd38fb6521e312d568175c1f86c0dede5d2b4a" gracePeriod=10 Jan 28 17:32:29 crc kubenswrapper[4811]: I0128 17:32:29.325360 4811 generic.go:334] "Generic (PLEG): container finished" podID="c71148c2-9145-4207-841f-f4931e513205" containerID="60cc5aaaf3f8c97762ef3c84c3fd38fb6521e312d568175c1f86c0dede5d2b4a" exitCode=0 Jan 28 17:32:29 crc kubenswrapper[4811]: I0128 
17:32:29.325472 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86bcb6d7c9-hdsrp" event={"ID":"c71148c2-9145-4207-841f-f4931e513205","Type":"ContainerDied","Data":"60cc5aaaf3f8c97762ef3c84c3fd38fb6521e312d568175c1f86c0dede5d2b4a"} Jan 28 17:32:29 crc kubenswrapper[4811]: I0128 17:32:29.604117 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86bcb6d7c9-hdsrp" Jan 28 17:32:29 crc kubenswrapper[4811]: I0128 17:32:29.751472 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c71148c2-9145-4207-841f-f4931e513205-ovsdbserver-nb\") pod \"c71148c2-9145-4207-841f-f4931e513205\" (UID: \"c71148c2-9145-4207-841f-f4931e513205\") " Jan 28 17:32:29 crc kubenswrapper[4811]: I0128 17:32:29.751801 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c71148c2-9145-4207-841f-f4931e513205-dns-svc\") pod \"c71148c2-9145-4207-841f-f4931e513205\" (UID: \"c71148c2-9145-4207-841f-f4931e513205\") " Jan 28 17:32:29 crc kubenswrapper[4811]: I0128 17:32:29.751921 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c71148c2-9145-4207-841f-f4931e513205-ovsdbserver-sb\") pod \"c71148c2-9145-4207-841f-f4931e513205\" (UID: \"c71148c2-9145-4207-841f-f4931e513205\") " Jan 28 17:32:29 crc kubenswrapper[4811]: I0128 17:32:29.752015 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c71148c2-9145-4207-841f-f4931e513205-config\") pod \"c71148c2-9145-4207-841f-f4931e513205\" (UID: \"c71148c2-9145-4207-841f-f4931e513205\") " Jan 28 17:32:29 crc kubenswrapper[4811]: I0128 17:32:29.752043 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxxd5\" (UniqueName: \"kubernetes.io/projected/c71148c2-9145-4207-841f-f4931e513205-kube-api-access-wxxd5\") pod \"c71148c2-9145-4207-841f-f4931e513205\" (UID: \"c71148c2-9145-4207-841f-f4931e513205\") " Jan 28 17:32:29 crc kubenswrapper[4811]: I0128 17:32:29.762699 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c71148c2-9145-4207-841f-f4931e513205-kube-api-access-wxxd5" (OuterVolumeSpecName: "kube-api-access-wxxd5") pod "c71148c2-9145-4207-841f-f4931e513205" (UID: "c71148c2-9145-4207-841f-f4931e513205"). InnerVolumeSpecName "kube-api-access-wxxd5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:32:29 crc kubenswrapper[4811]: I0128 17:32:29.834034 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c71148c2-9145-4207-841f-f4931e513205-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c71148c2-9145-4207-841f-f4931e513205" (UID: "c71148c2-9145-4207-841f-f4931e513205"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:32:29 crc kubenswrapper[4811]: I0128 17:32:29.836667 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c71148c2-9145-4207-841f-f4931e513205-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c71148c2-9145-4207-841f-f4931e513205" (UID: "c71148c2-9145-4207-841f-f4931e513205"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:32:29 crc kubenswrapper[4811]: I0128 17:32:29.851887 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c71148c2-9145-4207-841f-f4931e513205-config" (OuterVolumeSpecName: "config") pod "c71148c2-9145-4207-841f-f4931e513205" (UID: "c71148c2-9145-4207-841f-f4931e513205"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:32:29 crc kubenswrapper[4811]: I0128 17:32:29.854933 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c71148c2-9145-4207-841f-f4931e513205-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 28 17:32:29 crc kubenswrapper[4811]: I0128 17:32:29.854962 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c71148c2-9145-4207-841f-f4931e513205-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 28 17:32:29 crc kubenswrapper[4811]: I0128 17:32:29.854974 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c71148c2-9145-4207-841f-f4931e513205-config\") on node \"crc\" DevicePath \"\"" Jan 28 17:32:29 crc kubenswrapper[4811]: I0128 17:32:29.854985 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxxd5\" (UniqueName: \"kubernetes.io/projected/c71148c2-9145-4207-841f-f4931e513205-kube-api-access-wxxd5\") on node \"crc\" DevicePath \"\"" Jan 28 17:32:29 crc kubenswrapper[4811]: I0128 17:32:29.871352 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c71148c2-9145-4207-841f-f4931e513205-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c71148c2-9145-4207-841f-f4931e513205" (UID: "c71148c2-9145-4207-841f-f4931e513205"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:32:29 crc kubenswrapper[4811]: I0128 17:32:29.957291 4811 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c71148c2-9145-4207-841f-f4931e513205-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 28 17:32:30 crc kubenswrapper[4811]: I0128 17:32:30.369190 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86bcb6d7c9-hdsrp" event={"ID":"c71148c2-9145-4207-841f-f4931e513205","Type":"ContainerDied","Data":"14dd51fca7e923d47708c7871d95b23c5ceabcbde212dfd1c4d579a1a5be1b3e"} Jan 28 17:32:30 crc kubenswrapper[4811]: I0128 17:32:30.369475 4811 scope.go:117] "RemoveContainer" containerID="60cc5aaaf3f8c97762ef3c84c3fd38fb6521e312d568175c1f86c0dede5d2b4a" Jan 28 17:32:30 crc kubenswrapper[4811]: I0128 17:32:30.369599 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86bcb6d7c9-hdsrp" Jan 28 17:32:30 crc kubenswrapper[4811]: I0128 17:32:30.377965 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"e98f0d31-d277-4641-af76-90f345e51dba","Type":"ContainerStarted","Data":"7d1a6c356ca77f82d7f4a69236f86160be3ebfee62925690f8cc33ba925f7c76"} Jan 28 17:32:30 crc kubenswrapper[4811]: I0128 17:32:30.392332 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-26j9c" event={"ID":"7c66b660-0464-4966-b9e7-9002f3af9163","Type":"ContainerStarted","Data":"4fbcf4cecc5e79e081598107ec4f89c682e98c12a1a93fbc46f95fb067230995"} Jan 28 17:32:30 crc kubenswrapper[4811]: I0128 17:32:30.439230 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-26j9c" podStartSLOduration=3.925345711 podStartE2EDuration="22.439212904s" podCreationTimestamp="2026-01-28 17:32:08 +0000 UTC" firstStartedPulling="2026-01-28 17:32:11.042512244 +0000 UTC m=+6423.796875827" lastFinishedPulling="2026-01-28 17:32:29.556379437 +0000 UTC m=+6442.310743020" observedRunningTime="2026-01-28 17:32:30.431105225 +0000 UTC m=+6443.185468818" watchObservedRunningTime="2026-01-28 17:32:30.439212904 +0000 UTC m=+6443.193576487" Jan 28 17:32:30 crc kubenswrapper[4811]: I0128 17:32:30.475390 4811 scope.go:117] "RemoveContainer" containerID="d1a6680bd18c4477ea31fa78055a07373055e9f9a05a978ec8fd598122b6bddf" Jan 28 17:32:30 crc kubenswrapper[4811]: I0128 17:32:30.476165 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86bcb6d7c9-hdsrp"] Jan 28 17:32:30 crc kubenswrapper[4811]: I0128 17:32:30.500127 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86bcb6d7c9-hdsrp"] Jan 28 17:32:31 crc kubenswrapper[4811]: I0128 17:32:31.340225 4811 scope.go:117] "RemoveContainer" containerID="c8f31485a8ab10a9c157ae14e2cb2f7e512fa0e9a55fee158caf88525ec54b35" Jan 28 17:32:31 crc kubenswrapper[4811]: E0128 17:32:31.340830 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:32:31 crc kubenswrapper[4811]: I0128 17:32:31.406543 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"e98f0d31-d277-4641-af76-90f345e51dba","Type":"ContainerStarted","Data":"4b92268adf853c8752dde6ecf9ecba9ab2c70ce80e1e01ee632dc72f735b604a"} Jan 28 17:32:31 crc kubenswrapper[4811]: I0128 17:32:31.446692 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=5.35411248 podStartE2EDuration="14.446673909s" podCreationTimestamp="2026-01-28 17:32:17 +0000 UTC" firstStartedPulling="2026-01-28 17:32:20.260886724 +0000 UTC m=+6433.015250307" lastFinishedPulling="2026-01-28 17:32:29.353448153 +0000 UTC m=+6442.107811736" observedRunningTime="2026-01-28 17:32:31.439182557 +0000 UTC m=+6444.193546170" watchObservedRunningTime="2026-01-28 17:32:31.446673909 +0000 UTC m=+6444.201037492" Jan 28 17:32:32 crc kubenswrapper[4811]: I0128 17:32:32.016684 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/ceilometer-0"] Jan 28 17:32:32 crc kubenswrapper[4811]: I0128 17:32:32.017069 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0605a185-82b4-481b-bbd7-55f0f8be336a" containerName="ceilometer-central-agent" containerID="cri-o://21a2faa2df26e1d62d280372aca8995472dcd1fe20923eed71d0d6056abbc8d3" gracePeriod=30 Jan 28 17:32:32 crc kubenswrapper[4811]: I0128 17:32:32.017624 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0605a185-82b4-481b-bbd7-55f0f8be336a" containerName="sg-core" containerID="cri-o://7bb168727e7fe79a7fc1401a09e96906f79980de59145bbf79c707fced87f1ff" gracePeriod=30 Jan 28 17:32:32 crc kubenswrapper[4811]: I0128 17:32:32.017818 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0605a185-82b4-481b-bbd7-55f0f8be336a" containerName="proxy-httpd" containerID="cri-o://93019c3d06784ba07e48a879a5ba7af774e83c32b2aca767f30ede7417600ac0" gracePeriod=30 Jan 28 17:32:32 crc kubenswrapper[4811]: I0128 17:32:32.017926 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0605a185-82b4-481b-bbd7-55f0f8be336a" containerName="ceilometer-notification-agent" containerID="cri-o://e48b88cbe30770ee1396310ed59d3a59cc30ffaf637c5737521cb527a19a37b1" gracePeriod=30 Jan 28 17:32:32 crc kubenswrapper[4811]: I0128 17:32:32.353175 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c71148c2-9145-4207-841f-f4931e513205" path="/var/lib/kubelet/pods/c71148c2-9145-4207-841f-f4931e513205/volumes" Jan 28 17:32:32 crc kubenswrapper[4811]: I0128 17:32:32.417871 4811 generic.go:334] "Generic (PLEG): container finished" podID="0605a185-82b4-481b-bbd7-55f0f8be336a" containerID="93019c3d06784ba07e48a879a5ba7af774e83c32b2aca767f30ede7417600ac0" exitCode=0 Jan 28 17:32:32 crc kubenswrapper[4811]: I0128 17:32:32.418982 4811 generic.go:334] "Generic (PLEG): container finished" podID="0605a185-82b4-481b-bbd7-55f0f8be336a" containerID="7bb168727e7fe79a7fc1401a09e96906f79980de59145bbf79c707fced87f1ff" exitCode=2 Jan 28 17:32:32 crc kubenswrapper[4811]: I0128 17:32:32.417914 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0605a185-82b4-481b-bbd7-55f0f8be336a","Type":"ContainerDied","Data":"93019c3d06784ba07e48a879a5ba7af774e83c32b2aca767f30ede7417600ac0"} Jan 28 17:32:32 crc kubenswrapper[4811]: I0128 17:32:32.419193 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0605a185-82b4-481b-bbd7-55f0f8be336a","Type":"ContainerDied","Data":"7bb168727e7fe79a7fc1401a09e96906f79980de59145bbf79c707fced87f1ff"} Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.044143 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.151216 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0605a185-82b4-481b-bbd7-55f0f8be336a-run-httpd\") pod \"0605a185-82b4-481b-bbd7-55f0f8be336a\" (UID: \"0605a185-82b4-481b-bbd7-55f0f8be336a\") " Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.151272 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0605a185-82b4-481b-bbd7-55f0f8be336a-log-httpd\") pod \"0605a185-82b4-481b-bbd7-55f0f8be336a\" (UID: \"0605a185-82b4-481b-bbd7-55f0f8be336a\") " Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.151323 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0605a185-82b4-481b-bbd7-55f0f8be336a-scripts\") pod \"0605a185-82b4-481b-bbd7-55f0f8be336a\" (UID: \"0605a185-82b4-481b-bbd7-55f0f8be336a\") " Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.151341 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b6mnj\" (UniqueName: \"kubernetes.io/projected/0605a185-82b4-481b-bbd7-55f0f8be336a-kube-api-access-b6mnj\") pod \"0605a185-82b4-481b-bbd7-55f0f8be336a\" (UID: \"0605a185-82b4-481b-bbd7-55f0f8be336a\") " Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.151511 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0605a185-82b4-481b-bbd7-55f0f8be336a-sg-core-conf-yaml\") pod \"0605a185-82b4-481b-bbd7-55f0f8be336a\" (UID: \"0605a185-82b4-481b-bbd7-55f0f8be336a\") " Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.151560 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0605a185-82b4-481b-bbd7-55f0f8be336a-combined-ca-bundle\") pod \"0605a185-82b4-481b-bbd7-55f0f8be336a\" (UID: \"0605a185-82b4-481b-bbd7-55f0f8be336a\") " Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.151662 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0605a185-82b4-481b-bbd7-55f0f8be336a-config-data\") pod \"0605a185-82b4-481b-bbd7-55f0f8be336a\" (UID: \"0605a185-82b4-481b-bbd7-55f0f8be336a\") " Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.157581 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0605a185-82b4-481b-bbd7-55f0f8be336a-scripts" (OuterVolumeSpecName: "scripts") pod "0605a185-82b4-481b-bbd7-55f0f8be336a" (UID: "0605a185-82b4-481b-bbd7-55f0f8be336a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.157818 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0605a185-82b4-481b-bbd7-55f0f8be336a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0605a185-82b4-481b-bbd7-55f0f8be336a" (UID: "0605a185-82b4-481b-bbd7-55f0f8be336a"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.157992 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0605a185-82b4-481b-bbd7-55f0f8be336a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0605a185-82b4-481b-bbd7-55f0f8be336a" (UID: "0605a185-82b4-481b-bbd7-55f0f8be336a"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.161616 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0605a185-82b4-481b-bbd7-55f0f8be336a-kube-api-access-b6mnj" (OuterVolumeSpecName: "kube-api-access-b6mnj") pod "0605a185-82b4-481b-bbd7-55f0f8be336a" (UID: "0605a185-82b4-481b-bbd7-55f0f8be336a"). InnerVolumeSpecName "kube-api-access-b6mnj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.207209 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0605a185-82b4-481b-bbd7-55f0f8be336a-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "0605a185-82b4-481b-bbd7-55f0f8be336a" (UID: "0605a185-82b4-481b-bbd7-55f0f8be336a"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.254358 4811 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0605a185-82b4-481b-bbd7-55f0f8be336a-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.254398 4811 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0605a185-82b4-481b-bbd7-55f0f8be336a-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.254409 4811 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0605a185-82b4-481b-bbd7-55f0f8be336a-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.254420 4811 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0605a185-82b4-481b-bbd7-55f0f8be336a-scripts\") on node \"crc\" DevicePath \"\"" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.254451 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b6mnj\" (UniqueName: \"kubernetes.io/projected/0605a185-82b4-481b-bbd7-55f0f8be336a-kube-api-access-b6mnj\") on node \"crc\" DevicePath \"\"" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.278027 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0605a185-82b4-481b-bbd7-55f0f8be336a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0605a185-82b4-481b-bbd7-55f0f8be336a" (UID: "0605a185-82b4-481b-bbd7-55f0f8be336a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.309199 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0605a185-82b4-481b-bbd7-55f0f8be336a-config-data" (OuterVolumeSpecName: "config-data") pod "0605a185-82b4-481b-bbd7-55f0f8be336a" (UID: "0605a185-82b4-481b-bbd7-55f0f8be336a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.356599 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0605a185-82b4-481b-bbd7-55f0f8be336a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.356632 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0605a185-82b4-481b-bbd7-55f0f8be336a-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.431925 4811 generic.go:334] "Generic (PLEG): container finished" podID="0605a185-82b4-481b-bbd7-55f0f8be336a" containerID="e48b88cbe30770ee1396310ed59d3a59cc30ffaf637c5737521cb527a19a37b1" exitCode=0 Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.431959 4811 generic.go:334] "Generic (PLEG): container finished" podID="0605a185-82b4-481b-bbd7-55f0f8be336a" containerID="21a2faa2df26e1d62d280372aca8995472dcd1fe20923eed71d0d6056abbc8d3" exitCode=0 Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.431982 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0605a185-82b4-481b-bbd7-55f0f8be336a","Type":"ContainerDied","Data":"e48b88cbe30770ee1396310ed59d3a59cc30ffaf637c5737521cb527a19a37b1"} Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.432010 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0605a185-82b4-481b-bbd7-55f0f8be336a","Type":"ContainerDied","Data":"21a2faa2df26e1d62d280372aca8995472dcd1fe20923eed71d0d6056abbc8d3"} Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.432012 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.432037 4811 scope.go:117] "RemoveContainer" containerID="93019c3d06784ba07e48a879a5ba7af774e83c32b2aca767f30ede7417600ac0" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.432025 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0605a185-82b4-481b-bbd7-55f0f8be336a","Type":"ContainerDied","Data":"604d9ed0bfaff3f1176e82108c67fa24228cd0f87019e8e2b2cf5298ec11c47f"} Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.468608 4811 scope.go:117] "RemoveContainer" containerID="7bb168727e7fe79a7fc1401a09e96906f79980de59145bbf79c707fced87f1ff" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.492516 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.512567 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.525683 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 28 17:32:33 crc kubenswrapper[4811]: E0128 17:32:33.526207 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0605a185-82b4-481b-bbd7-55f0f8be336a" containerName="proxy-httpd" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.526228 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="0605a185-82b4-481b-bbd7-55f0f8be336a" containerName="proxy-httpd" Jan 28 17:32:33 crc kubenswrapper[4811]: E0128 17:32:33.526244 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0605a185-82b4-481b-bbd7-55f0f8be336a" containerName="ceilometer-notification-agent" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.526252 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="0605a185-82b4-481b-bbd7-55f0f8be336a" containerName="ceilometer-notification-agent" Jan 28 17:32:33 crc kubenswrapper[4811]: E0128 17:32:33.526283 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0605a185-82b4-481b-bbd7-55f0f8be336a" containerName="ceilometer-central-agent" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.526291 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="0605a185-82b4-481b-bbd7-55f0f8be336a" containerName="ceilometer-central-agent" Jan 28 17:32:33 crc kubenswrapper[4811]: E0128 17:32:33.526309 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c71148c2-9145-4207-841f-f4931e513205" containerName="init" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.526316 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="c71148c2-9145-4207-841f-f4931e513205" containerName="init" Jan 28 17:32:33 crc kubenswrapper[4811]: E0128 17:32:33.526502 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0605a185-82b4-481b-bbd7-55f0f8be336a" containerName="sg-core" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.526515 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="0605a185-82b4-481b-bbd7-55f0f8be336a" containerName="sg-core" Jan 28 17:32:33 crc kubenswrapper[4811]: E0128 17:32:33.526531 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c71148c2-9145-4207-841f-f4931e513205" containerName="dnsmasq-dns" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.526541 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="c71148c2-9145-4207-841f-f4931e513205" containerName="dnsmasq-dns" Jan 28 17:32:33 crc 
kubenswrapper[4811]: I0128 17:32:33.528040 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="0605a185-82b4-481b-bbd7-55f0f8be336a" containerName="ceilometer-central-agent" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.528084 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="0605a185-82b4-481b-bbd7-55f0f8be336a" containerName="proxy-httpd" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.528109 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="0605a185-82b4-481b-bbd7-55f0f8be336a" containerName="sg-core" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.528132 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="0605a185-82b4-481b-bbd7-55f0f8be336a" containerName="ceilometer-notification-agent" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.528146 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="c71148c2-9145-4207-841f-f4931e513205" containerName="dnsmasq-dns" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.533752 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.535778 4811 scope.go:117] "RemoveContainer" containerID="e48b88cbe30770ee1396310ed59d3a59cc30ffaf637c5737521cb527a19a37b1" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.539171 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.539350 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.541810 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.573692 4811 scope.go:117] "RemoveContainer" containerID="21a2faa2df26e1d62d280372aca8995472dcd1fe20923eed71d0d6056abbc8d3" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.602990 4811 scope.go:117] "RemoveContainer" containerID="93019c3d06784ba07e48a879a5ba7af774e83c32b2aca767f30ede7417600ac0" Jan 28 17:32:33 crc kubenswrapper[4811]: E0128 17:32:33.603911 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"93019c3d06784ba07e48a879a5ba7af774e83c32b2aca767f30ede7417600ac0\": container with ID starting with 93019c3d06784ba07e48a879a5ba7af774e83c32b2aca767f30ede7417600ac0 not found: ID does not exist" containerID="93019c3d06784ba07e48a879a5ba7af774e83c32b2aca767f30ede7417600ac0" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.603943 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93019c3d06784ba07e48a879a5ba7af774e83c32b2aca767f30ede7417600ac0"} err="failed to get container status \"93019c3d06784ba07e48a879a5ba7af774e83c32b2aca767f30ede7417600ac0\": rpc error: code = NotFound desc = could not find container \"93019c3d06784ba07e48a879a5ba7af774e83c32b2aca767f30ede7417600ac0\": container with ID starting with 93019c3d06784ba07e48a879a5ba7af774e83c32b2aca767f30ede7417600ac0 not found: ID does not exist" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.603969 4811 scope.go:117] "RemoveContainer" containerID="7bb168727e7fe79a7fc1401a09e96906f79980de59145bbf79c707fced87f1ff" Jan 28 17:32:33 crc kubenswrapper[4811]: E0128 17:32:33.604526 4811 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"7bb168727e7fe79a7fc1401a09e96906f79980de59145bbf79c707fced87f1ff\": container with ID starting with 7bb168727e7fe79a7fc1401a09e96906f79980de59145bbf79c707fced87f1ff not found: ID does not exist" containerID="7bb168727e7fe79a7fc1401a09e96906f79980de59145bbf79c707fced87f1ff" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.604580 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7bb168727e7fe79a7fc1401a09e96906f79980de59145bbf79c707fced87f1ff"} err="failed to get container status \"7bb168727e7fe79a7fc1401a09e96906f79980de59145bbf79c707fced87f1ff\": rpc error: code = NotFound desc = could not find container \"7bb168727e7fe79a7fc1401a09e96906f79980de59145bbf79c707fced87f1ff\": container with ID starting with 7bb168727e7fe79a7fc1401a09e96906f79980de59145bbf79c707fced87f1ff not found: ID does not exist" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.604614 4811 scope.go:117] "RemoveContainer" containerID="e48b88cbe30770ee1396310ed59d3a59cc30ffaf637c5737521cb527a19a37b1" Jan 28 17:32:33 crc kubenswrapper[4811]: E0128 17:32:33.604924 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e48b88cbe30770ee1396310ed59d3a59cc30ffaf637c5737521cb527a19a37b1\": container with ID starting with e48b88cbe30770ee1396310ed59d3a59cc30ffaf637c5737521cb527a19a37b1 not found: ID does not exist" containerID="e48b88cbe30770ee1396310ed59d3a59cc30ffaf637c5737521cb527a19a37b1" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.604948 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e48b88cbe30770ee1396310ed59d3a59cc30ffaf637c5737521cb527a19a37b1"} err="failed to get container status \"e48b88cbe30770ee1396310ed59d3a59cc30ffaf637c5737521cb527a19a37b1\": rpc error: code = NotFound desc = could not find container \"e48b88cbe30770ee1396310ed59d3a59cc30ffaf637c5737521cb527a19a37b1\": container with ID starting with e48b88cbe30770ee1396310ed59d3a59cc30ffaf637c5737521cb527a19a37b1 not found: ID does not exist" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.604965 4811 scope.go:117] "RemoveContainer" containerID="21a2faa2df26e1d62d280372aca8995472dcd1fe20923eed71d0d6056abbc8d3" Jan 28 17:32:33 crc kubenswrapper[4811]: E0128 17:32:33.605153 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21a2faa2df26e1d62d280372aca8995472dcd1fe20923eed71d0d6056abbc8d3\": container with ID starting with 21a2faa2df26e1d62d280372aca8995472dcd1fe20923eed71d0d6056abbc8d3 not found: ID does not exist" containerID="21a2faa2df26e1d62d280372aca8995472dcd1fe20923eed71d0d6056abbc8d3" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.605173 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21a2faa2df26e1d62d280372aca8995472dcd1fe20923eed71d0d6056abbc8d3"} err="failed to get container status \"21a2faa2df26e1d62d280372aca8995472dcd1fe20923eed71d0d6056abbc8d3\": rpc error: code = NotFound desc = could not find container \"21a2faa2df26e1d62d280372aca8995472dcd1fe20923eed71d0d6056abbc8d3\": container with ID starting with 21a2faa2df26e1d62d280372aca8995472dcd1fe20923eed71d0d6056abbc8d3 not found: ID does not exist" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.605184 4811 scope.go:117] "RemoveContainer" 
containerID="93019c3d06784ba07e48a879a5ba7af774e83c32b2aca767f30ede7417600ac0" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.605380 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93019c3d06784ba07e48a879a5ba7af774e83c32b2aca767f30ede7417600ac0"} err="failed to get container status \"93019c3d06784ba07e48a879a5ba7af774e83c32b2aca767f30ede7417600ac0\": rpc error: code = NotFound desc = could not find container \"93019c3d06784ba07e48a879a5ba7af774e83c32b2aca767f30ede7417600ac0\": container with ID starting with 93019c3d06784ba07e48a879a5ba7af774e83c32b2aca767f30ede7417600ac0 not found: ID does not exist" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.605399 4811 scope.go:117] "RemoveContainer" containerID="7bb168727e7fe79a7fc1401a09e96906f79980de59145bbf79c707fced87f1ff" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.605657 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7bb168727e7fe79a7fc1401a09e96906f79980de59145bbf79c707fced87f1ff"} err="failed to get container status \"7bb168727e7fe79a7fc1401a09e96906f79980de59145bbf79c707fced87f1ff\": rpc error: code = NotFound desc = could not find container \"7bb168727e7fe79a7fc1401a09e96906f79980de59145bbf79c707fced87f1ff\": container with ID starting with 7bb168727e7fe79a7fc1401a09e96906f79980de59145bbf79c707fced87f1ff not found: ID does not exist" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.605678 4811 scope.go:117] "RemoveContainer" containerID="e48b88cbe30770ee1396310ed59d3a59cc30ffaf637c5737521cb527a19a37b1" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.605991 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e48b88cbe30770ee1396310ed59d3a59cc30ffaf637c5737521cb527a19a37b1"} err="failed to get container status \"e48b88cbe30770ee1396310ed59d3a59cc30ffaf637c5737521cb527a19a37b1\": rpc error: code = NotFound desc = could not find container \"e48b88cbe30770ee1396310ed59d3a59cc30ffaf637c5737521cb527a19a37b1\": container with ID starting with e48b88cbe30770ee1396310ed59d3a59cc30ffaf637c5737521cb527a19a37b1 not found: ID does not exist" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.606016 4811 scope.go:117] "RemoveContainer" containerID="21a2faa2df26e1d62d280372aca8995472dcd1fe20923eed71d0d6056abbc8d3" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.606185 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21a2faa2df26e1d62d280372aca8995472dcd1fe20923eed71d0d6056abbc8d3"} err="failed to get container status \"21a2faa2df26e1d62d280372aca8995472dcd1fe20923eed71d0d6056abbc8d3\": rpc error: code = NotFound desc = could not find container \"21a2faa2df26e1d62d280372aca8995472dcd1fe20923eed71d0d6056abbc8d3\": container with ID starting with 21a2faa2df26e1d62d280372aca8995472dcd1fe20923eed71d0d6056abbc8d3 not found: ID does not exist" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.663668 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/840f9dd7-e272-4eca-8c71-8b8b27a34ef3-config-data\") pod \"ceilometer-0\" (UID: \"840f9dd7-e272-4eca-8c71-8b8b27a34ef3\") " pod="openstack/ceilometer-0" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.664040 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/840f9dd7-e272-4eca-8c71-8b8b27a34ef3-run-httpd\") pod \"ceilometer-0\" (UID: \"840f9dd7-e272-4eca-8c71-8b8b27a34ef3\") " pod="openstack/ceilometer-0" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.665633 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/840f9dd7-e272-4eca-8c71-8b8b27a34ef3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"840f9dd7-e272-4eca-8c71-8b8b27a34ef3\") " pod="openstack/ceilometer-0" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.666000 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/840f9dd7-e272-4eca-8c71-8b8b27a34ef3-scripts\") pod \"ceilometer-0\" (UID: \"840f9dd7-e272-4eca-8c71-8b8b27a34ef3\") " pod="openstack/ceilometer-0" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.666145 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/840f9dd7-e272-4eca-8c71-8b8b27a34ef3-log-httpd\") pod \"ceilometer-0\" (UID: \"840f9dd7-e272-4eca-8c71-8b8b27a34ef3\") " pod="openstack/ceilometer-0" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.666184 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/840f9dd7-e272-4eca-8c71-8b8b27a34ef3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"840f9dd7-e272-4eca-8c71-8b8b27a34ef3\") " pod="openstack/ceilometer-0" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.666237 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpkh7\" (UniqueName: \"kubernetes.io/projected/840f9dd7-e272-4eca-8c71-8b8b27a34ef3-kube-api-access-tpkh7\") pod \"ceilometer-0\" (UID: \"840f9dd7-e272-4eca-8c71-8b8b27a34ef3\") " pod="openstack/ceilometer-0" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.768213 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/840f9dd7-e272-4eca-8c71-8b8b27a34ef3-scripts\") pod \"ceilometer-0\" (UID: \"840f9dd7-e272-4eca-8c71-8b8b27a34ef3\") " pod="openstack/ceilometer-0" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.768296 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/840f9dd7-e272-4eca-8c71-8b8b27a34ef3-log-httpd\") pod \"ceilometer-0\" (UID: \"840f9dd7-e272-4eca-8c71-8b8b27a34ef3\") " pod="openstack/ceilometer-0" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.768319 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/840f9dd7-e272-4eca-8c71-8b8b27a34ef3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"840f9dd7-e272-4eca-8c71-8b8b27a34ef3\") " pod="openstack/ceilometer-0" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.768361 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpkh7\" (UniqueName: \"kubernetes.io/projected/840f9dd7-e272-4eca-8c71-8b8b27a34ef3-kube-api-access-tpkh7\") pod \"ceilometer-0\" (UID: \"840f9dd7-e272-4eca-8c71-8b8b27a34ef3\") " pod="openstack/ceilometer-0" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.768445 4811 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/840f9dd7-e272-4eca-8c71-8b8b27a34ef3-config-data\") pod \"ceilometer-0\" (UID: \"840f9dd7-e272-4eca-8c71-8b8b27a34ef3\") " pod="openstack/ceilometer-0" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.768488 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/840f9dd7-e272-4eca-8c71-8b8b27a34ef3-run-httpd\") pod \"ceilometer-0\" (UID: \"840f9dd7-e272-4eca-8c71-8b8b27a34ef3\") " pod="openstack/ceilometer-0" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.768584 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/840f9dd7-e272-4eca-8c71-8b8b27a34ef3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"840f9dd7-e272-4eca-8c71-8b8b27a34ef3\") " pod="openstack/ceilometer-0" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.769272 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/840f9dd7-e272-4eca-8c71-8b8b27a34ef3-run-httpd\") pod \"ceilometer-0\" (UID: \"840f9dd7-e272-4eca-8c71-8b8b27a34ef3\") " pod="openstack/ceilometer-0" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.769366 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/840f9dd7-e272-4eca-8c71-8b8b27a34ef3-log-httpd\") pod \"ceilometer-0\" (UID: \"840f9dd7-e272-4eca-8c71-8b8b27a34ef3\") " pod="openstack/ceilometer-0" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.774130 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/840f9dd7-e272-4eca-8c71-8b8b27a34ef3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"840f9dd7-e272-4eca-8c71-8b8b27a34ef3\") " pod="openstack/ceilometer-0" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.774237 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/840f9dd7-e272-4eca-8c71-8b8b27a34ef3-scripts\") pod \"ceilometer-0\" (UID: \"840f9dd7-e272-4eca-8c71-8b8b27a34ef3\") " pod="openstack/ceilometer-0" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.774757 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/840f9dd7-e272-4eca-8c71-8b8b27a34ef3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"840f9dd7-e272-4eca-8c71-8b8b27a34ef3\") " pod="openstack/ceilometer-0" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.775513 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/840f9dd7-e272-4eca-8c71-8b8b27a34ef3-config-data\") pod \"ceilometer-0\" (UID: \"840f9dd7-e272-4eca-8c71-8b8b27a34ef3\") " pod="openstack/ceilometer-0" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.786732 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpkh7\" (UniqueName: \"kubernetes.io/projected/840f9dd7-e272-4eca-8c71-8b8b27a34ef3-kube-api-access-tpkh7\") pod \"ceilometer-0\" (UID: \"840f9dd7-e272-4eca-8c71-8b8b27a34ef3\") " pod="openstack/ceilometer-0" Jan 28 17:32:33 crc kubenswrapper[4811]: I0128 17:32:33.852287 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 28 17:32:34 crc kubenswrapper[4811]: I0128 17:32:34.352373 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0605a185-82b4-481b-bbd7-55f0f8be336a" path="/var/lib/kubelet/pods/0605a185-82b4-481b-bbd7-55f0f8be336a/volumes" Jan 28 17:32:34 crc kubenswrapper[4811]: I0128 17:32:34.380098 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 28 17:32:34 crc kubenswrapper[4811]: W0128 17:32:34.396397 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod840f9dd7_e272_4eca_8c71_8b8b27a34ef3.slice/crio-90e2c67e5bd3a3b7eea480976e8089274289f6b69eae4e9632612a290e9af8b7 WatchSource:0}: Error finding container 90e2c67e5bd3a3b7eea480976e8089274289f6b69eae4e9632612a290e9af8b7: Status 404 returned error can't find the container with id 90e2c67e5bd3a3b7eea480976e8089274289f6b69eae4e9632612a290e9af8b7 Jan 28 17:32:34 crc kubenswrapper[4811]: I0128 17:32:34.443218 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"840f9dd7-e272-4eca-8c71-8b8b27a34ef3","Type":"ContainerStarted","Data":"90e2c67e5bd3a3b7eea480976e8089274289f6b69eae4e9632612a290e9af8b7"} Jan 28 17:32:36 crc kubenswrapper[4811]: I0128 17:32:36.464668 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"840f9dd7-e272-4eca-8c71-8b8b27a34ef3","Type":"ContainerStarted","Data":"61f48fbee7c78d6f6530c01d98fb0733ea99c0fa46006c1e7e88c852f85a74d2"} Jan 28 17:32:37 crc kubenswrapper[4811]: I0128 17:32:37.481520 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"840f9dd7-e272-4eca-8c71-8b8b27a34ef3","Type":"ContainerStarted","Data":"9203babf1b6d35d003b9196e0d0b75547656bb8209ce5b6c42096890e28f8cb6"} Jan 28 17:32:37 crc kubenswrapper[4811]: I0128 17:32:37.851547 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0" Jan 28 17:32:38 crc kubenswrapper[4811]: I0128 17:32:38.675163 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-26j9c" Jan 28 17:32:38 crc kubenswrapper[4811]: I0128 17:32:38.675442 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-26j9c" Jan 28 17:32:39 crc kubenswrapper[4811]: I0128 17:32:39.501084 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"840f9dd7-e272-4eca-8c71-8b8b27a34ef3","Type":"ContainerStarted","Data":"37ccd555b0774d4c5ac2a7a1455b843782846addc1ef6679e033a10b20272c70"} Jan 28 17:32:39 crc kubenswrapper[4811]: I0128 17:32:39.734183 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-26j9c" podUID="7c66b660-0464-4966-b9e7-9002f3af9163" containerName="registry-server" probeResult="failure" output=< Jan 28 17:32:39 crc kubenswrapper[4811]: timeout: failed to connect service ":50051" within 1s Jan 28 17:32:39 crc kubenswrapper[4811]: > Jan 28 17:32:40 crc kubenswrapper[4811]: I0128 17:32:40.152732 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Jan 28 17:32:40 crc kubenswrapper[4811]: I0128 17:32:40.417752 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/manila-api-0" Jan 28 17:32:44 crc kubenswrapper[4811]: I0128 
17:32:44.339614 4811 scope.go:117] "RemoveContainer" containerID="c8f31485a8ab10a9c157ae14e2cb2f7e512fa0e9a55fee158caf88525ec54b35" Jan 28 17:32:44 crc kubenswrapper[4811]: E0128 17:32:44.340354 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:32:44 crc kubenswrapper[4811]: I0128 17:32:44.554665 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"840f9dd7-e272-4eca-8c71-8b8b27a34ef3","Type":"ContainerStarted","Data":"4745fd5dd4cd138feacec366011fa201e4474f269da34a65bea881c9a15cb697"} Jan 28 17:32:44 crc kubenswrapper[4811]: I0128 17:32:44.554972 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 28 17:32:44 crc kubenswrapper[4811]: I0128 17:32:44.586259 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.739659959 podStartE2EDuration="11.586240921s" podCreationTimestamp="2026-01-28 17:32:33 +0000 UTC" firstStartedPulling="2026-01-28 17:32:34.399088393 +0000 UTC m=+6447.153451976" lastFinishedPulling="2026-01-28 17:32:43.245669355 +0000 UTC m=+6456.000032938" observedRunningTime="2026-01-28 17:32:44.582583582 +0000 UTC m=+6457.336947195" watchObservedRunningTime="2026-01-28 17:32:44.586240921 +0000 UTC m=+6457.340604504" Jan 28 17:32:49 crc kubenswrapper[4811]: I0128 17:32:49.687891 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0" Jan 28 17:32:49 crc kubenswrapper[4811]: I0128 17:32:49.721259 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-26j9c" podUID="7c66b660-0464-4966-b9e7-9002f3af9163" containerName="registry-server" probeResult="failure" output=< Jan 28 17:32:49 crc kubenswrapper[4811]: timeout: failed to connect service ":50051" within 1s Jan 28 17:32:49 crc kubenswrapper[4811]: > Jan 28 17:32:58 crc kubenswrapper[4811]: I0128 17:32:58.348574 4811 scope.go:117] "RemoveContainer" containerID="c8f31485a8ab10a9c157ae14e2cb2f7e512fa0e9a55fee158caf88525ec54b35" Jan 28 17:32:58 crc kubenswrapper[4811]: E0128 17:32:58.349406 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:32:59 crc kubenswrapper[4811]: I0128 17:32:59.736745 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-26j9c" podUID="7c66b660-0464-4966-b9e7-9002f3af9163" containerName="registry-server" probeResult="failure" output=< Jan 28 17:32:59 crc kubenswrapper[4811]: timeout: failed to connect service ":50051" within 1s Jan 28 17:32:59 crc kubenswrapper[4811]: > Jan 28 17:33:03 crc kubenswrapper[4811]: I0128 17:33:03.858358 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Jan 28 
17:33:09 crc kubenswrapper[4811]: I0128 17:33:09.723232 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-26j9c" podUID="7c66b660-0464-4966-b9e7-9002f3af9163" containerName="registry-server" probeResult="failure" output=< Jan 28 17:33:09 crc kubenswrapper[4811]: timeout: failed to connect service ":50051" within 1s Jan 28 17:33:09 crc kubenswrapper[4811]: > Jan 28 17:33:10 crc kubenswrapper[4811]: I0128 17:33:10.340039 4811 scope.go:117] "RemoveContainer" containerID="c8f31485a8ab10a9c157ae14e2cb2f7e512fa0e9a55fee158caf88525ec54b35" Jan 28 17:33:10 crc kubenswrapper[4811]: E0128 17:33:10.340584 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:33:19 crc kubenswrapper[4811]: I0128 17:33:19.726267 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-26j9c" podUID="7c66b660-0464-4966-b9e7-9002f3af9163" containerName="registry-server" probeResult="failure" output=< Jan 28 17:33:19 crc kubenswrapper[4811]: timeout: failed to connect service ":50051" within 1s Jan 28 17:33:19 crc kubenswrapper[4811]: > Jan 28 17:33:22 crc kubenswrapper[4811]: I0128 17:33:22.339642 4811 scope.go:117] "RemoveContainer" containerID="c8f31485a8ab10a9c157ae14e2cb2f7e512fa0e9a55fee158caf88525ec54b35" Jan 28 17:33:22 crc kubenswrapper[4811]: E0128 17:33:22.340543 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:33:29 crc kubenswrapper[4811]: I0128 17:33:29.724511 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-26j9c" podUID="7c66b660-0464-4966-b9e7-9002f3af9163" containerName="registry-server" probeResult="failure" output=< Jan 28 17:33:29 crc kubenswrapper[4811]: timeout: failed to connect service ":50051" within 1s Jan 28 17:33:29 crc kubenswrapper[4811]: > Jan 28 17:33:32 crc kubenswrapper[4811]: I0128 17:33:32.046723 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7b99fbc5d5-6q4lt"] Jan 28 17:33:32 crc kubenswrapper[4811]: I0128 17:33:32.052996 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7b99fbc5d5-6q4lt" Jan 28 17:33:32 crc kubenswrapper[4811]: I0128 17:33:32.055299 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1" Jan 28 17:33:32 crc kubenswrapper[4811]: I0128 17:33:32.063330 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7b99fbc5d5-6q4lt"] Jan 28 17:33:32 crc kubenswrapper[4811]: I0128 17:33:32.144106 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sb2b4\" (UniqueName: \"kubernetes.io/projected/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-kube-api-access-sb2b4\") pod \"dnsmasq-dns-7b99fbc5d5-6q4lt\" (UID: \"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd\") " pod="openstack/dnsmasq-dns-7b99fbc5d5-6q4lt" Jan 28 17:33:32 crc kubenswrapper[4811]: I0128 17:33:32.144167 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-ovsdbserver-nb\") pod \"dnsmasq-dns-7b99fbc5d5-6q4lt\" (UID: \"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd\") " pod="openstack/dnsmasq-dns-7b99fbc5d5-6q4lt" Jan 28 17:33:32 crc kubenswrapper[4811]: I0128 17:33:32.144318 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-dns-svc\") pod \"dnsmasq-dns-7b99fbc5d5-6q4lt\" (UID: \"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd\") " pod="openstack/dnsmasq-dns-7b99fbc5d5-6q4lt" Jan 28 17:33:32 crc kubenswrapper[4811]: I0128 17:33:32.144540 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-ovsdbserver-sb\") pod \"dnsmasq-dns-7b99fbc5d5-6q4lt\" (UID: \"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd\") " pod="openstack/dnsmasq-dns-7b99fbc5d5-6q4lt" Jan 28 17:33:32 crc kubenswrapper[4811]: I0128 17:33:32.144723 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-config\") pod \"dnsmasq-dns-7b99fbc5d5-6q4lt\" (UID: \"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd\") " pod="openstack/dnsmasq-dns-7b99fbc5d5-6q4lt" Jan 28 17:33:32 crc kubenswrapper[4811]: I0128 17:33:32.144775 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-openstack-cell1\") pod \"dnsmasq-dns-7b99fbc5d5-6q4lt\" (UID: \"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd\") " pod="openstack/dnsmasq-dns-7b99fbc5d5-6q4lt" Jan 28 17:33:32 crc kubenswrapper[4811]: I0128 17:33:32.246332 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sb2b4\" (UniqueName: \"kubernetes.io/projected/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-kube-api-access-sb2b4\") pod \"dnsmasq-dns-7b99fbc5d5-6q4lt\" (UID: \"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd\") " pod="openstack/dnsmasq-dns-7b99fbc5d5-6q4lt" Jan 28 17:33:32 crc kubenswrapper[4811]: I0128 17:33:32.246391 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-ovsdbserver-nb\") pod \"dnsmasq-dns-7b99fbc5d5-6q4lt\" (UID: 
\"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd\") " pod="openstack/dnsmasq-dns-7b99fbc5d5-6q4lt" Jan 28 17:33:32 crc kubenswrapper[4811]: I0128 17:33:32.246455 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-dns-svc\") pod \"dnsmasq-dns-7b99fbc5d5-6q4lt\" (UID: \"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd\") " pod="openstack/dnsmasq-dns-7b99fbc5d5-6q4lt" Jan 28 17:33:32 crc kubenswrapper[4811]: I0128 17:33:32.246512 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-ovsdbserver-sb\") pod \"dnsmasq-dns-7b99fbc5d5-6q4lt\" (UID: \"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd\") " pod="openstack/dnsmasq-dns-7b99fbc5d5-6q4lt" Jan 28 17:33:32 crc kubenswrapper[4811]: I0128 17:33:32.246567 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-config\") pod \"dnsmasq-dns-7b99fbc5d5-6q4lt\" (UID: \"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd\") " pod="openstack/dnsmasq-dns-7b99fbc5d5-6q4lt" Jan 28 17:33:32 crc kubenswrapper[4811]: I0128 17:33:32.246596 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-openstack-cell1\") pod \"dnsmasq-dns-7b99fbc5d5-6q4lt\" (UID: \"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd\") " pod="openstack/dnsmasq-dns-7b99fbc5d5-6q4lt" Jan 28 17:33:32 crc kubenswrapper[4811]: I0128 17:33:32.247612 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-openstack-cell1\") pod \"dnsmasq-dns-7b99fbc5d5-6q4lt\" (UID: \"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd\") " pod="openstack/dnsmasq-dns-7b99fbc5d5-6q4lt" Jan 28 17:33:32 crc kubenswrapper[4811]: I0128 17:33:32.247667 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-ovsdbserver-nb\") pod \"dnsmasq-dns-7b99fbc5d5-6q4lt\" (UID: \"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd\") " pod="openstack/dnsmasq-dns-7b99fbc5d5-6q4lt" Jan 28 17:33:32 crc kubenswrapper[4811]: I0128 17:33:32.247700 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-ovsdbserver-sb\") pod \"dnsmasq-dns-7b99fbc5d5-6q4lt\" (UID: \"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd\") " pod="openstack/dnsmasq-dns-7b99fbc5d5-6q4lt" Jan 28 17:33:32 crc kubenswrapper[4811]: I0128 17:33:32.247700 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-dns-svc\") pod \"dnsmasq-dns-7b99fbc5d5-6q4lt\" (UID: \"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd\") " pod="openstack/dnsmasq-dns-7b99fbc5d5-6q4lt" Jan 28 17:33:32 crc kubenswrapper[4811]: I0128 17:33:32.247836 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-config\") pod \"dnsmasq-dns-7b99fbc5d5-6q4lt\" (UID: \"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd\") " pod="openstack/dnsmasq-dns-7b99fbc5d5-6q4lt" Jan 28 17:33:32 crc kubenswrapper[4811]: I0128 
17:33:32.267702 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sb2b4\" (UniqueName: \"kubernetes.io/projected/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-kube-api-access-sb2b4\") pod \"dnsmasq-dns-7b99fbc5d5-6q4lt\" (UID: \"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd\") " pod="openstack/dnsmasq-dns-7b99fbc5d5-6q4lt" Jan 28 17:33:32 crc kubenswrapper[4811]: I0128 17:33:32.376585 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b99fbc5d5-6q4lt" Jan 28 17:33:32 crc kubenswrapper[4811]: I0128 17:33:32.900072 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7b99fbc5d5-6q4lt"] Jan 28 17:33:33 crc kubenswrapper[4811]: I0128 17:33:33.006183 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b99fbc5d5-6q4lt" event={"ID":"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd","Type":"ContainerStarted","Data":"0124170dc0bb36244ec6f35a7219f4b5c21285316e33fa529ad7ff0463a08613"} Jan 28 17:33:34 crc kubenswrapper[4811]: I0128 17:33:34.017975 4811 generic.go:334] "Generic (PLEG): container finished" podID="e8879fad-c29b-43a3-a9a7-155c7d4f3ffd" containerID="5ac74cb2fbe6e2eb6b4bed63960306ce721da646ffadf37079a636d796e9e48e" exitCode=0 Jan 28 17:33:34 crc kubenswrapper[4811]: I0128 17:33:34.018082 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b99fbc5d5-6q4lt" event={"ID":"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd","Type":"ContainerDied","Data":"5ac74cb2fbe6e2eb6b4bed63960306ce721da646ffadf37079a636d796e9e48e"} Jan 28 17:33:34 crc kubenswrapper[4811]: I0128 17:33:34.340497 4811 scope.go:117] "RemoveContainer" containerID="c8f31485a8ab10a9c157ae14e2cb2f7e512fa0e9a55fee158caf88525ec54b35" Jan 28 17:33:34 crc kubenswrapper[4811]: E0128 17:33:34.341318 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:33:35 crc kubenswrapper[4811]: I0128 17:33:35.032143 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b99fbc5d5-6q4lt" event={"ID":"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd","Type":"ContainerStarted","Data":"d651edd9f369ba17e88bae5001c44e34a4fb0c496fe8fd10209c2dbfaab7d298"} Jan 28 17:33:35 crc kubenswrapper[4811]: I0128 17:33:35.032512 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7b99fbc5d5-6q4lt" Jan 28 17:33:35 crc kubenswrapper[4811]: I0128 17:33:35.061962 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7b99fbc5d5-6q4lt" podStartSLOduration=3.061943191 podStartE2EDuration="3.061943191s" podCreationTimestamp="2026-01-28 17:33:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:33:35.050112011 +0000 UTC m=+6507.804475614" watchObservedRunningTime="2026-01-28 17:33:35.061943191 +0000 UTC m=+6507.816306774" Jan 28 17:33:38 crc kubenswrapper[4811]: I0128 17:33:38.721719 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-26j9c" Jan 28 17:33:38 crc kubenswrapper[4811]: I0128 
17:33:38.779482 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-26j9c" Jan 28 17:33:39 crc kubenswrapper[4811]: I0128 17:33:39.567661 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-26j9c"] Jan 28 17:33:40 crc kubenswrapper[4811]: I0128 17:33:40.077698 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-26j9c" podUID="7c66b660-0464-4966-b9e7-9002f3af9163" containerName="registry-server" containerID="cri-o://4fbcf4cecc5e79e081598107ec4f89c682e98c12a1a93fbc46f95fb067230995" gracePeriod=2 Jan 28 17:33:40 crc kubenswrapper[4811]: I0128 17:33:40.594645 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-26j9c" Jan 28 17:33:40 crc kubenswrapper[4811]: I0128 17:33:40.642875 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c66b660-0464-4966-b9e7-9002f3af9163-utilities\") pod \"7c66b660-0464-4966-b9e7-9002f3af9163\" (UID: \"7c66b660-0464-4966-b9e7-9002f3af9163\") " Jan 28 17:33:40 crc kubenswrapper[4811]: I0128 17:33:40.643004 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c66b660-0464-4966-b9e7-9002f3af9163-catalog-content\") pod \"7c66b660-0464-4966-b9e7-9002f3af9163\" (UID: \"7c66b660-0464-4966-b9e7-9002f3af9163\") " Jan 28 17:33:40 crc kubenswrapper[4811]: I0128 17:33:40.643148 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kk69l\" (UniqueName: \"kubernetes.io/projected/7c66b660-0464-4966-b9e7-9002f3af9163-kube-api-access-kk69l\") pod \"7c66b660-0464-4966-b9e7-9002f3af9163\" (UID: \"7c66b660-0464-4966-b9e7-9002f3af9163\") " Jan 28 17:33:40 crc kubenswrapper[4811]: I0128 17:33:40.650392 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c66b660-0464-4966-b9e7-9002f3af9163-kube-api-access-kk69l" (OuterVolumeSpecName: "kube-api-access-kk69l") pod "7c66b660-0464-4966-b9e7-9002f3af9163" (UID: "7c66b660-0464-4966-b9e7-9002f3af9163"). InnerVolumeSpecName "kube-api-access-kk69l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:33:40 crc kubenswrapper[4811]: I0128 17:33:40.652794 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c66b660-0464-4966-b9e7-9002f3af9163-utilities" (OuterVolumeSpecName: "utilities") pod "7c66b660-0464-4966-b9e7-9002f3af9163" (UID: "7c66b660-0464-4966-b9e7-9002f3af9163"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:33:40 crc kubenswrapper[4811]: I0128 17:33:40.746400 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kk69l\" (UniqueName: \"kubernetes.io/projected/7c66b660-0464-4966-b9e7-9002f3af9163-kube-api-access-kk69l\") on node \"crc\" DevicePath \"\"" Jan 28 17:33:40 crc kubenswrapper[4811]: I0128 17:33:40.746463 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c66b660-0464-4966-b9e7-9002f3af9163-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 17:33:40 crc kubenswrapper[4811]: I0128 17:33:40.770797 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c66b660-0464-4966-b9e7-9002f3af9163-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7c66b660-0464-4966-b9e7-9002f3af9163" (UID: "7c66b660-0464-4966-b9e7-9002f3af9163"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:33:40 crc kubenswrapper[4811]: I0128 17:33:40.849534 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c66b660-0464-4966-b9e7-9002f3af9163-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 17:33:41 crc kubenswrapper[4811]: I0128 17:33:41.092165 4811 generic.go:334] "Generic (PLEG): container finished" podID="7c66b660-0464-4966-b9e7-9002f3af9163" containerID="4fbcf4cecc5e79e081598107ec4f89c682e98c12a1a93fbc46f95fb067230995" exitCode=0 Jan 28 17:33:41 crc kubenswrapper[4811]: I0128 17:33:41.092242 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-26j9c" Jan 28 17:33:41 crc kubenswrapper[4811]: I0128 17:33:41.092261 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-26j9c" event={"ID":"7c66b660-0464-4966-b9e7-9002f3af9163","Type":"ContainerDied","Data":"4fbcf4cecc5e79e081598107ec4f89c682e98c12a1a93fbc46f95fb067230995"} Jan 28 17:33:41 crc kubenswrapper[4811]: I0128 17:33:41.093269 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-26j9c" event={"ID":"7c66b660-0464-4966-b9e7-9002f3af9163","Type":"ContainerDied","Data":"4da762ac20e3b8d5804ab9b6ca2a7ccd6357bc428ac92ddcb1956b41dbbd5dad"} Jan 28 17:33:41 crc kubenswrapper[4811]: I0128 17:33:41.093290 4811 scope.go:117] "RemoveContainer" containerID="4fbcf4cecc5e79e081598107ec4f89c682e98c12a1a93fbc46f95fb067230995" Jan 28 17:33:41 crc kubenswrapper[4811]: I0128 17:33:41.125715 4811 scope.go:117] "RemoveContainer" containerID="5414853f490e8a87c10c3d89a154ca1570c654860c27b517cfb1c4c4d29a80b1" Jan 28 17:33:41 crc kubenswrapper[4811]: I0128 17:33:41.135472 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-26j9c"] Jan 28 17:33:41 crc kubenswrapper[4811]: I0128 17:33:41.154074 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-26j9c"] Jan 28 17:33:41 crc kubenswrapper[4811]: I0128 17:33:41.168967 4811 scope.go:117] "RemoveContainer" containerID="4d3d6c41768033b4931eda844fdcdf735110d55f699ad68c2a5b8b1853e4d712" Jan 28 17:33:41 crc kubenswrapper[4811]: I0128 17:33:41.212527 4811 scope.go:117] "RemoveContainer" containerID="4fbcf4cecc5e79e081598107ec4f89c682e98c12a1a93fbc46f95fb067230995" Jan 28 17:33:41 crc kubenswrapper[4811]: E0128 17:33:41.213694 4811 log.go:32] "ContainerStatus 
from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4fbcf4cecc5e79e081598107ec4f89c682e98c12a1a93fbc46f95fb067230995\": container with ID starting with 4fbcf4cecc5e79e081598107ec4f89c682e98c12a1a93fbc46f95fb067230995 not found: ID does not exist" containerID="4fbcf4cecc5e79e081598107ec4f89c682e98c12a1a93fbc46f95fb067230995" Jan 28 17:33:41 crc kubenswrapper[4811]: I0128 17:33:41.213747 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fbcf4cecc5e79e081598107ec4f89c682e98c12a1a93fbc46f95fb067230995"} err="failed to get container status \"4fbcf4cecc5e79e081598107ec4f89c682e98c12a1a93fbc46f95fb067230995\": rpc error: code = NotFound desc = could not find container \"4fbcf4cecc5e79e081598107ec4f89c682e98c12a1a93fbc46f95fb067230995\": container with ID starting with 4fbcf4cecc5e79e081598107ec4f89c682e98c12a1a93fbc46f95fb067230995 not found: ID does not exist" Jan 28 17:33:41 crc kubenswrapper[4811]: I0128 17:33:41.213778 4811 scope.go:117] "RemoveContainer" containerID="5414853f490e8a87c10c3d89a154ca1570c654860c27b517cfb1c4c4d29a80b1" Jan 28 17:33:41 crc kubenswrapper[4811]: E0128 17:33:41.214246 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5414853f490e8a87c10c3d89a154ca1570c654860c27b517cfb1c4c4d29a80b1\": container with ID starting with 5414853f490e8a87c10c3d89a154ca1570c654860c27b517cfb1c4c4d29a80b1 not found: ID does not exist" containerID="5414853f490e8a87c10c3d89a154ca1570c654860c27b517cfb1c4c4d29a80b1" Jan 28 17:33:41 crc kubenswrapper[4811]: I0128 17:33:41.214280 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5414853f490e8a87c10c3d89a154ca1570c654860c27b517cfb1c4c4d29a80b1"} err="failed to get container status \"5414853f490e8a87c10c3d89a154ca1570c654860c27b517cfb1c4c4d29a80b1\": rpc error: code = NotFound desc = could not find container \"5414853f490e8a87c10c3d89a154ca1570c654860c27b517cfb1c4c4d29a80b1\": container with ID starting with 5414853f490e8a87c10c3d89a154ca1570c654860c27b517cfb1c4c4d29a80b1 not found: ID does not exist" Jan 28 17:33:41 crc kubenswrapper[4811]: I0128 17:33:41.214301 4811 scope.go:117] "RemoveContainer" containerID="4d3d6c41768033b4931eda844fdcdf735110d55f699ad68c2a5b8b1853e4d712" Jan 28 17:33:41 crc kubenswrapper[4811]: E0128 17:33:41.214776 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d3d6c41768033b4931eda844fdcdf735110d55f699ad68c2a5b8b1853e4d712\": container with ID starting with 4d3d6c41768033b4931eda844fdcdf735110d55f699ad68c2a5b8b1853e4d712 not found: ID does not exist" containerID="4d3d6c41768033b4931eda844fdcdf735110d55f699ad68c2a5b8b1853e4d712" Jan 28 17:33:41 crc kubenswrapper[4811]: I0128 17:33:41.214809 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d3d6c41768033b4931eda844fdcdf735110d55f699ad68c2a5b8b1853e4d712"} err="failed to get container status \"4d3d6c41768033b4931eda844fdcdf735110d55f699ad68c2a5b8b1853e4d712\": rpc error: code = NotFound desc = could not find container \"4d3d6c41768033b4931eda844fdcdf735110d55f699ad68c2a5b8b1853e4d712\": container with ID starting with 4d3d6c41768033b4931eda844fdcdf735110d55f699ad68c2a5b8b1853e4d712 not found: ID does not exist" Jan 28 17:33:42 crc kubenswrapper[4811]: I0128 17:33:42.353730 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="7c66b660-0464-4966-b9e7-9002f3af9163" path="/var/lib/kubelet/pods/7c66b660-0464-4966-b9e7-9002f3af9163/volumes" Jan 28 17:33:42 crc kubenswrapper[4811]: I0128 17:33:42.378408 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7b99fbc5d5-6q4lt" Jan 28 17:33:42 crc kubenswrapper[4811]: I0128 17:33:42.443354 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6f9f967d5-82cqp"] Jan 28 17:33:42 crc kubenswrapper[4811]: I0128 17:33:42.443605 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6f9f967d5-82cqp" podUID="23837f74-0222-4fe3-bc66-0df3b1026530" containerName="dnsmasq-dns" containerID="cri-o://89ca4d0a0886e46907dde1eb000a5e6d1dff2921820dae44a4f44073178f8a31" gracePeriod=10 Jan 28 17:33:42 crc kubenswrapper[4811]: I0128 17:33:42.613347 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-858dc4dc9f-qwrkt"] Jan 28 17:33:42 crc kubenswrapper[4811]: E0128 17:33:42.613836 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c66b660-0464-4966-b9e7-9002f3af9163" containerName="registry-server" Jan 28 17:33:42 crc kubenswrapper[4811]: I0128 17:33:42.613854 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c66b660-0464-4966-b9e7-9002f3af9163" containerName="registry-server" Jan 28 17:33:42 crc kubenswrapper[4811]: E0128 17:33:42.613882 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c66b660-0464-4966-b9e7-9002f3af9163" containerName="extract-content" Jan 28 17:33:42 crc kubenswrapper[4811]: I0128 17:33:42.613889 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c66b660-0464-4966-b9e7-9002f3af9163" containerName="extract-content" Jan 28 17:33:42 crc kubenswrapper[4811]: E0128 17:33:42.613912 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c66b660-0464-4966-b9e7-9002f3af9163" containerName="extract-utilities" Jan 28 17:33:42 crc kubenswrapper[4811]: I0128 17:33:42.613917 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c66b660-0464-4966-b9e7-9002f3af9163" containerName="extract-utilities" Jan 28 17:33:42 crc kubenswrapper[4811]: I0128 17:33:42.614124 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c66b660-0464-4966-b9e7-9002f3af9163" containerName="registry-server" Jan 28 17:33:42 crc kubenswrapper[4811]: I0128 17:33:42.615294 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-858dc4dc9f-qwrkt" Jan 28 17:33:42 crc kubenswrapper[4811]: I0128 17:33:42.629679 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-858dc4dc9f-qwrkt"] Jan 28 17:33:42 crc kubenswrapper[4811]: I0128 17:33:42.691772 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/53564505-e54d-4d5f-84ba-fae0c13c89cb-ovsdbserver-nb\") pod \"dnsmasq-dns-858dc4dc9f-qwrkt\" (UID: \"53564505-e54d-4d5f-84ba-fae0c13c89cb\") " pod="openstack/dnsmasq-dns-858dc4dc9f-qwrkt" Jan 28 17:33:42 crc kubenswrapper[4811]: I0128 17:33:42.691920 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53564505-e54d-4d5f-84ba-fae0c13c89cb-config\") pod \"dnsmasq-dns-858dc4dc9f-qwrkt\" (UID: \"53564505-e54d-4d5f-84ba-fae0c13c89cb\") " pod="openstack/dnsmasq-dns-858dc4dc9f-qwrkt" Jan 28 17:33:42 crc kubenswrapper[4811]: I0128 17:33:42.692017 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fjq7\" (UniqueName: \"kubernetes.io/projected/53564505-e54d-4d5f-84ba-fae0c13c89cb-kube-api-access-7fjq7\") pod \"dnsmasq-dns-858dc4dc9f-qwrkt\" (UID: \"53564505-e54d-4d5f-84ba-fae0c13c89cb\") " pod="openstack/dnsmasq-dns-858dc4dc9f-qwrkt" Jan 28 17:33:42 crc kubenswrapper[4811]: I0128 17:33:42.692060 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/53564505-e54d-4d5f-84ba-fae0c13c89cb-openstack-cell1\") pod \"dnsmasq-dns-858dc4dc9f-qwrkt\" (UID: \"53564505-e54d-4d5f-84ba-fae0c13c89cb\") " pod="openstack/dnsmasq-dns-858dc4dc9f-qwrkt" Jan 28 17:33:42 crc kubenswrapper[4811]: I0128 17:33:42.692226 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/53564505-e54d-4d5f-84ba-fae0c13c89cb-ovsdbserver-sb\") pod \"dnsmasq-dns-858dc4dc9f-qwrkt\" (UID: \"53564505-e54d-4d5f-84ba-fae0c13c89cb\") " pod="openstack/dnsmasq-dns-858dc4dc9f-qwrkt" Jan 28 17:33:42 crc kubenswrapper[4811]: I0128 17:33:42.692274 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/53564505-e54d-4d5f-84ba-fae0c13c89cb-dns-svc\") pod \"dnsmasq-dns-858dc4dc9f-qwrkt\" (UID: \"53564505-e54d-4d5f-84ba-fae0c13c89cb\") " pod="openstack/dnsmasq-dns-858dc4dc9f-qwrkt" Jan 28 17:33:42 crc kubenswrapper[4811]: I0128 17:33:42.794215 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/53564505-e54d-4d5f-84ba-fae0c13c89cb-ovsdbserver-nb\") pod \"dnsmasq-dns-858dc4dc9f-qwrkt\" (UID: \"53564505-e54d-4d5f-84ba-fae0c13c89cb\") " pod="openstack/dnsmasq-dns-858dc4dc9f-qwrkt" Jan 28 17:33:42 crc kubenswrapper[4811]: I0128 17:33:42.794306 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53564505-e54d-4d5f-84ba-fae0c13c89cb-config\") pod \"dnsmasq-dns-858dc4dc9f-qwrkt\" (UID: \"53564505-e54d-4d5f-84ba-fae0c13c89cb\") " pod="openstack/dnsmasq-dns-858dc4dc9f-qwrkt" Jan 28 17:33:42 crc kubenswrapper[4811]: I0128 17:33:42.794337 4811 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-7fjq7\" (UniqueName: \"kubernetes.io/projected/53564505-e54d-4d5f-84ba-fae0c13c89cb-kube-api-access-7fjq7\") pod \"dnsmasq-dns-858dc4dc9f-qwrkt\" (UID: \"53564505-e54d-4d5f-84ba-fae0c13c89cb\") " pod="openstack/dnsmasq-dns-858dc4dc9f-qwrkt" Jan 28 17:33:42 crc kubenswrapper[4811]: I0128 17:33:42.794360 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/53564505-e54d-4d5f-84ba-fae0c13c89cb-openstack-cell1\") pod \"dnsmasq-dns-858dc4dc9f-qwrkt\" (UID: \"53564505-e54d-4d5f-84ba-fae0c13c89cb\") " pod="openstack/dnsmasq-dns-858dc4dc9f-qwrkt" Jan 28 17:33:42 crc kubenswrapper[4811]: I0128 17:33:42.794416 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/53564505-e54d-4d5f-84ba-fae0c13c89cb-ovsdbserver-sb\") pod \"dnsmasq-dns-858dc4dc9f-qwrkt\" (UID: \"53564505-e54d-4d5f-84ba-fae0c13c89cb\") " pod="openstack/dnsmasq-dns-858dc4dc9f-qwrkt" Jan 28 17:33:42 crc kubenswrapper[4811]: I0128 17:33:42.794454 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/53564505-e54d-4d5f-84ba-fae0c13c89cb-dns-svc\") pod \"dnsmasq-dns-858dc4dc9f-qwrkt\" (UID: \"53564505-e54d-4d5f-84ba-fae0c13c89cb\") " pod="openstack/dnsmasq-dns-858dc4dc9f-qwrkt" Jan 28 17:33:42 crc kubenswrapper[4811]: I0128 17:33:42.795134 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53564505-e54d-4d5f-84ba-fae0c13c89cb-config\") pod \"dnsmasq-dns-858dc4dc9f-qwrkt\" (UID: \"53564505-e54d-4d5f-84ba-fae0c13c89cb\") " pod="openstack/dnsmasq-dns-858dc4dc9f-qwrkt" Jan 28 17:33:42 crc kubenswrapper[4811]: I0128 17:33:42.795165 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/53564505-e54d-4d5f-84ba-fae0c13c89cb-ovsdbserver-nb\") pod \"dnsmasq-dns-858dc4dc9f-qwrkt\" (UID: \"53564505-e54d-4d5f-84ba-fae0c13c89cb\") " pod="openstack/dnsmasq-dns-858dc4dc9f-qwrkt" Jan 28 17:33:42 crc kubenswrapper[4811]: I0128 17:33:42.795271 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/53564505-e54d-4d5f-84ba-fae0c13c89cb-dns-svc\") pod \"dnsmasq-dns-858dc4dc9f-qwrkt\" (UID: \"53564505-e54d-4d5f-84ba-fae0c13c89cb\") " pod="openstack/dnsmasq-dns-858dc4dc9f-qwrkt" Jan 28 17:33:42 crc kubenswrapper[4811]: I0128 17:33:42.795721 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/53564505-e54d-4d5f-84ba-fae0c13c89cb-openstack-cell1\") pod \"dnsmasq-dns-858dc4dc9f-qwrkt\" (UID: \"53564505-e54d-4d5f-84ba-fae0c13c89cb\") " pod="openstack/dnsmasq-dns-858dc4dc9f-qwrkt" Jan 28 17:33:42 crc kubenswrapper[4811]: I0128 17:33:42.795740 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/53564505-e54d-4d5f-84ba-fae0c13c89cb-ovsdbserver-sb\") pod \"dnsmasq-dns-858dc4dc9f-qwrkt\" (UID: \"53564505-e54d-4d5f-84ba-fae0c13c89cb\") " pod="openstack/dnsmasq-dns-858dc4dc9f-qwrkt" Jan 28 17:33:42 crc kubenswrapper[4811]: I0128 17:33:42.817832 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fjq7\" (UniqueName: 
\"kubernetes.io/projected/53564505-e54d-4d5f-84ba-fae0c13c89cb-kube-api-access-7fjq7\") pod \"dnsmasq-dns-858dc4dc9f-qwrkt\" (UID: \"53564505-e54d-4d5f-84ba-fae0c13c89cb\") " pod="openstack/dnsmasq-dns-858dc4dc9f-qwrkt" Jan 28 17:33:43 crc kubenswrapper[4811]: I0128 17:33:43.005446 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-858dc4dc9f-qwrkt" Jan 28 17:33:43 crc kubenswrapper[4811]: I0128 17:33:43.126544 4811 generic.go:334] "Generic (PLEG): container finished" podID="23837f74-0222-4fe3-bc66-0df3b1026530" containerID="89ca4d0a0886e46907dde1eb000a5e6d1dff2921820dae44a4f44073178f8a31" exitCode=0 Jan 28 17:33:43 crc kubenswrapper[4811]: I0128 17:33:43.127005 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f9f967d5-82cqp" event={"ID":"23837f74-0222-4fe3-bc66-0df3b1026530","Type":"ContainerDied","Data":"89ca4d0a0886e46907dde1eb000a5e6d1dff2921820dae44a4f44073178f8a31"} Jan 28 17:33:43 crc kubenswrapper[4811]: I0128 17:33:43.287523 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6f9f967d5-82cqp" podUID="23837f74-0222-4fe3-bc66-0df3b1026530" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.1.148:5353: connect: connection refused" Jan 28 17:33:43 crc kubenswrapper[4811]: I0128 17:33:43.526454 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-858dc4dc9f-qwrkt"] Jan 28 17:33:43 crc kubenswrapper[4811]: I0128 17:33:43.719737 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6f9f967d5-82cqp" Jan 28 17:33:43 crc kubenswrapper[4811]: I0128 17:33:43.830220 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/23837f74-0222-4fe3-bc66-0df3b1026530-dns-svc\") pod \"23837f74-0222-4fe3-bc66-0df3b1026530\" (UID: \"23837f74-0222-4fe3-bc66-0df3b1026530\") " Jan 28 17:33:43 crc kubenswrapper[4811]: I0128 17:33:43.830280 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/23837f74-0222-4fe3-bc66-0df3b1026530-ovsdbserver-sb\") pod \"23837f74-0222-4fe3-bc66-0df3b1026530\" (UID: \"23837f74-0222-4fe3-bc66-0df3b1026530\") " Jan 28 17:33:43 crc kubenswrapper[4811]: I0128 17:33:43.830347 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/23837f74-0222-4fe3-bc66-0df3b1026530-config\") pod \"23837f74-0222-4fe3-bc66-0df3b1026530\" (UID: \"23837f74-0222-4fe3-bc66-0df3b1026530\") " Jan 28 17:33:43 crc kubenswrapper[4811]: I0128 17:33:43.830386 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/23837f74-0222-4fe3-bc66-0df3b1026530-ovsdbserver-nb\") pod \"23837f74-0222-4fe3-bc66-0df3b1026530\" (UID: \"23837f74-0222-4fe3-bc66-0df3b1026530\") " Jan 28 17:33:43 crc kubenswrapper[4811]: I0128 17:33:43.830595 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wfk2x\" (UniqueName: \"kubernetes.io/projected/23837f74-0222-4fe3-bc66-0df3b1026530-kube-api-access-wfk2x\") pod \"23837f74-0222-4fe3-bc66-0df3b1026530\" (UID: \"23837f74-0222-4fe3-bc66-0df3b1026530\") " Jan 28 17:33:43 crc kubenswrapper[4811]: I0128 17:33:43.840244 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/23837f74-0222-4fe3-bc66-0df3b1026530-kube-api-access-wfk2x" (OuterVolumeSpecName: "kube-api-access-wfk2x") pod "23837f74-0222-4fe3-bc66-0df3b1026530" (UID: "23837f74-0222-4fe3-bc66-0df3b1026530"). InnerVolumeSpecName "kube-api-access-wfk2x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:33:43 crc kubenswrapper[4811]: I0128 17:33:43.932139 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23837f74-0222-4fe3-bc66-0df3b1026530-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "23837f74-0222-4fe3-bc66-0df3b1026530" (UID: "23837f74-0222-4fe3-bc66-0df3b1026530"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:33:43 crc kubenswrapper[4811]: I0128 17:33:43.932491 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/23837f74-0222-4fe3-bc66-0df3b1026530-ovsdbserver-nb\") pod \"23837f74-0222-4fe3-bc66-0df3b1026530\" (UID: \"23837f74-0222-4fe3-bc66-0df3b1026530\") " Jan 28 17:33:43 crc kubenswrapper[4811]: W0128 17:33:43.932832 4811 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/23837f74-0222-4fe3-bc66-0df3b1026530/volumes/kubernetes.io~configmap/ovsdbserver-nb Jan 28 17:33:43 crc kubenswrapper[4811]: I0128 17:33:43.932869 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23837f74-0222-4fe3-bc66-0df3b1026530-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "23837f74-0222-4fe3-bc66-0df3b1026530" (UID: "23837f74-0222-4fe3-bc66-0df3b1026530"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:33:43 crc kubenswrapper[4811]: I0128 17:33:43.933678 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wfk2x\" (UniqueName: \"kubernetes.io/projected/23837f74-0222-4fe3-bc66-0df3b1026530-kube-api-access-wfk2x\") on node \"crc\" DevicePath \"\"" Jan 28 17:33:43 crc kubenswrapper[4811]: I0128 17:33:43.933710 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/23837f74-0222-4fe3-bc66-0df3b1026530-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 28 17:33:43 crc kubenswrapper[4811]: I0128 17:33:43.937342 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23837f74-0222-4fe3-bc66-0df3b1026530-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "23837f74-0222-4fe3-bc66-0df3b1026530" (UID: "23837f74-0222-4fe3-bc66-0df3b1026530"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:33:43 crc kubenswrapper[4811]: I0128 17:33:43.941165 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23837f74-0222-4fe3-bc66-0df3b1026530-config" (OuterVolumeSpecName: "config") pod "23837f74-0222-4fe3-bc66-0df3b1026530" (UID: "23837f74-0222-4fe3-bc66-0df3b1026530"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:33:43 crc kubenswrapper[4811]: I0128 17:33:43.948574 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23837f74-0222-4fe3-bc66-0df3b1026530-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "23837f74-0222-4fe3-bc66-0df3b1026530" (UID: "23837f74-0222-4fe3-bc66-0df3b1026530"). 
InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:33:44 crc kubenswrapper[4811]: I0128 17:33:44.036216 4811 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/23837f74-0222-4fe3-bc66-0df3b1026530-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 28 17:33:44 crc kubenswrapper[4811]: I0128 17:33:44.036265 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/23837f74-0222-4fe3-bc66-0df3b1026530-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 28 17:33:44 crc kubenswrapper[4811]: I0128 17:33:44.036279 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/23837f74-0222-4fe3-bc66-0df3b1026530-config\") on node \"crc\" DevicePath \"\"" Jan 28 17:33:44 crc kubenswrapper[4811]: I0128 17:33:44.137384 4811 generic.go:334] "Generic (PLEG): container finished" podID="53564505-e54d-4d5f-84ba-fae0c13c89cb" containerID="ea804122d99b0bd0b687786889ad8cb55c6c8110de20b5038f180062cdbb6bf7" exitCode=0 Jan 28 17:33:44 crc kubenswrapper[4811]: I0128 17:33:44.137445 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-858dc4dc9f-qwrkt" event={"ID":"53564505-e54d-4d5f-84ba-fae0c13c89cb","Type":"ContainerDied","Data":"ea804122d99b0bd0b687786889ad8cb55c6c8110de20b5038f180062cdbb6bf7"} Jan 28 17:33:44 crc kubenswrapper[4811]: I0128 17:33:44.137495 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-858dc4dc9f-qwrkt" event={"ID":"53564505-e54d-4d5f-84ba-fae0c13c89cb","Type":"ContainerStarted","Data":"191f51254348b3108333e70d7eba5ec4e531782df328f4dfa1d3133ce8656a0e"} Jan 28 17:33:44 crc kubenswrapper[4811]: I0128 17:33:44.142731 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f9f967d5-82cqp" event={"ID":"23837f74-0222-4fe3-bc66-0df3b1026530","Type":"ContainerDied","Data":"59fd988928963643d32df5358fa5877b54decd8fdc77070dbf153e0341b979fa"} Jan 28 17:33:44 crc kubenswrapper[4811]: I0128 17:33:44.142786 4811 scope.go:117] "RemoveContainer" containerID="89ca4d0a0886e46907dde1eb000a5e6d1dff2921820dae44a4f44073178f8a31" Jan 28 17:33:44 crc kubenswrapper[4811]: I0128 17:33:44.142928 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6f9f967d5-82cqp" Jan 28 17:33:44 crc kubenswrapper[4811]: I0128 17:33:44.184424 4811 scope.go:117] "RemoveContainer" containerID="7e1fac28c2dfc8c08102e6edcb9ef5127659ba0f0154377e07d61ff5d1485af6" Jan 28 17:33:44 crc kubenswrapper[4811]: I0128 17:33:44.192613 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6f9f967d5-82cqp"] Jan 28 17:33:44 crc kubenswrapper[4811]: I0128 17:33:44.205407 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6f9f967d5-82cqp"] Jan 28 17:33:44 crc kubenswrapper[4811]: I0128 17:33:44.357315 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23837f74-0222-4fe3-bc66-0df3b1026530" path="/var/lib/kubelet/pods/23837f74-0222-4fe3-bc66-0df3b1026530/volumes" Jan 28 17:33:45 crc kubenswrapper[4811]: I0128 17:33:45.154693 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-858dc4dc9f-qwrkt" event={"ID":"53564505-e54d-4d5f-84ba-fae0c13c89cb","Type":"ContainerStarted","Data":"3e0ec51ee2d4461506837b6107b111bb8f57f9d86467f455e9ae198333200d51"} Jan 28 17:33:45 crc kubenswrapper[4811]: I0128 17:33:45.155135 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-858dc4dc9f-qwrkt" Jan 28 17:33:45 crc kubenswrapper[4811]: I0128 17:33:45.176491 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-858dc4dc9f-qwrkt" podStartSLOduration=3.176465718 podStartE2EDuration="3.176465718s" podCreationTimestamp="2026-01-28 17:33:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:33:45.173319632 +0000 UTC m=+6517.927683205" watchObservedRunningTime="2026-01-28 17:33:45.176465718 +0000 UTC m=+6517.930829301" Jan 28 17:33:48 crc kubenswrapper[4811]: I0128 17:33:48.346019 4811 scope.go:117] "RemoveContainer" containerID="c8f31485a8ab10a9c157ae14e2cb2f7e512fa0e9a55fee158caf88525ec54b35" Jan 28 17:33:48 crc kubenswrapper[4811]: E0128 17:33:48.346770 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:33:53 crc kubenswrapper[4811]: I0128 17:33:53.006729 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-858dc4dc9f-qwrkt" Jan 28 17:33:53 crc kubenswrapper[4811]: I0128 17:33:53.093212 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7b99fbc5d5-6q4lt"] Jan 28 17:33:53 crc kubenswrapper[4811]: I0128 17:33:53.093916 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7b99fbc5d5-6q4lt" podUID="e8879fad-c29b-43a3-a9a7-155c7d4f3ffd" containerName="dnsmasq-dns" containerID="cri-o://d651edd9f369ba17e88bae5001c44e34a4fb0c496fe8fd10209c2dbfaab7d298" gracePeriod=10 Jan 28 17:33:53 crc kubenswrapper[4811]: I0128 17:33:53.240797 4811 generic.go:334] "Generic (PLEG): container finished" podID="e8879fad-c29b-43a3-a9a7-155c7d4f3ffd" containerID="d651edd9f369ba17e88bae5001c44e34a4fb0c496fe8fd10209c2dbfaab7d298" exitCode=0 Jan 28 17:33:53 crc 
kubenswrapper[4811]: I0128 17:33:53.240854 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b99fbc5d5-6q4lt" event={"ID":"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd","Type":"ContainerDied","Data":"d651edd9f369ba17e88bae5001c44e34a4fb0c496fe8fd10209c2dbfaab7d298"} Jan 28 17:33:53 crc kubenswrapper[4811]: I0128 17:33:53.731643 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b99fbc5d5-6q4lt" Jan 28 17:33:53 crc kubenswrapper[4811]: I0128 17:33:53.807607 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-dns-svc\") pod \"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd\" (UID: \"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd\") " Jan 28 17:33:53 crc kubenswrapper[4811]: I0128 17:33:53.807700 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-openstack-cell1\") pod \"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd\" (UID: \"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd\") " Jan 28 17:33:53 crc kubenswrapper[4811]: I0128 17:33:53.807788 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb2b4\" (UniqueName: \"kubernetes.io/projected/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-kube-api-access-sb2b4\") pod \"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd\" (UID: \"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd\") " Jan 28 17:33:53 crc kubenswrapper[4811]: I0128 17:33:53.807880 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-config\") pod \"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd\" (UID: \"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd\") " Jan 28 17:33:53 crc kubenswrapper[4811]: I0128 17:33:53.807973 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-ovsdbserver-nb\") pod \"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd\" (UID: \"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd\") " Jan 28 17:33:53 crc kubenswrapper[4811]: I0128 17:33:53.808021 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-ovsdbserver-sb\") pod \"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd\" (UID: \"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd\") " Jan 28 17:33:53 crc kubenswrapper[4811]: I0128 17:33:53.819290 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-kube-api-access-sb2b4" (OuterVolumeSpecName: "kube-api-access-sb2b4") pod "e8879fad-c29b-43a3-a9a7-155c7d4f3ffd" (UID: "e8879fad-c29b-43a3-a9a7-155c7d4f3ffd"). InnerVolumeSpecName "kube-api-access-sb2b4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:33:53 crc kubenswrapper[4811]: I0128 17:33:53.877716 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e8879fad-c29b-43a3-a9a7-155c7d4f3ffd" (UID: "e8879fad-c29b-43a3-a9a7-155c7d4f3ffd"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:33:53 crc kubenswrapper[4811]: I0128 17:33:53.886311 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-config" (OuterVolumeSpecName: "config") pod "e8879fad-c29b-43a3-a9a7-155c7d4f3ffd" (UID: "e8879fad-c29b-43a3-a9a7-155c7d4f3ffd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:33:53 crc kubenswrapper[4811]: I0128 17:33:53.892824 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-openstack-cell1" (OuterVolumeSpecName: "openstack-cell1") pod "e8879fad-c29b-43a3-a9a7-155c7d4f3ffd" (UID: "e8879fad-c29b-43a3-a9a7-155c7d4f3ffd"). InnerVolumeSpecName "openstack-cell1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:33:53 crc kubenswrapper[4811]: I0128 17:33:53.893479 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e8879fad-c29b-43a3-a9a7-155c7d4f3ffd" (UID: "e8879fad-c29b-43a3-a9a7-155c7d4f3ffd"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:33:53 crc kubenswrapper[4811]: I0128 17:33:53.910894 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 28 17:33:53 crc kubenswrapper[4811]: I0128 17:33:53.911180 4811 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 28 17:33:53 crc kubenswrapper[4811]: I0128 17:33:53.911257 4811 reconciler_common.go:293] "Volume detached for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 28 17:33:53 crc kubenswrapper[4811]: I0128 17:33:53.911330 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb2b4\" (UniqueName: \"kubernetes.io/projected/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-kube-api-access-sb2b4\") on node \"crc\" DevicePath \"\"" Jan 28 17:33:53 crc kubenswrapper[4811]: I0128 17:33:53.911411 4811 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-config\") on node \"crc\" DevicePath \"\"" Jan 28 17:33:53 crc kubenswrapper[4811]: I0128 17:33:53.911625 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e8879fad-c29b-43a3-a9a7-155c7d4f3ffd" (UID: "e8879fad-c29b-43a3-a9a7-155c7d4f3ffd"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 17:33:54 crc kubenswrapper[4811]: I0128 17:33:54.013804 4811 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 28 17:33:54 crc kubenswrapper[4811]: I0128 17:33:54.254784 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b99fbc5d5-6q4lt" event={"ID":"e8879fad-c29b-43a3-a9a7-155c7d4f3ffd","Type":"ContainerDied","Data":"0124170dc0bb36244ec6f35a7219f4b5c21285316e33fa529ad7ff0463a08613"} Jan 28 17:33:54 crc kubenswrapper[4811]: I0128 17:33:54.254889 4811 scope.go:117] "RemoveContainer" containerID="d651edd9f369ba17e88bae5001c44e34a4fb0c496fe8fd10209c2dbfaab7d298" Jan 28 17:33:54 crc kubenswrapper[4811]: I0128 17:33:54.255197 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b99fbc5d5-6q4lt" Jan 28 17:33:54 crc kubenswrapper[4811]: I0128 17:33:54.303925 4811 scope.go:117] "RemoveContainer" containerID="5ac74cb2fbe6e2eb6b4bed63960306ce721da646ffadf37079a636d796e9e48e" Jan 28 17:33:54 crc kubenswrapper[4811]: I0128 17:33:54.315216 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7b99fbc5d5-6q4lt"] Jan 28 17:33:54 crc kubenswrapper[4811]: I0128 17:33:54.325938 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7b99fbc5d5-6q4lt"] Jan 28 17:33:54 crc kubenswrapper[4811]: I0128 17:33:54.359963 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8879fad-c29b-43a3-a9a7-155c7d4f3ffd" path="/var/lib/kubelet/pods/e8879fad-c29b-43a3-a9a7-155c7d4f3ffd/volumes" Jan 28 17:33:59 crc kubenswrapper[4811]: I0128 17:33:59.339693 4811 scope.go:117] "RemoveContainer" containerID="c8f31485a8ab10a9c157ae14e2cb2f7e512fa0e9a55fee158caf88525ec54b35" Jan 28 17:33:59 crc kubenswrapper[4811]: E0128 17:33:59.340593 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:34:03 crc kubenswrapper[4811]: I0128 17:34:03.674778 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6"] Jan 28 17:34:03 crc kubenswrapper[4811]: E0128 17:34:03.676024 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8879fad-c29b-43a3-a9a7-155c7d4f3ffd" containerName="dnsmasq-dns" Jan 28 17:34:03 crc kubenswrapper[4811]: I0128 17:34:03.676044 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8879fad-c29b-43a3-a9a7-155c7d4f3ffd" containerName="dnsmasq-dns" Jan 28 17:34:03 crc kubenswrapper[4811]: E0128 17:34:03.676056 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23837f74-0222-4fe3-bc66-0df3b1026530" containerName="init" Jan 28 17:34:03 crc kubenswrapper[4811]: I0128 17:34:03.676064 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="23837f74-0222-4fe3-bc66-0df3b1026530" containerName="init" Jan 28 17:34:03 crc kubenswrapper[4811]: E0128 17:34:03.676089 4811 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="23837f74-0222-4fe3-bc66-0df3b1026530" containerName="dnsmasq-dns" Jan 28 17:34:03 crc kubenswrapper[4811]: I0128 17:34:03.676098 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="23837f74-0222-4fe3-bc66-0df3b1026530" containerName="dnsmasq-dns" Jan 28 17:34:03 crc kubenswrapper[4811]: E0128 17:34:03.676131 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8879fad-c29b-43a3-a9a7-155c7d4f3ffd" containerName="init" Jan 28 17:34:03 crc kubenswrapper[4811]: I0128 17:34:03.676138 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8879fad-c29b-43a3-a9a7-155c7d4f3ffd" containerName="init" Jan 28 17:34:03 crc kubenswrapper[4811]: I0128 17:34:03.683848 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8879fad-c29b-43a3-a9a7-155c7d4f3ffd" containerName="dnsmasq-dns" Jan 28 17:34:03 crc kubenswrapper[4811]: I0128 17:34:03.683956 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="23837f74-0222-4fe3-bc66-0df3b1026530" containerName="dnsmasq-dns" Jan 28 17:34:03 crc kubenswrapper[4811]: I0128 17:34:03.685473 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6" Jan 28 17:34:03 crc kubenswrapper[4811]: I0128 17:34:03.689588 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 28 17:34:03 crc kubenswrapper[4811]: I0128 17:34:03.690031 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-856xk" Jan 28 17:34:03 crc kubenswrapper[4811]: I0128 17:34:03.690382 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 28 17:34:03 crc kubenswrapper[4811]: I0128 17:34:03.690598 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 28 17:34:03 crc kubenswrapper[4811]: I0128 17:34:03.704182 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6"] Jan 28 17:34:03 crc kubenswrapper[4811]: I0128 17:34:03.733636 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxh5k\" (UniqueName: \"kubernetes.io/projected/b4d54df3-1c51-49c0-af4a-0f02bd87d0e0-kube-api-access-zxh5k\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6\" (UID: \"b4d54df3-1c51-49c0-af4a-0f02bd87d0e0\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6" Jan 28 17:34:03 crc kubenswrapper[4811]: I0128 17:34:03.733692 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/b4d54df3-1c51-49c0-af4a-0f02bd87d0e0-ssh-key-openstack-cell1\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6\" (UID: \"b4d54df3-1c51-49c0-af4a-0f02bd87d0e0\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6" Jan 28 17:34:03 crc kubenswrapper[4811]: I0128 17:34:03.733723 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b4d54df3-1c51-49c0-af4a-0f02bd87d0e0-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6\" (UID: \"b4d54df3-1c51-49c0-af4a-0f02bd87d0e0\") " 
pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6" Jan 28 17:34:03 crc kubenswrapper[4811]: I0128 17:34:03.733792 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4d54df3-1c51-49c0-af4a-0f02bd87d0e0-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6\" (UID: \"b4d54df3-1c51-49c0-af4a-0f02bd87d0e0\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6" Jan 28 17:34:03 crc kubenswrapper[4811]: I0128 17:34:03.733900 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b4d54df3-1c51-49c0-af4a-0f02bd87d0e0-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6\" (UID: \"b4d54df3-1c51-49c0-af4a-0f02bd87d0e0\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6" Jan 28 17:34:03 crc kubenswrapper[4811]: I0128 17:34:03.836308 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b4d54df3-1c51-49c0-af4a-0f02bd87d0e0-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6\" (UID: \"b4d54df3-1c51-49c0-af4a-0f02bd87d0e0\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6" Jan 28 17:34:03 crc kubenswrapper[4811]: I0128 17:34:03.836419 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxh5k\" (UniqueName: \"kubernetes.io/projected/b4d54df3-1c51-49c0-af4a-0f02bd87d0e0-kube-api-access-zxh5k\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6\" (UID: \"b4d54df3-1c51-49c0-af4a-0f02bd87d0e0\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6" Jan 28 17:34:03 crc kubenswrapper[4811]: I0128 17:34:03.836477 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/b4d54df3-1c51-49c0-af4a-0f02bd87d0e0-ssh-key-openstack-cell1\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6\" (UID: \"b4d54df3-1c51-49c0-af4a-0f02bd87d0e0\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6" Jan 28 17:34:03 crc kubenswrapper[4811]: I0128 17:34:03.836513 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b4d54df3-1c51-49c0-af4a-0f02bd87d0e0-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6\" (UID: \"b4d54df3-1c51-49c0-af4a-0f02bd87d0e0\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6" Jan 28 17:34:03 crc kubenswrapper[4811]: I0128 17:34:03.836601 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4d54df3-1c51-49c0-af4a-0f02bd87d0e0-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6\" (UID: \"b4d54df3-1c51-49c0-af4a-0f02bd87d0e0\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6" Jan 28 17:34:03 crc kubenswrapper[4811]: I0128 17:34:03.842618 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/b4d54df3-1c51-49c0-af4a-0f02bd87d0e0-ssh-key-openstack-cell1\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6\" (UID: \"b4d54df3-1c51-49c0-af4a-0f02bd87d0e0\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6" Jan 28 17:34:03 crc kubenswrapper[4811]: I0128 17:34:03.842656 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b4d54df3-1c51-49c0-af4a-0f02bd87d0e0-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6\" (UID: \"b4d54df3-1c51-49c0-af4a-0f02bd87d0e0\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6" Jan 28 17:34:03 crc kubenswrapper[4811]: I0128 17:34:03.843694 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b4d54df3-1c51-49c0-af4a-0f02bd87d0e0-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6\" (UID: \"b4d54df3-1c51-49c0-af4a-0f02bd87d0e0\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6" Jan 28 17:34:03 crc kubenswrapper[4811]: I0128 17:34:03.843994 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4d54df3-1c51-49c0-af4a-0f02bd87d0e0-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6\" (UID: \"b4d54df3-1c51-49c0-af4a-0f02bd87d0e0\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6" Jan 28 17:34:03 crc kubenswrapper[4811]: I0128 17:34:03.857766 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxh5k\" (UniqueName: \"kubernetes.io/projected/b4d54df3-1c51-49c0-af4a-0f02bd87d0e0-kube-api-access-zxh5k\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6\" (UID: \"b4d54df3-1c51-49c0-af4a-0f02bd87d0e0\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6" Jan 28 17:34:04 crc kubenswrapper[4811]: I0128 17:34:04.026156 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6" Jan 28 17:34:04 crc kubenswrapper[4811]: I0128 17:34:04.625533 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6"] Jan 28 17:34:04 crc kubenswrapper[4811]: W0128 17:34:04.631178 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb4d54df3_1c51_49c0_af4a_0f02bd87d0e0.slice/crio-2b6505fcc858fd28058948dad3633efc756729c447a53ebb2d4a9cda11c16c5a WatchSource:0}: Error finding container 2b6505fcc858fd28058948dad3633efc756729c447a53ebb2d4a9cda11c16c5a: Status 404 returned error can't find the container with id 2b6505fcc858fd28058948dad3633efc756729c447a53ebb2d4a9cda11c16c5a Jan 28 17:34:05 crc kubenswrapper[4811]: I0128 17:34:05.364099 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6" event={"ID":"b4d54df3-1c51-49c0-af4a-0f02bd87d0e0","Type":"ContainerStarted","Data":"2b6505fcc858fd28058948dad3633efc756729c447a53ebb2d4a9cda11c16c5a"} Jan 28 17:34:11 crc kubenswrapper[4811]: I0128 17:34:11.339784 4811 scope.go:117] "RemoveContainer" containerID="c8f31485a8ab10a9c157ae14e2cb2f7e512fa0e9a55fee158caf88525ec54b35" Jan 28 17:34:11 crc kubenswrapper[4811]: E0128 17:34:11.340713 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:34:13 crc kubenswrapper[4811]: I0128 17:34:13.615061 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 28 17:34:14 crc kubenswrapper[4811]: I0128 17:34:14.482805 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6" event={"ID":"b4d54df3-1c51-49c0-af4a-0f02bd87d0e0","Type":"ContainerStarted","Data":"45bfa8544a2e6a399348822417c837dc9e56c6ee3fe6024ce8d7a382fbbbef60"} Jan 28 17:34:14 crc kubenswrapper[4811]: I0128 17:34:14.516995 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6" podStartSLOduration=2.5385180590000003 podStartE2EDuration="11.516932765s" podCreationTimestamp="2026-01-28 17:34:03 +0000 UTC" firstStartedPulling="2026-01-28 17:34:04.633562225 +0000 UTC m=+6537.387925798" lastFinishedPulling="2026-01-28 17:34:13.611976921 +0000 UTC m=+6546.366340504" observedRunningTime="2026-01-28 17:34:14.502195846 +0000 UTC m=+6547.256559459" watchObservedRunningTime="2026-01-28 17:34:14.516932765 +0000 UTC m=+6547.271296368" Jan 28 17:34:22 crc kubenswrapper[4811]: I0128 17:34:22.339826 4811 scope.go:117] "RemoveContainer" containerID="c8f31485a8ab10a9c157ae14e2cb2f7e512fa0e9a55fee158caf88525ec54b35" Jan 28 17:34:22 crc kubenswrapper[4811]: E0128 17:34:22.340800 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:34:27 crc kubenswrapper[4811]: I0128 17:34:27.614377 4811 generic.go:334] "Generic (PLEG): container finished" podID="b4d54df3-1c51-49c0-af4a-0f02bd87d0e0" containerID="45bfa8544a2e6a399348822417c837dc9e56c6ee3fe6024ce8d7a382fbbbef60" exitCode=0 Jan 28 17:34:27 crc kubenswrapper[4811]: I0128 17:34:27.614456 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6" event={"ID":"b4d54df3-1c51-49c0-af4a-0f02bd87d0e0","Type":"ContainerDied","Data":"45bfa8544a2e6a399348822417c837dc9e56c6ee3fe6024ce8d7a382fbbbef60"} Jan 28 17:34:29 crc kubenswrapper[4811]: I0128 17:34:29.047467 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-db-create-bpbqs"] Jan 28 17:34:29 crc kubenswrapper[4811]: I0128 17:34:29.059215 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-db-create-bpbqs"] Jan 28 17:34:29 crc kubenswrapper[4811]: I0128 17:34:29.140454 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6" Jan 28 17:34:29 crc kubenswrapper[4811]: I0128 17:34:29.274605 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b4d54df3-1c51-49c0-af4a-0f02bd87d0e0-ceph\") pod \"b4d54df3-1c51-49c0-af4a-0f02bd87d0e0\" (UID: \"b4d54df3-1c51-49c0-af4a-0f02bd87d0e0\") " Jan 28 17:34:29 crc kubenswrapper[4811]: I0128 17:34:29.274694 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4d54df3-1c51-49c0-af4a-0f02bd87d0e0-pre-adoption-validation-combined-ca-bundle\") pod \"b4d54df3-1c51-49c0-af4a-0f02bd87d0e0\" (UID: \"b4d54df3-1c51-49c0-af4a-0f02bd87d0e0\") " Jan 28 17:34:29 crc kubenswrapper[4811]: I0128 17:34:29.274846 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxh5k\" (UniqueName: \"kubernetes.io/projected/b4d54df3-1c51-49c0-af4a-0f02bd87d0e0-kube-api-access-zxh5k\") pod \"b4d54df3-1c51-49c0-af4a-0f02bd87d0e0\" (UID: \"b4d54df3-1c51-49c0-af4a-0f02bd87d0e0\") " Jan 28 17:34:29 crc kubenswrapper[4811]: I0128 17:34:29.274870 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/b4d54df3-1c51-49c0-af4a-0f02bd87d0e0-ssh-key-openstack-cell1\") pod \"b4d54df3-1c51-49c0-af4a-0f02bd87d0e0\" (UID: \"b4d54df3-1c51-49c0-af4a-0f02bd87d0e0\") " Jan 28 17:34:29 crc kubenswrapper[4811]: I0128 17:34:29.275010 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b4d54df3-1c51-49c0-af4a-0f02bd87d0e0-inventory\") pod \"b4d54df3-1c51-49c0-af4a-0f02bd87d0e0\" (UID: \"b4d54df3-1c51-49c0-af4a-0f02bd87d0e0\") " Jan 28 17:34:29 crc kubenswrapper[4811]: I0128 17:34:29.280801 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4d54df3-1c51-49c0-af4a-0f02bd87d0e0-ceph" (OuterVolumeSpecName: "ceph") pod "b4d54df3-1c51-49c0-af4a-0f02bd87d0e0" (UID: "b4d54df3-1c51-49c0-af4a-0f02bd87d0e0"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:34:29 crc kubenswrapper[4811]: I0128 17:34:29.280845 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4d54df3-1c51-49c0-af4a-0f02bd87d0e0-pre-adoption-validation-combined-ca-bundle" (OuterVolumeSpecName: "pre-adoption-validation-combined-ca-bundle") pod "b4d54df3-1c51-49c0-af4a-0f02bd87d0e0" (UID: "b4d54df3-1c51-49c0-af4a-0f02bd87d0e0"). InnerVolumeSpecName "pre-adoption-validation-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:34:29 crc kubenswrapper[4811]: I0128 17:34:29.281249 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4d54df3-1c51-49c0-af4a-0f02bd87d0e0-kube-api-access-zxh5k" (OuterVolumeSpecName: "kube-api-access-zxh5k") pod "b4d54df3-1c51-49c0-af4a-0f02bd87d0e0" (UID: "b4d54df3-1c51-49c0-af4a-0f02bd87d0e0"). InnerVolumeSpecName "kube-api-access-zxh5k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:34:29 crc kubenswrapper[4811]: I0128 17:34:29.306966 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4d54df3-1c51-49c0-af4a-0f02bd87d0e0-inventory" (OuterVolumeSpecName: "inventory") pod "b4d54df3-1c51-49c0-af4a-0f02bd87d0e0" (UID: "b4d54df3-1c51-49c0-af4a-0f02bd87d0e0"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:34:29 crc kubenswrapper[4811]: I0128 17:34:29.316204 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4d54df3-1c51-49c0-af4a-0f02bd87d0e0-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "b4d54df3-1c51-49c0-af4a-0f02bd87d0e0" (UID: "b4d54df3-1c51-49c0-af4a-0f02bd87d0e0"). InnerVolumeSpecName "ssh-key-openstack-cell1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:34:29 crc kubenswrapper[4811]: I0128 17:34:29.377707 4811 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b4d54df3-1c51-49c0-af4a-0f02bd87d0e0-inventory\") on node \"crc\" DevicePath \"\"" Jan 28 17:34:29 crc kubenswrapper[4811]: I0128 17:34:29.377745 4811 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b4d54df3-1c51-49c0-af4a-0f02bd87d0e0-ceph\") on node \"crc\" DevicePath \"\"" Jan 28 17:34:29 crc kubenswrapper[4811]: I0128 17:34:29.377756 4811 reconciler_common.go:293] "Volume detached for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4d54df3-1c51-49c0-af4a-0f02bd87d0e0-pre-adoption-validation-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 17:34:29 crc kubenswrapper[4811]: I0128 17:34:29.377767 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxh5k\" (UniqueName: \"kubernetes.io/projected/b4d54df3-1c51-49c0-af4a-0f02bd87d0e0-kube-api-access-zxh5k\") on node \"crc\" DevicePath \"\"" Jan 28 17:34:29 crc kubenswrapper[4811]: I0128 17:34:29.377777 4811 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/b4d54df3-1c51-49c0-af4a-0f02bd87d0e0-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 28 17:34:29 crc kubenswrapper[4811]: I0128 17:34:29.638556 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6" event={"ID":"b4d54df3-1c51-49c0-af4a-0f02bd87d0e0","Type":"ContainerDied","Data":"2b6505fcc858fd28058948dad3633efc756729c447a53ebb2d4a9cda11c16c5a"} Jan 28 17:34:29 crc kubenswrapper[4811]: I0128 17:34:29.638606 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2b6505fcc858fd28058948dad3633efc756729c447a53ebb2d4a9cda11c16c5a" Jan 28 17:34:29 crc kubenswrapper[4811]: I0128 17:34:29.638988 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6" Jan 28 17:34:30 crc kubenswrapper[4811]: I0128 17:34:30.051064 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-5e0a-account-create-update-jd6sd"] Jan 28 17:34:30 crc kubenswrapper[4811]: I0128 17:34:30.061342 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-5e0a-account-create-update-jd6sd"] Jan 28 17:34:30 crc kubenswrapper[4811]: I0128 17:34:30.351035 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="407ae7f9-9fcc-4b3d-be4d-9bd254e95034" path="/var/lib/kubelet/pods/407ae7f9-9fcc-4b3d-be4d-9bd254e95034/volumes" Jan 28 17:34:30 crc kubenswrapper[4811]: I0128 17:34:30.353404 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="42475e6d-089d-42b4-9036-b625afeeb9fa" path="/var/lib/kubelet/pods/42475e6d-089d-42b4-9036-b625afeeb9fa/volumes" Jan 28 17:34:34 crc kubenswrapper[4811]: I0128 17:34:34.339861 4811 scope.go:117] "RemoveContainer" containerID="c8f31485a8ab10a9c157ae14e2cb2f7e512fa0e9a55fee158caf88525ec54b35" Jan 28 17:34:34 crc kubenswrapper[4811]: E0128 17:34:34.340674 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:34:34 crc kubenswrapper[4811]: I0128 17:34:34.525957 4811 scope.go:117] "RemoveContainer" containerID="074c3bae02f7dce5a65ab1bf568832256a8b7a80534b483c11151f66feaafc13" Jan 28 17:34:34 crc kubenswrapper[4811]: I0128 17:34:34.570375 4811 scope.go:117] "RemoveContainer" containerID="05784ba60a13e0781da8c52929cd672b3550c7f37982f1804445fdd5a7ac9875" Jan 28 17:34:36 crc kubenswrapper[4811]: I0128 17:34:36.029990 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-persistence-db-create-zgmpv"] Jan 28 17:34:36 crc kubenswrapper[4811]: I0128 17:34:36.041252 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-persistence-db-create-zgmpv"] Jan 28 17:34:36 crc kubenswrapper[4811]: I0128 17:34:36.122073 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb"] Jan 28 17:34:36 crc kubenswrapper[4811]: E0128 17:34:36.122668 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4d54df3-1c51-49c0-af4a-0f02bd87d0e0" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Jan 28 17:34:36 crc kubenswrapper[4811]: I0128 17:34:36.122689 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4d54df3-1c51-49c0-af4a-0f02bd87d0e0" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Jan 28 17:34:36 crc kubenswrapper[4811]: I0128 17:34:36.122955 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4d54df3-1c51-49c0-af4a-0f02bd87d0e0" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Jan 28 17:34:36 crc kubenswrapper[4811]: I0128 17:34:36.123855 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb" Jan 28 17:34:36 crc kubenswrapper[4811]: I0128 17:34:36.126123 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 28 17:34:36 crc kubenswrapper[4811]: I0128 17:34:36.126568 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-856xk" Jan 28 17:34:36 crc kubenswrapper[4811]: I0128 17:34:36.126722 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 28 17:34:36 crc kubenswrapper[4811]: I0128 17:34:36.126882 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 28 17:34:36 crc kubenswrapper[4811]: I0128 17:34:36.148160 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb"] Jan 28 17:34:36 crc kubenswrapper[4811]: I0128 17:34:36.246252 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69e817eb-c2f6-4173-be32-04233e446173-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb\" (UID: \"69e817eb-c2f6-4173-be32-04233e446173\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb" Jan 28 17:34:36 crc kubenswrapper[4811]: I0128 17:34:36.246334 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/69e817eb-c2f6-4173-be32-04233e446173-ssh-key-openstack-cell1\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb\" (UID: \"69e817eb-c2f6-4173-be32-04233e446173\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb" Jan 28 17:34:36 crc kubenswrapper[4811]: I0128 17:34:36.246366 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69e817eb-c2f6-4173-be32-04233e446173-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb\" (UID: \"69e817eb-c2f6-4173-be32-04233e446173\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb" Jan 28 17:34:36 crc kubenswrapper[4811]: I0128 17:34:36.246558 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/69e817eb-c2f6-4173-be32-04233e446173-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb\" (UID: \"69e817eb-c2f6-4173-be32-04233e446173\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb" Jan 28 17:34:36 crc kubenswrapper[4811]: I0128 17:34:36.246878 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2swpk\" (UniqueName: \"kubernetes.io/projected/69e817eb-c2f6-4173-be32-04233e446173-kube-api-access-2swpk\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb\" (UID: \"69e817eb-c2f6-4173-be32-04233e446173\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb" Jan 28 17:34:36 crc kubenswrapper[4811]: I0128 17:34:36.348156 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2swpk\" (UniqueName: 
\"kubernetes.io/projected/69e817eb-c2f6-4173-be32-04233e446173-kube-api-access-2swpk\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb\" (UID: \"69e817eb-c2f6-4173-be32-04233e446173\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb" Jan 28 17:34:36 crc kubenswrapper[4811]: I0128 17:34:36.348270 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69e817eb-c2f6-4173-be32-04233e446173-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb\" (UID: \"69e817eb-c2f6-4173-be32-04233e446173\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb" Jan 28 17:34:36 crc kubenswrapper[4811]: I0128 17:34:36.348312 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/69e817eb-c2f6-4173-be32-04233e446173-ssh-key-openstack-cell1\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb\" (UID: \"69e817eb-c2f6-4173-be32-04233e446173\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb" Jan 28 17:34:36 crc kubenswrapper[4811]: I0128 17:34:36.348339 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69e817eb-c2f6-4173-be32-04233e446173-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb\" (UID: \"69e817eb-c2f6-4173-be32-04233e446173\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb" Jan 28 17:34:36 crc kubenswrapper[4811]: I0128 17:34:36.348396 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/69e817eb-c2f6-4173-be32-04233e446173-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb\" (UID: \"69e817eb-c2f6-4173-be32-04233e446173\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb" Jan 28 17:34:36 crc kubenswrapper[4811]: I0128 17:34:36.355355 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69e817eb-c2f6-4173-be32-04233e446173-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb\" (UID: \"69e817eb-c2f6-4173-be32-04233e446173\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb" Jan 28 17:34:36 crc kubenswrapper[4811]: I0128 17:34:36.355620 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/69e817eb-c2f6-4173-be32-04233e446173-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb\" (UID: \"69e817eb-c2f6-4173-be32-04233e446173\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb" Jan 28 17:34:36 crc kubenswrapper[4811]: I0128 17:34:36.356333 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/69e817eb-c2f6-4173-be32-04233e446173-ssh-key-openstack-cell1\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb\" (UID: \"69e817eb-c2f6-4173-be32-04233e446173\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb" Jan 28 17:34:36 crc kubenswrapper[4811]: I0128 17:34:36.357854 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/69e817eb-c2f6-4173-be32-04233e446173-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb\" (UID: \"69e817eb-c2f6-4173-be32-04233e446173\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb" Jan 28 17:34:36 crc kubenswrapper[4811]: I0128 17:34:36.364900 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4be526f-8d55-4d2c-bed8-08ac750c4df1" path="/var/lib/kubelet/pods/c4be526f-8d55-4d2c-bed8-08ac750c4df1/volumes" Jan 28 17:34:36 crc kubenswrapper[4811]: I0128 17:34:36.369660 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2swpk\" (UniqueName: \"kubernetes.io/projected/69e817eb-c2f6-4173-be32-04233e446173-kube-api-access-2swpk\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb\" (UID: \"69e817eb-c2f6-4173-be32-04233e446173\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb" Jan 28 17:34:36 crc kubenswrapper[4811]: I0128 17:34:36.449739 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb" Jan 28 17:34:37 crc kubenswrapper[4811]: I0128 17:34:37.043746 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-625d-account-create-update-px8pw"] Jan 28 17:34:37 crc kubenswrapper[4811]: I0128 17:34:37.062302 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-625d-account-create-update-px8pw"] Jan 28 17:34:37 crc kubenswrapper[4811]: I0128 17:34:37.144752 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb"] Jan 28 17:34:37 crc kubenswrapper[4811]: I0128 17:34:37.724550 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb" event={"ID":"69e817eb-c2f6-4173-be32-04233e446173","Type":"ContainerStarted","Data":"f640f9284d51542b9a7b4864b25517d186fa48d41412afa8dbfa5a6aba1fdf78"} Jan 28 17:34:38 crc kubenswrapper[4811]: I0128 17:34:38.372082 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc230ae4-d6f3-4bd8-99e1-9107b216450d" path="/var/lib/kubelet/pods/cc230ae4-d6f3-4bd8-99e1-9107b216450d/volumes" Jan 28 17:34:38 crc kubenswrapper[4811]: I0128 17:34:38.741709 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb" event={"ID":"69e817eb-c2f6-4173-be32-04233e446173","Type":"ContainerStarted","Data":"c19992a59b5fc0e2384f016de8d51b6d3dd46af92881d2308d54779edf866f8c"} Jan 28 17:34:38 crc kubenswrapper[4811]: I0128 17:34:38.784275 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb" podStartSLOduration=2.249799777 podStartE2EDuration="2.78424942s" podCreationTimestamp="2026-01-28 17:34:36 +0000 UTC" firstStartedPulling="2026-01-28 17:34:37.15235336 +0000 UTC m=+6569.906716943" lastFinishedPulling="2026-01-28 17:34:37.686802983 +0000 UTC m=+6570.441166586" observedRunningTime="2026-01-28 17:34:38.769930433 +0000 UTC m=+6571.524294026" watchObservedRunningTime="2026-01-28 17:34:38.78424942 +0000 UTC m=+6571.538613013" Jan 28 17:34:47 crc kubenswrapper[4811]: I0128 17:34:47.341309 4811 scope.go:117] "RemoveContainer" containerID="c8f31485a8ab10a9c157ae14e2cb2f7e512fa0e9a55fee158caf88525ec54b35" Jan 28 17:34:47 crc kubenswrapper[4811]: E0128 17:34:47.342214 4811 pod_workers.go:1301] "Error 
Jan 28 17:35:01 crc kubenswrapper[4811]: I0128 17:35:01.339882 4811 scope.go:117] "RemoveContainer" containerID="c8f31485a8ab10a9c157ae14e2cb2f7e512fa0e9a55fee158caf88525ec54b35"
Jan 28 17:35:01 crc kubenswrapper[4811]: E0128 17:35:01.340716 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:35:14 crc kubenswrapper[4811]: I0128 17:35:14.340306 4811 scope.go:117] "RemoveContainer" containerID="c8f31485a8ab10a9c157ae14e2cb2f7e512fa0e9a55fee158caf88525ec54b35"
Jan 28 17:35:14 crc kubenswrapper[4811]: E0128 17:35:14.341376 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:35:22 crc kubenswrapper[4811]: I0128 17:35:22.055058 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-db-sync-24c7x"]
Jan 28 17:35:22 crc kubenswrapper[4811]: I0128 17:35:22.070915 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-db-sync-24c7x"]
Jan 28 17:35:22 crc kubenswrapper[4811]: I0128 17:35:22.356930 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2dbe153f-0524-407e-8a1a-61cce4f22eeb" path="/var/lib/kubelet/pods/2dbe153f-0524-407e-8a1a-61cce4f22eeb/volumes"
Jan 28 17:35:25 crc kubenswrapper[4811]: I0128 17:35:25.340750 4811 scope.go:117] "RemoveContainer" containerID="c8f31485a8ab10a9c157ae14e2cb2f7e512fa0e9a55fee158caf88525ec54b35"
Jan 28 17:35:25 crc kubenswrapper[4811]: E0128 17:35:25.341863 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:35:35 crc kubenswrapper[4811]: I0128 17:35:34.821885 4811 scope.go:117] "RemoveContainer" containerID="d0d71533591f8c9b33e17be37c791ff186731024c0e172743e5a8dd360d3ff1a"
Jan 28 17:35:35 crc kubenswrapper[4811]: I0128 17:35:35.268032 4811 scope.go:117] "RemoveContainer" containerID="ada503a25046c67f1294b9915a959cd74bc9f8e6728e6813efbe18172c232f7c"
Jan 28 17:35:35 crc kubenswrapper[4811]: I0128 17:35:35.434018 4811 scope.go:117] "RemoveContainer" containerID="e27135a2724e31bd3b95d4467b3d0608c863bd2f663c90259f22139577819d5c"
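Those repeating machine-config-daemon pairs (a "RemoveContainer" attempt immediately answered by "Error syncing pod, skipping ... CrashLoopBackOff: back-off 5m0s") are kubelet declining to restart the container while its crash-loop backoff window is still open; the restart finally goes through at 17:35:40-17:35:41 below, once the window lapses. Kubernetes documents the schedule as a 10s base delay that doubles with each failed restart up to a 5m cap, resetting after 10 minutes of clean running. A minimal sketch of that schedule, assuming exactly those documented constants:

package main

import (
	"fmt"
	"time"
)

// crashLoopDelay returns the documented kubelet restart backoff: 10s base,
// doubling per failed restart, capped at the 5m0s seen in the log above.
func crashLoopDelay(restarts int) time.Duration {
	const (
		base     = 10 * time.Second
		maxDelay = 5 * time.Minute
	)
	d := base
	for i := 0; i < restarts; i++ {
		d *= 2
		if d >= maxDelay {
			return maxDelay
		}
	}
	return d
}

func main() {
	for r := 0; r <= 6; r++ {
		fmt.Printf("restart %d: wait %v\n", r, crashLoopDelay(r))
	}
	// restart 0: 10s, 1: 20s, 2: 40s, 3: 1m20s, 4: 2m40s, then 5m0s from restart 5 on.
}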
Jan 28 17:35:35 crc kubenswrapper[4811]: I0128 17:35:35.508498 4811 scope.go:117] "RemoveContainer" containerID="336d9036bbade2da460be404e5af8997e17f8a9c55b035942ef38930391755a1"
Jan 28 17:35:40 crc kubenswrapper[4811]: I0128 17:35:40.339323 4811 scope.go:117] "RemoveContainer" containerID="c8f31485a8ab10a9c157ae14e2cb2f7e512fa0e9a55fee158caf88525ec54b35"
Jan 28 17:35:41 crc kubenswrapper[4811]: I0128 17:35:41.416355 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"2008bc8938819da747e2484f03f5e9a84bd57754f7787484853ede70e0f45bd7"}
Jan 28 17:36:21 crc kubenswrapper[4811]: I0128 17:36:21.207511 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-97hsw"]
Jan 28 17:36:21 crc kubenswrapper[4811]: I0128 17:36:21.230079 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-97hsw"]
Jan 28 17:36:21 crc kubenswrapper[4811]: I0128 17:36:21.230219 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-97hsw"
Jan 28 17:36:21 crc kubenswrapper[4811]: I0128 17:36:21.333228 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8vrc\" (UniqueName: \"kubernetes.io/projected/91e246ed-680a-4456-b610-72030336b942-kube-api-access-n8vrc\") pod \"certified-operators-97hsw\" (UID: \"91e246ed-680a-4456-b610-72030336b942\") " pod="openshift-marketplace/certified-operators-97hsw"
Jan 28 17:36:21 crc kubenswrapper[4811]: I0128 17:36:21.333662 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91e246ed-680a-4456-b610-72030336b942-catalog-content\") pod \"certified-operators-97hsw\" (UID: \"91e246ed-680a-4456-b610-72030336b942\") " pod="openshift-marketplace/certified-operators-97hsw"
Jan 28 17:36:21 crc kubenswrapper[4811]: I0128 17:36:21.333908 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91e246ed-680a-4456-b610-72030336b942-utilities\") pod \"certified-operators-97hsw\" (UID: \"91e246ed-680a-4456-b610-72030336b942\") " pod="openshift-marketplace/certified-operators-97hsw"
Jan 28 17:36:21 crc kubenswrapper[4811]: I0128 17:36:21.435813 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91e246ed-680a-4456-b610-72030336b942-utilities\") pod \"certified-operators-97hsw\" (UID: \"91e246ed-680a-4456-b610-72030336b942\") " pod="openshift-marketplace/certified-operators-97hsw"
Jan 28 17:36:21 crc kubenswrapper[4811]: I0128 17:36:21.437560 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91e246ed-680a-4456-b610-72030336b942-utilities\") pod \"certified-operators-97hsw\" (UID: \"91e246ed-680a-4456-b610-72030336b942\") " pod="openshift-marketplace/certified-operators-97hsw"
Jan 28 17:36:21 crc kubenswrapper[4811]: I0128 17:36:21.438125 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8vrc\" (UniqueName: \"kubernetes.io/projected/91e246ed-680a-4456-b610-72030336b942-kube-api-access-n8vrc\") pod \"certified-operators-97hsw\" (UID: \"91e246ed-680a-4456-b610-72030336b942\")
" pod="openshift-marketplace/certified-operators-97hsw" Jan 28 17:36:21 crc kubenswrapper[4811]: I0128 17:36:21.439256 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91e246ed-680a-4456-b610-72030336b942-catalog-content\") pod \"certified-operators-97hsw\" (UID: \"91e246ed-680a-4456-b610-72030336b942\") " pod="openshift-marketplace/certified-operators-97hsw" Jan 28 17:36:21 crc kubenswrapper[4811]: I0128 17:36:21.439685 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91e246ed-680a-4456-b610-72030336b942-catalog-content\") pod \"certified-operators-97hsw\" (UID: \"91e246ed-680a-4456-b610-72030336b942\") " pod="openshift-marketplace/certified-operators-97hsw" Jan 28 17:36:21 crc kubenswrapper[4811]: I0128 17:36:21.474684 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8vrc\" (UniqueName: \"kubernetes.io/projected/91e246ed-680a-4456-b610-72030336b942-kube-api-access-n8vrc\") pod \"certified-operators-97hsw\" (UID: \"91e246ed-680a-4456-b610-72030336b942\") " pod="openshift-marketplace/certified-operators-97hsw" Jan 28 17:36:21 crc kubenswrapper[4811]: I0128 17:36:21.559597 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-97hsw" Jan 28 17:36:22 crc kubenswrapper[4811]: I0128 17:36:22.210904 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-97hsw"] Jan 28 17:36:22 crc kubenswrapper[4811]: I0128 17:36:22.829648 4811 generic.go:334] "Generic (PLEG): container finished" podID="91e246ed-680a-4456-b610-72030336b942" containerID="d91c8f6560a4560088dd7dbe8d71dc180d1938f3c7b1267f191160b1013a3018" exitCode=0 Jan 28 17:36:22 crc kubenswrapper[4811]: I0128 17:36:22.829727 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-97hsw" event={"ID":"91e246ed-680a-4456-b610-72030336b942","Type":"ContainerDied","Data":"d91c8f6560a4560088dd7dbe8d71dc180d1938f3c7b1267f191160b1013a3018"} Jan 28 17:36:22 crc kubenswrapper[4811]: I0128 17:36:22.829929 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-97hsw" event={"ID":"91e246ed-680a-4456-b610-72030336b942","Type":"ContainerStarted","Data":"115e5ea6cd420788a669eb8ed34b54af5c2beafa6eb38f64b076cfb34c969de0"} Jan 28 17:36:22 crc kubenswrapper[4811]: I0128 17:36:22.832210 4811 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 28 17:36:24 crc kubenswrapper[4811]: I0128 17:36:24.850900 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-97hsw" event={"ID":"91e246ed-680a-4456-b610-72030336b942","Type":"ContainerStarted","Data":"d93f61fb7c1e9b1ecd2c51b3427c1e02e8aa78ac4e5b517e17111ff949ecf2f6"} Jan 28 17:36:25 crc kubenswrapper[4811]: I0128 17:36:25.861211 4811 generic.go:334] "Generic (PLEG): container finished" podID="91e246ed-680a-4456-b610-72030336b942" containerID="d93f61fb7c1e9b1ecd2c51b3427c1e02e8aa78ac4e5b517e17111ff949ecf2f6" exitCode=0 Jan 28 17:36:25 crc kubenswrapper[4811]: I0128 17:36:25.861265 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-97hsw" 
event={"ID":"91e246ed-680a-4456-b610-72030336b942","Type":"ContainerDied","Data":"d93f61fb7c1e9b1ecd2c51b3427c1e02e8aa78ac4e5b517e17111ff949ecf2f6"} Jan 28 17:36:26 crc kubenswrapper[4811]: I0128 17:36:26.874284 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-97hsw" event={"ID":"91e246ed-680a-4456-b610-72030336b942","Type":"ContainerStarted","Data":"8ed18e01385d0f0c7e49bcf7d48101135d572ec109bafa9e1612d14a8d549d01"} Jan 28 17:36:26 crc kubenswrapper[4811]: I0128 17:36:26.902211 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-97hsw" podStartSLOduration=2.37535305 podStartE2EDuration="5.902192375s" podCreationTimestamp="2026-01-28 17:36:21 +0000 UTC" firstStartedPulling="2026-01-28 17:36:22.831981936 +0000 UTC m=+6675.586345519" lastFinishedPulling="2026-01-28 17:36:26.358821261 +0000 UTC m=+6679.113184844" observedRunningTime="2026-01-28 17:36:26.892273057 +0000 UTC m=+6679.646636660" watchObservedRunningTime="2026-01-28 17:36:26.902192375 +0000 UTC m=+6679.656555958" Jan 28 17:36:31 crc kubenswrapper[4811]: I0128 17:36:31.560122 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-97hsw" Jan 28 17:36:31 crc kubenswrapper[4811]: I0128 17:36:31.560755 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-97hsw" Jan 28 17:36:31 crc kubenswrapper[4811]: I0128 17:36:31.638839 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-97hsw" Jan 28 17:36:31 crc kubenswrapper[4811]: I0128 17:36:31.964045 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-97hsw" Jan 28 17:36:32 crc kubenswrapper[4811]: I0128 17:36:32.014216 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-97hsw"] Jan 28 17:36:33 crc kubenswrapper[4811]: I0128 17:36:33.936934 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-97hsw" podUID="91e246ed-680a-4456-b610-72030336b942" containerName="registry-server" containerID="cri-o://8ed18e01385d0f0c7e49bcf7d48101135d572ec109bafa9e1612d14a8d549d01" gracePeriod=2 Jan 28 17:36:34 crc kubenswrapper[4811]: I0128 17:36:34.495491 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-97hsw" Jan 28 17:36:34 crc kubenswrapper[4811]: I0128 17:36:34.664956 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91e246ed-680a-4456-b610-72030336b942-utilities\") pod \"91e246ed-680a-4456-b610-72030336b942\" (UID: \"91e246ed-680a-4456-b610-72030336b942\") " Jan 28 17:36:34 crc kubenswrapper[4811]: I0128 17:36:34.665011 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91e246ed-680a-4456-b610-72030336b942-catalog-content\") pod \"91e246ed-680a-4456-b610-72030336b942\" (UID: \"91e246ed-680a-4456-b610-72030336b942\") " Jan 28 17:36:34 crc kubenswrapper[4811]: I0128 17:36:34.665152 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n8vrc\" (UniqueName: \"kubernetes.io/projected/91e246ed-680a-4456-b610-72030336b942-kube-api-access-n8vrc\") pod \"91e246ed-680a-4456-b610-72030336b942\" (UID: \"91e246ed-680a-4456-b610-72030336b942\") " Jan 28 17:36:34 crc kubenswrapper[4811]: I0128 17:36:34.666326 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91e246ed-680a-4456-b610-72030336b942-utilities" (OuterVolumeSpecName: "utilities") pod "91e246ed-680a-4456-b610-72030336b942" (UID: "91e246ed-680a-4456-b610-72030336b942"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:36:34 crc kubenswrapper[4811]: I0128 17:36:34.687727 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91e246ed-680a-4456-b610-72030336b942-kube-api-access-n8vrc" (OuterVolumeSpecName: "kube-api-access-n8vrc") pod "91e246ed-680a-4456-b610-72030336b942" (UID: "91e246ed-680a-4456-b610-72030336b942"). InnerVolumeSpecName "kube-api-access-n8vrc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:36:34 crc kubenswrapper[4811]: I0128 17:36:34.713001 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91e246ed-680a-4456-b610-72030336b942-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "91e246ed-680a-4456-b610-72030336b942" (UID: "91e246ed-680a-4456-b610-72030336b942"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:36:34 crc kubenswrapper[4811]: I0128 17:36:34.767953 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91e246ed-680a-4456-b610-72030336b942-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 17:36:34 crc kubenswrapper[4811]: I0128 17:36:34.768003 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91e246ed-680a-4456-b610-72030336b942-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 17:36:34 crc kubenswrapper[4811]: I0128 17:36:34.768024 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n8vrc\" (UniqueName: \"kubernetes.io/projected/91e246ed-680a-4456-b610-72030336b942-kube-api-access-n8vrc\") on node \"crc\" DevicePath \"\"" Jan 28 17:36:34 crc kubenswrapper[4811]: I0128 17:36:34.952831 4811 generic.go:334] "Generic (PLEG): container finished" podID="91e246ed-680a-4456-b610-72030336b942" containerID="8ed18e01385d0f0c7e49bcf7d48101135d572ec109bafa9e1612d14a8d549d01" exitCode=0 Jan 28 17:36:34 crc kubenswrapper[4811]: I0128 17:36:34.952896 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-97hsw" Jan 28 17:36:34 crc kubenswrapper[4811]: I0128 17:36:34.952948 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-97hsw" event={"ID":"91e246ed-680a-4456-b610-72030336b942","Type":"ContainerDied","Data":"8ed18e01385d0f0c7e49bcf7d48101135d572ec109bafa9e1612d14a8d549d01"} Jan 28 17:36:34 crc kubenswrapper[4811]: I0128 17:36:34.954782 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-97hsw" event={"ID":"91e246ed-680a-4456-b610-72030336b942","Type":"ContainerDied","Data":"115e5ea6cd420788a669eb8ed34b54af5c2beafa6eb38f64b076cfb34c969de0"} Jan 28 17:36:34 crc kubenswrapper[4811]: I0128 17:36:34.954814 4811 scope.go:117] "RemoveContainer" containerID="8ed18e01385d0f0c7e49bcf7d48101135d572ec109bafa9e1612d14a8d549d01" Jan 28 17:36:34 crc kubenswrapper[4811]: I0128 17:36:34.979523 4811 scope.go:117] "RemoveContainer" containerID="d93f61fb7c1e9b1ecd2c51b3427c1e02e8aa78ac4e5b517e17111ff949ecf2f6" Jan 28 17:36:35 crc kubenswrapper[4811]: I0128 17:36:34.998751 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-97hsw"] Jan 28 17:36:35 crc kubenswrapper[4811]: I0128 17:36:35.007856 4811 scope.go:117] "RemoveContainer" containerID="d91c8f6560a4560088dd7dbe8d71dc180d1938f3c7b1267f191160b1013a3018" Jan 28 17:36:35 crc kubenswrapper[4811]: I0128 17:36:35.011305 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-97hsw"] Jan 28 17:36:35 crc kubenswrapper[4811]: I0128 17:36:35.059557 4811 scope.go:117] "RemoveContainer" containerID="8ed18e01385d0f0c7e49bcf7d48101135d572ec109bafa9e1612d14a8d549d01" Jan 28 17:36:35 crc kubenswrapper[4811]: E0128 17:36:35.061782 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ed18e01385d0f0c7e49bcf7d48101135d572ec109bafa9e1612d14a8d549d01\": container with ID starting with 8ed18e01385d0f0c7e49bcf7d48101135d572ec109bafa9e1612d14a8d549d01 not found: ID does not exist" containerID="8ed18e01385d0f0c7e49bcf7d48101135d572ec109bafa9e1612d14a8d549d01" Jan 28 17:36:35 crc kubenswrapper[4811]: I0128 17:36:35.061840 
Jan 28 17:36:35 crc kubenswrapper[4811]: I0128 17:36:35.061868 4811 scope.go:117] "RemoveContainer" containerID="d93f61fb7c1e9b1ecd2c51b3427c1e02e8aa78ac4e5b517e17111ff949ecf2f6"
Jan 28 17:36:35 crc kubenswrapper[4811]: E0128 17:36:35.062331 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d93f61fb7c1e9b1ecd2c51b3427c1e02e8aa78ac4e5b517e17111ff949ecf2f6\": container with ID starting with d93f61fb7c1e9b1ecd2c51b3427c1e02e8aa78ac4e5b517e17111ff949ecf2f6 not found: ID does not exist" containerID="d93f61fb7c1e9b1ecd2c51b3427c1e02e8aa78ac4e5b517e17111ff949ecf2f6"
Jan 28 17:36:35 crc kubenswrapper[4811]: I0128 17:36:35.062352 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d93f61fb7c1e9b1ecd2c51b3427c1e02e8aa78ac4e5b517e17111ff949ecf2f6"} err="failed to get container status \"d93f61fb7c1e9b1ecd2c51b3427c1e02e8aa78ac4e5b517e17111ff949ecf2f6\": rpc error: code = NotFound desc = could not find container \"d93f61fb7c1e9b1ecd2c51b3427c1e02e8aa78ac4e5b517e17111ff949ecf2f6\": container with ID starting with d93f61fb7c1e9b1ecd2c51b3427c1e02e8aa78ac4e5b517e17111ff949ecf2f6 not found: ID does not exist"
Jan 28 17:36:35 crc kubenswrapper[4811]: I0128 17:36:35.062367 4811 scope.go:117] "RemoveContainer" containerID="d91c8f6560a4560088dd7dbe8d71dc180d1938f3c7b1267f191160b1013a3018"
Jan 28 17:36:35 crc kubenswrapper[4811]: E0128 17:36:35.062809 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d91c8f6560a4560088dd7dbe8d71dc180d1938f3c7b1267f191160b1013a3018\": container with ID starting with d91c8f6560a4560088dd7dbe8d71dc180d1938f3c7b1267f191160b1013a3018 not found: ID does not exist" containerID="d91c8f6560a4560088dd7dbe8d71dc180d1938f3c7b1267f191160b1013a3018"
Jan 28 17:36:35 crc kubenswrapper[4811]: I0128 17:36:35.062833 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d91c8f6560a4560088dd7dbe8d71dc180d1938f3c7b1267f191160b1013a3018"} err="failed to get container status \"d91c8f6560a4560088dd7dbe8d71dc180d1938f3c7b1267f191160b1013a3018\": rpc error: code = NotFound desc = could not find container \"d91c8f6560a4560088dd7dbe8d71dc180d1938f3c7b1267f191160b1013a3018\": container with ID starting with d91c8f6560a4560088dd7dbe8d71dc180d1938f3c7b1267f191160b1013a3018 not found: ID does not exist"
Jan 28 17:36:36 crc kubenswrapper[4811]: I0128 17:36:36.353405 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91e246ed-680a-4456-b610-72030336b942" path="/var/lib/kubelet/pods/91e246ed-680a-4456-b610-72030336b942/volumes"
Jan 28 17:38:00 crc kubenswrapper[4811]: I0128 17:38:00.045199 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-create-pw6st"]
Jan 28 17:38:00 crc kubenswrapper[4811]: I0128 17:38:00.060299 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-7fc9-account-create-update-bmtgf"]
Jan 28 17:38:00 crc kubenswrapper[4811]: I0128 17:38:00.071651 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-create-pw6st"]
Jan 28 17:38:00 crc kubenswrapper[4811]: I0128 17:38:00.080763 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-7fc9-account-create-update-bmtgf"]
Jan 28 17:38:00 crc kubenswrapper[4811]: I0128 17:38:00.352070 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1659b4b2-066e-4aa2-a334-c52f6b132080" path="/var/lib/kubelet/pods/1659b4b2-066e-4aa2-a334-c52f6b132080/volumes"
Jan 28 17:38:00 crc kubenswrapper[4811]: I0128 17:38:00.352661 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="278a8293-6eb7-4409-9881-e320c037f063" path="/var/lib/kubelet/pods/278a8293-6eb7-4409-9881-e320c037f063/volumes"
Jan 28 17:38:03 crc kubenswrapper[4811]: I0128 17:38:03.087227 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 17:38:03 crc kubenswrapper[4811]: I0128 17:38:03.087565 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 17:38:18 crc kubenswrapper[4811]: I0128 17:38:18.046974 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-sync-qzbhp"]
Jan 28 17:38:18 crc kubenswrapper[4811]: I0128 17:38:18.056131 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-sync-qzbhp"]
Jan 28 17:38:18 crc kubenswrapper[4811]: I0128 17:38:18.504339 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3eb0e86-9952-4769-b877-e76c7c12b8e2" path="/var/lib/kubelet/pods/e3eb0e86-9952-4769-b877-e76c7c12b8e2/volumes"
Jan 28 17:38:33 crc kubenswrapper[4811]: I0128 17:38:33.087129 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 17:38:33 crc kubenswrapper[4811]: I0128 17:38:33.087699 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 17:38:36 crc kubenswrapper[4811]: I0128 17:38:36.274727 4811 scope.go:117] "RemoveContainer" containerID="1ebaabecb6ad8e582d0e3121bd7ea25081ebff8ae0c839b0cc0480f9840dba88"
Jan 28 17:38:36 crc kubenswrapper[4811]: I0128 17:38:36.304320 4811 scope.go:117] "RemoveContainer" containerID="b5b1d6b98aee5ce31b406e35b2e119378db2b38576d390f062ba1d37e922fde0"
Jan 28 17:38:36 crc kubenswrapper[4811]: I0128 17:38:36.376063 4811 scope.go:117] "RemoveContainer" containerID="26f0cb8d8a616ead906239adf1f4466d5e1db5b635596fa410df2755a48aefb5"
Jan 28 17:39:03 crc kubenswrapper[4811]: I0128 17:39:03.087836 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 17:39:03 crc kubenswrapper[4811]: I0128 17:39:03.089160 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 17:39:03 crc kubenswrapper[4811]: I0128 17:39:03.089303 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6"
Jan 28 17:39:03 crc kubenswrapper[4811]: I0128 17:39:03.090331 4811 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2008bc8938819da747e2484f03f5e9a84bd57754f7787484853ede70e0f45bd7"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 28 17:39:03 crc kubenswrapper[4811]: I0128 17:39:03.090414 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://2008bc8938819da747e2484f03f5e9a84bd57754f7787484853ede70e0f45bd7" gracePeriod=600
Jan 28 17:39:03 crc kubenswrapper[4811]: I0128 17:39:03.480075 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="2008bc8938819da747e2484f03f5e9a84bd57754f7787484853ede70e0f45bd7" exitCode=0
Jan 28 17:39:03 crc kubenswrapper[4811]: I0128 17:39:03.480192 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"2008bc8938819da747e2484f03f5e9a84bd57754f7787484853ede70e0f45bd7"}
Jan 28 17:39:03 crc kubenswrapper[4811]: I0128 17:39:03.480413 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"1bcd36267aeacda468c2d2ee05b7c0018a25e8599ca1388f8f8967c101d5f7c7"}
Jan 28 17:39:03 crc kubenswrapper[4811]: I0128 17:39:03.480451 4811 scope.go:117] "RemoveContainer" containerID="c8f31485a8ab10a9c157ae14e2cb2f7e512fa0e9a55fee158caf88525ec54b35"
Jan 28 17:40:38 crc kubenswrapper[4811]: I0128 17:40:38.630086 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6q4mm"]
Jan 28 17:40:38 crc kubenswrapper[4811]: E0128 17:40:38.631284 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91e246ed-680a-4456-b610-72030336b942" containerName="extract-utilities"
Jan 28 17:40:38 crc kubenswrapper[4811]: I0128 17:40:38.631306 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="91e246ed-680a-4456-b610-72030336b942" containerName="extract-utilities"
Jan 28 17:40:38 crc kubenswrapper[4811]: E0128 17:40:38.631323 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91e246ed-680a-4456-b610-72030336b942" containerName="registry-server"
podUID="91e246ed-680a-4456-b610-72030336b942" containerName="registry-server" Jan 28 17:40:38 crc kubenswrapper[4811]: I0128 17:40:38.631333 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="91e246ed-680a-4456-b610-72030336b942" containerName="registry-server" Jan 28 17:40:38 crc kubenswrapper[4811]: E0128 17:40:38.631347 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91e246ed-680a-4456-b610-72030336b942" containerName="extract-content" Jan 28 17:40:38 crc kubenswrapper[4811]: I0128 17:40:38.631354 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="91e246ed-680a-4456-b610-72030336b942" containerName="extract-content" Jan 28 17:40:38 crc kubenswrapper[4811]: I0128 17:40:38.631698 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="91e246ed-680a-4456-b610-72030336b942" containerName="registry-server" Jan 28 17:40:38 crc kubenswrapper[4811]: I0128 17:40:38.633923 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6q4mm" Jan 28 17:40:38 crc kubenswrapper[4811]: I0128 17:40:38.646384 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6q4mm"] Jan 28 17:40:38 crc kubenswrapper[4811]: I0128 17:40:38.772927 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7245ae6b-c852-4379-b449-d8020c8df3be-utilities\") pod \"redhat-marketplace-6q4mm\" (UID: \"7245ae6b-c852-4379-b449-d8020c8df3be\") " pod="openshift-marketplace/redhat-marketplace-6q4mm" Jan 28 17:40:38 crc kubenswrapper[4811]: I0128 17:40:38.772985 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7245ae6b-c852-4379-b449-d8020c8df3be-catalog-content\") pod \"redhat-marketplace-6q4mm\" (UID: \"7245ae6b-c852-4379-b449-d8020c8df3be\") " pod="openshift-marketplace/redhat-marketplace-6q4mm" Jan 28 17:40:38 crc kubenswrapper[4811]: I0128 17:40:38.773162 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khzbd\" (UniqueName: \"kubernetes.io/projected/7245ae6b-c852-4379-b449-d8020c8df3be-kube-api-access-khzbd\") pod \"redhat-marketplace-6q4mm\" (UID: \"7245ae6b-c852-4379-b449-d8020c8df3be\") " pod="openshift-marketplace/redhat-marketplace-6q4mm" Jan 28 17:40:38 crc kubenswrapper[4811]: I0128 17:40:38.875688 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khzbd\" (UniqueName: \"kubernetes.io/projected/7245ae6b-c852-4379-b449-d8020c8df3be-kube-api-access-khzbd\") pod \"redhat-marketplace-6q4mm\" (UID: \"7245ae6b-c852-4379-b449-d8020c8df3be\") " pod="openshift-marketplace/redhat-marketplace-6q4mm" Jan 28 17:40:38 crc kubenswrapper[4811]: I0128 17:40:38.876200 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7245ae6b-c852-4379-b449-d8020c8df3be-utilities\") pod \"redhat-marketplace-6q4mm\" (UID: \"7245ae6b-c852-4379-b449-d8020c8df3be\") " pod="openshift-marketplace/redhat-marketplace-6q4mm" Jan 28 17:40:38 crc kubenswrapper[4811]: I0128 17:40:38.876226 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7245ae6b-c852-4379-b449-d8020c8df3be-catalog-content\") pod 
\"redhat-marketplace-6q4mm\" (UID: \"7245ae6b-c852-4379-b449-d8020c8df3be\") " pod="openshift-marketplace/redhat-marketplace-6q4mm" Jan 28 17:40:38 crc kubenswrapper[4811]: I0128 17:40:38.876909 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7245ae6b-c852-4379-b449-d8020c8df3be-catalog-content\") pod \"redhat-marketplace-6q4mm\" (UID: \"7245ae6b-c852-4379-b449-d8020c8df3be\") " pod="openshift-marketplace/redhat-marketplace-6q4mm" Jan 28 17:40:38 crc kubenswrapper[4811]: I0128 17:40:38.877153 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7245ae6b-c852-4379-b449-d8020c8df3be-utilities\") pod \"redhat-marketplace-6q4mm\" (UID: \"7245ae6b-c852-4379-b449-d8020c8df3be\") " pod="openshift-marketplace/redhat-marketplace-6q4mm" Jan 28 17:40:38 crc kubenswrapper[4811]: I0128 17:40:38.901791 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-khzbd\" (UniqueName: \"kubernetes.io/projected/7245ae6b-c852-4379-b449-d8020c8df3be-kube-api-access-khzbd\") pod \"redhat-marketplace-6q4mm\" (UID: \"7245ae6b-c852-4379-b449-d8020c8df3be\") " pod="openshift-marketplace/redhat-marketplace-6q4mm" Jan 28 17:40:38 crc kubenswrapper[4811]: I0128 17:40:38.977550 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6q4mm" Jan 28 17:40:39 crc kubenswrapper[4811]: I0128 17:40:39.541548 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6q4mm"] Jan 28 17:40:40 crc kubenswrapper[4811]: I0128 17:40:40.511299 4811 generic.go:334] "Generic (PLEG): container finished" podID="7245ae6b-c852-4379-b449-d8020c8df3be" containerID="12cb1eabd6e1d28abc6bbfa3b8b7699b3e6b1c5da3e66ca1611e2716153456aa" exitCode=0 Jan 28 17:40:40 crc kubenswrapper[4811]: I0128 17:40:40.511486 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6q4mm" event={"ID":"7245ae6b-c852-4379-b449-d8020c8df3be","Type":"ContainerDied","Data":"12cb1eabd6e1d28abc6bbfa3b8b7699b3e6b1c5da3e66ca1611e2716153456aa"} Jan 28 17:40:40 crc kubenswrapper[4811]: I0128 17:40:40.511950 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6q4mm" event={"ID":"7245ae6b-c852-4379-b449-d8020c8df3be","Type":"ContainerStarted","Data":"da3feb279bba63341275488349ebce61bee7f8b8d568e8d9d695ca8fb4dd463e"} Jan 28 17:40:40 crc kubenswrapper[4811]: I0128 17:40:40.822633 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-89dhj"] Jan 28 17:40:40 crc kubenswrapper[4811]: I0128 17:40:40.825294 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-89dhj" Jan 28 17:40:40 crc kubenswrapper[4811]: I0128 17:40:40.860921 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-89dhj"] Jan 28 17:40:40 crc kubenswrapper[4811]: I0128 17:40:40.924267 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8n2jk\" (UniqueName: \"kubernetes.io/projected/f3c46619-f2da-4c61-ae02-463abe5d433a-kube-api-access-8n2jk\") pod \"community-operators-89dhj\" (UID: \"f3c46619-f2da-4c61-ae02-463abe5d433a\") " pod="openshift-marketplace/community-operators-89dhj" Jan 28 17:40:40 crc kubenswrapper[4811]: I0128 17:40:40.924342 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3c46619-f2da-4c61-ae02-463abe5d433a-catalog-content\") pod \"community-operators-89dhj\" (UID: \"f3c46619-f2da-4c61-ae02-463abe5d433a\") " pod="openshift-marketplace/community-operators-89dhj" Jan 28 17:40:40 crc kubenswrapper[4811]: I0128 17:40:40.924797 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3c46619-f2da-4c61-ae02-463abe5d433a-utilities\") pod \"community-operators-89dhj\" (UID: \"f3c46619-f2da-4c61-ae02-463abe5d433a\") " pod="openshift-marketplace/community-operators-89dhj" Jan 28 17:40:41 crc kubenswrapper[4811]: I0128 17:40:41.027128 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3c46619-f2da-4c61-ae02-463abe5d433a-utilities\") pod \"community-operators-89dhj\" (UID: \"f3c46619-f2da-4c61-ae02-463abe5d433a\") " pod="openshift-marketplace/community-operators-89dhj" Jan 28 17:40:41 crc kubenswrapper[4811]: I0128 17:40:41.027277 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8n2jk\" (UniqueName: \"kubernetes.io/projected/f3c46619-f2da-4c61-ae02-463abe5d433a-kube-api-access-8n2jk\") pod \"community-operators-89dhj\" (UID: \"f3c46619-f2da-4c61-ae02-463abe5d433a\") " pod="openshift-marketplace/community-operators-89dhj" Jan 28 17:40:41 crc kubenswrapper[4811]: I0128 17:40:41.027306 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3c46619-f2da-4c61-ae02-463abe5d433a-catalog-content\") pod \"community-operators-89dhj\" (UID: \"f3c46619-f2da-4c61-ae02-463abe5d433a\") " pod="openshift-marketplace/community-operators-89dhj" Jan 28 17:40:41 crc kubenswrapper[4811]: I0128 17:40:41.028327 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3c46619-f2da-4c61-ae02-463abe5d433a-catalog-content\") pod \"community-operators-89dhj\" (UID: \"f3c46619-f2da-4c61-ae02-463abe5d433a\") " pod="openshift-marketplace/community-operators-89dhj" Jan 28 17:40:41 crc kubenswrapper[4811]: I0128 17:40:41.028854 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3c46619-f2da-4c61-ae02-463abe5d433a-utilities\") pod \"community-operators-89dhj\" (UID: \"f3c46619-f2da-4c61-ae02-463abe5d433a\") " pod="openshift-marketplace/community-operators-89dhj" Jan 28 17:40:41 crc kubenswrapper[4811]: I0128 17:40:41.064078 4811 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-8n2jk\" (UniqueName: \"kubernetes.io/projected/f3c46619-f2da-4c61-ae02-463abe5d433a-kube-api-access-8n2jk\") pod \"community-operators-89dhj\" (UID: \"f3c46619-f2da-4c61-ae02-463abe5d433a\") " pod="openshift-marketplace/community-operators-89dhj" Jan 28 17:40:41 crc kubenswrapper[4811]: I0128 17:40:41.158801 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-89dhj" Jan 28 17:40:41 crc kubenswrapper[4811]: I0128 17:40:41.776308 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-89dhj"] Jan 28 17:40:42 crc kubenswrapper[4811]: I0128 17:40:42.556187 4811 generic.go:334] "Generic (PLEG): container finished" podID="7245ae6b-c852-4379-b449-d8020c8df3be" containerID="7ce99ae460979bb98a1f7b6fc26e4a179045d36bacd27169fb47f6a99aebf0db" exitCode=0 Jan 28 17:40:42 crc kubenswrapper[4811]: I0128 17:40:42.556390 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6q4mm" event={"ID":"7245ae6b-c852-4379-b449-d8020c8df3be","Type":"ContainerDied","Data":"7ce99ae460979bb98a1f7b6fc26e4a179045d36bacd27169fb47f6a99aebf0db"} Jan 28 17:40:42 crc kubenswrapper[4811]: I0128 17:40:42.561152 4811 generic.go:334] "Generic (PLEG): container finished" podID="f3c46619-f2da-4c61-ae02-463abe5d433a" containerID="311b3e5c54f733a39c6b98b95c41bc71c1d058929cf5eee31a77ecb686bfa4b3" exitCode=0 Jan 28 17:40:42 crc kubenswrapper[4811]: I0128 17:40:42.561307 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-89dhj" event={"ID":"f3c46619-f2da-4c61-ae02-463abe5d433a","Type":"ContainerDied","Data":"311b3e5c54f733a39c6b98b95c41bc71c1d058929cf5eee31a77ecb686bfa4b3"} Jan 28 17:40:42 crc kubenswrapper[4811]: I0128 17:40:42.561477 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-89dhj" event={"ID":"f3c46619-f2da-4c61-ae02-463abe5d433a","Type":"ContainerStarted","Data":"e35f47ad3786406106a4cf328027b67c94f57a42cf75b68b5731714344095e59"} Jan 28 17:40:44 crc kubenswrapper[4811]: I0128 17:40:44.595071 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6q4mm" event={"ID":"7245ae6b-c852-4379-b449-d8020c8df3be","Type":"ContainerStarted","Data":"02ad699f1a8f932f10fc6053a5134d3a25e89b8b88f599824fd703bf4fdeb883"} Jan 28 17:40:44 crc kubenswrapper[4811]: I0128 17:40:44.599619 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-89dhj" event={"ID":"f3c46619-f2da-4c61-ae02-463abe5d433a","Type":"ContainerStarted","Data":"fe3795f29c1937ff82ebf90e47b41ea0223903e482e20ba5bb65cf4102d459b5"} Jan 28 17:40:44 crc kubenswrapper[4811]: I0128 17:40:44.629054 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6q4mm" podStartSLOduration=3.697382923 podStartE2EDuration="6.629035846s" podCreationTimestamp="2026-01-28 17:40:38 +0000 UTC" firstStartedPulling="2026-01-28 17:40:40.513307795 +0000 UTC m=+6933.267671378" lastFinishedPulling="2026-01-28 17:40:43.444960718 +0000 UTC m=+6936.199324301" observedRunningTime="2026-01-28 17:40:44.619663382 +0000 UTC m=+6937.374026965" watchObservedRunningTime="2026-01-28 17:40:44.629035846 +0000 UTC m=+6937.383399429" Jan 28 17:40:47 crc kubenswrapper[4811]: I0128 17:40:47.628870 4811 generic.go:334] "Generic (PLEG): container finished" 
podID="f3c46619-f2da-4c61-ae02-463abe5d433a" containerID="fe3795f29c1937ff82ebf90e47b41ea0223903e482e20ba5bb65cf4102d459b5" exitCode=0 Jan 28 17:40:47 crc kubenswrapper[4811]: I0128 17:40:47.628944 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-89dhj" event={"ID":"f3c46619-f2da-4c61-ae02-463abe5d433a","Type":"ContainerDied","Data":"fe3795f29c1937ff82ebf90e47b41ea0223903e482e20ba5bb65cf4102d459b5"} Jan 28 17:40:48 crc kubenswrapper[4811]: I0128 17:40:48.641623 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-89dhj" event={"ID":"f3c46619-f2da-4c61-ae02-463abe5d433a","Type":"ContainerStarted","Data":"14560c67c1cd386b488de39e235ef10277bd62b8289af59f8ce0de37f2df3bf0"} Jan 28 17:40:48 crc kubenswrapper[4811]: I0128 17:40:48.667764 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-89dhj" podStartSLOduration=3.105925186 podStartE2EDuration="8.667741464s" podCreationTimestamp="2026-01-28 17:40:40 +0000 UTC" firstStartedPulling="2026-01-28 17:40:42.56468172 +0000 UTC m=+6935.319045303" lastFinishedPulling="2026-01-28 17:40:48.126497998 +0000 UTC m=+6940.880861581" observedRunningTime="2026-01-28 17:40:48.65871253 +0000 UTC m=+6941.413076113" watchObservedRunningTime="2026-01-28 17:40:48.667741464 +0000 UTC m=+6941.422105047" Jan 28 17:40:48 crc kubenswrapper[4811]: I0128 17:40:48.977749 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6q4mm" Jan 28 17:40:48 crc kubenswrapper[4811]: I0128 17:40:48.978134 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-6q4mm" Jan 28 17:40:49 crc kubenswrapper[4811]: I0128 17:40:49.043276 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-6q4mm" Jan 28 17:40:49 crc kubenswrapper[4811]: I0128 17:40:49.708149 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6q4mm" Jan 28 17:40:50 crc kubenswrapper[4811]: I0128 17:40:50.406617 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6q4mm"] Jan 28 17:40:51 crc kubenswrapper[4811]: I0128 17:40:51.159543 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-89dhj" Jan 28 17:40:51 crc kubenswrapper[4811]: I0128 17:40:51.159855 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-89dhj" Jan 28 17:40:51 crc kubenswrapper[4811]: I0128 17:40:51.208372 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-89dhj" Jan 28 17:40:51 crc kubenswrapper[4811]: I0128 17:40:51.684840 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-6q4mm" podUID="7245ae6b-c852-4379-b449-d8020c8df3be" containerName="registry-server" containerID="cri-o://02ad699f1a8f932f10fc6053a5134d3a25e89b8b88f599824fd703bf4fdeb883" gracePeriod=2 Jan 28 17:40:52 crc kubenswrapper[4811]: I0128 17:40:52.383897 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6q4mm" Jan 28 17:40:52 crc kubenswrapper[4811]: I0128 17:40:52.500332 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-khzbd\" (UniqueName: \"kubernetes.io/projected/7245ae6b-c852-4379-b449-d8020c8df3be-kube-api-access-khzbd\") pod \"7245ae6b-c852-4379-b449-d8020c8df3be\" (UID: \"7245ae6b-c852-4379-b449-d8020c8df3be\") " Jan 28 17:40:52 crc kubenswrapper[4811]: I0128 17:40:52.500552 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7245ae6b-c852-4379-b449-d8020c8df3be-utilities\") pod \"7245ae6b-c852-4379-b449-d8020c8df3be\" (UID: \"7245ae6b-c852-4379-b449-d8020c8df3be\") " Jan 28 17:40:52 crc kubenswrapper[4811]: I0128 17:40:52.500711 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7245ae6b-c852-4379-b449-d8020c8df3be-catalog-content\") pod \"7245ae6b-c852-4379-b449-d8020c8df3be\" (UID: \"7245ae6b-c852-4379-b449-d8020c8df3be\") " Jan 28 17:40:52 crc kubenswrapper[4811]: I0128 17:40:52.501511 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7245ae6b-c852-4379-b449-d8020c8df3be-utilities" (OuterVolumeSpecName: "utilities") pod "7245ae6b-c852-4379-b449-d8020c8df3be" (UID: "7245ae6b-c852-4379-b449-d8020c8df3be"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:40:52 crc kubenswrapper[4811]: I0128 17:40:52.512248 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7245ae6b-c852-4379-b449-d8020c8df3be-kube-api-access-khzbd" (OuterVolumeSpecName: "kube-api-access-khzbd") pod "7245ae6b-c852-4379-b449-d8020c8df3be" (UID: "7245ae6b-c852-4379-b449-d8020c8df3be"). InnerVolumeSpecName "kube-api-access-khzbd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:40:52 crc kubenswrapper[4811]: I0128 17:40:52.525836 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7245ae6b-c852-4379-b449-d8020c8df3be-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7245ae6b-c852-4379-b449-d8020c8df3be" (UID: "7245ae6b-c852-4379-b449-d8020c8df3be"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:40:52 crc kubenswrapper[4811]: I0128 17:40:52.603867 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7245ae6b-c852-4379-b449-d8020c8df3be-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 17:40:52 crc kubenswrapper[4811]: I0128 17:40:52.603904 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7245ae6b-c852-4379-b449-d8020c8df3be-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 17:40:52 crc kubenswrapper[4811]: I0128 17:40:52.603915 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-khzbd\" (UniqueName: \"kubernetes.io/projected/7245ae6b-c852-4379-b449-d8020c8df3be-kube-api-access-khzbd\") on node \"crc\" DevicePath \"\"" Jan 28 17:40:52 crc kubenswrapper[4811]: I0128 17:40:52.695747 4811 generic.go:334] "Generic (PLEG): container finished" podID="7245ae6b-c852-4379-b449-d8020c8df3be" containerID="02ad699f1a8f932f10fc6053a5134d3a25e89b8b88f599824fd703bf4fdeb883" exitCode=0 Jan 28 17:40:52 crc kubenswrapper[4811]: I0128 17:40:52.695799 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6q4mm" event={"ID":"7245ae6b-c852-4379-b449-d8020c8df3be","Type":"ContainerDied","Data":"02ad699f1a8f932f10fc6053a5134d3a25e89b8b88f599824fd703bf4fdeb883"} Jan 28 17:40:52 crc kubenswrapper[4811]: I0128 17:40:52.696055 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6q4mm" event={"ID":"7245ae6b-c852-4379-b449-d8020c8df3be","Type":"ContainerDied","Data":"da3feb279bba63341275488349ebce61bee7f8b8d568e8d9d695ca8fb4dd463e"} Jan 28 17:40:52 crc kubenswrapper[4811]: I0128 17:40:52.696077 4811 scope.go:117] "RemoveContainer" containerID="02ad699f1a8f932f10fc6053a5134d3a25e89b8b88f599824fd703bf4fdeb883" Jan 28 17:40:52 crc kubenswrapper[4811]: I0128 17:40:52.695820 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6q4mm" Jan 28 17:40:52 crc kubenswrapper[4811]: I0128 17:40:52.724568 4811 scope.go:117] "RemoveContainer" containerID="7ce99ae460979bb98a1f7b6fc26e4a179045d36bacd27169fb47f6a99aebf0db" Jan 28 17:40:52 crc kubenswrapper[4811]: I0128 17:40:52.732091 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6q4mm"] Jan 28 17:40:52 crc kubenswrapper[4811]: I0128 17:40:52.743312 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-6q4mm"] Jan 28 17:40:52 crc kubenswrapper[4811]: I0128 17:40:52.756346 4811 scope.go:117] "RemoveContainer" containerID="12cb1eabd6e1d28abc6bbfa3b8b7699b3e6b1c5da3e66ca1611e2716153456aa" Jan 28 17:40:52 crc kubenswrapper[4811]: I0128 17:40:52.792443 4811 scope.go:117] "RemoveContainer" containerID="02ad699f1a8f932f10fc6053a5134d3a25e89b8b88f599824fd703bf4fdeb883" Jan 28 17:40:52 crc kubenswrapper[4811]: E0128 17:40:52.792852 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02ad699f1a8f932f10fc6053a5134d3a25e89b8b88f599824fd703bf4fdeb883\": container with ID starting with 02ad699f1a8f932f10fc6053a5134d3a25e89b8b88f599824fd703bf4fdeb883 not found: ID does not exist" containerID="02ad699f1a8f932f10fc6053a5134d3a25e89b8b88f599824fd703bf4fdeb883" Jan 28 17:40:52 crc kubenswrapper[4811]: I0128 17:40:52.792891 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02ad699f1a8f932f10fc6053a5134d3a25e89b8b88f599824fd703bf4fdeb883"} err="failed to get container status \"02ad699f1a8f932f10fc6053a5134d3a25e89b8b88f599824fd703bf4fdeb883\": rpc error: code = NotFound desc = could not find container \"02ad699f1a8f932f10fc6053a5134d3a25e89b8b88f599824fd703bf4fdeb883\": container with ID starting with 02ad699f1a8f932f10fc6053a5134d3a25e89b8b88f599824fd703bf4fdeb883 not found: ID does not exist" Jan 28 17:40:52 crc kubenswrapper[4811]: I0128 17:40:52.792918 4811 scope.go:117] "RemoveContainer" containerID="7ce99ae460979bb98a1f7b6fc26e4a179045d36bacd27169fb47f6a99aebf0db" Jan 28 17:40:52 crc kubenswrapper[4811]: E0128 17:40:52.793439 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ce99ae460979bb98a1f7b6fc26e4a179045d36bacd27169fb47f6a99aebf0db\": container with ID starting with 7ce99ae460979bb98a1f7b6fc26e4a179045d36bacd27169fb47f6a99aebf0db not found: ID does not exist" containerID="7ce99ae460979bb98a1f7b6fc26e4a179045d36bacd27169fb47f6a99aebf0db" Jan 28 17:40:52 crc kubenswrapper[4811]: I0128 17:40:52.793475 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ce99ae460979bb98a1f7b6fc26e4a179045d36bacd27169fb47f6a99aebf0db"} err="failed to get container status \"7ce99ae460979bb98a1f7b6fc26e4a179045d36bacd27169fb47f6a99aebf0db\": rpc error: code = NotFound desc = could not find container \"7ce99ae460979bb98a1f7b6fc26e4a179045d36bacd27169fb47f6a99aebf0db\": container with ID starting with 7ce99ae460979bb98a1f7b6fc26e4a179045d36bacd27169fb47f6a99aebf0db not found: ID does not exist" Jan 28 17:40:52 crc kubenswrapper[4811]: I0128 17:40:52.793502 4811 scope.go:117] "RemoveContainer" containerID="12cb1eabd6e1d28abc6bbfa3b8b7699b3e6b1c5da3e66ca1611e2716153456aa" Jan 28 17:40:52 crc kubenswrapper[4811]: E0128 17:40:52.794099 4811 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"12cb1eabd6e1d28abc6bbfa3b8b7699b3e6b1c5da3e66ca1611e2716153456aa\": container with ID starting with 12cb1eabd6e1d28abc6bbfa3b8b7699b3e6b1c5da3e66ca1611e2716153456aa not found: ID does not exist" containerID="12cb1eabd6e1d28abc6bbfa3b8b7699b3e6b1c5da3e66ca1611e2716153456aa" Jan 28 17:40:52 crc kubenswrapper[4811]: I0128 17:40:52.794124 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12cb1eabd6e1d28abc6bbfa3b8b7699b3e6b1c5da3e66ca1611e2716153456aa"} err="failed to get container status \"12cb1eabd6e1d28abc6bbfa3b8b7699b3e6b1c5da3e66ca1611e2716153456aa\": rpc error: code = NotFound desc = could not find container \"12cb1eabd6e1d28abc6bbfa3b8b7699b3e6b1c5da3e66ca1611e2716153456aa\": container with ID starting with 12cb1eabd6e1d28abc6bbfa3b8b7699b3e6b1c5da3e66ca1611e2716153456aa not found: ID does not exist" Jan 28 17:40:54 crc kubenswrapper[4811]: I0128 17:40:54.352203 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7245ae6b-c852-4379-b449-d8020c8df3be" path="/var/lib/kubelet/pods/7245ae6b-c852-4379-b449-d8020c8df3be/volumes" Jan 28 17:41:01 crc kubenswrapper[4811]: I0128 17:41:01.206878 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-89dhj" Jan 28 17:41:01 crc kubenswrapper[4811]: I0128 17:41:01.257810 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-89dhj"] Jan 28 17:41:01 crc kubenswrapper[4811]: I0128 17:41:01.802111 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-89dhj" podUID="f3c46619-f2da-4c61-ae02-463abe5d433a" containerName="registry-server" containerID="cri-o://14560c67c1cd386b488de39e235ef10277bd62b8289af59f8ce0de37f2df3bf0" gracePeriod=2 Jan 28 17:41:02 crc kubenswrapper[4811]: I0128 17:41:02.343130 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-89dhj" Jan 28 17:41:02 crc kubenswrapper[4811]: I0128 17:41:02.414691 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8n2jk\" (UniqueName: \"kubernetes.io/projected/f3c46619-f2da-4c61-ae02-463abe5d433a-kube-api-access-8n2jk\") pod \"f3c46619-f2da-4c61-ae02-463abe5d433a\" (UID: \"f3c46619-f2da-4c61-ae02-463abe5d433a\") " Jan 28 17:41:02 crc kubenswrapper[4811]: I0128 17:41:02.415014 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3c46619-f2da-4c61-ae02-463abe5d433a-catalog-content\") pod \"f3c46619-f2da-4c61-ae02-463abe5d433a\" (UID: \"f3c46619-f2da-4c61-ae02-463abe5d433a\") " Jan 28 17:41:02 crc kubenswrapper[4811]: I0128 17:41:02.415097 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3c46619-f2da-4c61-ae02-463abe5d433a-utilities\") pod \"f3c46619-f2da-4c61-ae02-463abe5d433a\" (UID: \"f3c46619-f2da-4c61-ae02-463abe5d433a\") " Jan 28 17:41:02 crc kubenswrapper[4811]: I0128 17:41:02.415766 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3c46619-f2da-4c61-ae02-463abe5d433a-utilities" (OuterVolumeSpecName: "utilities") pod "f3c46619-f2da-4c61-ae02-463abe5d433a" (UID: "f3c46619-f2da-4c61-ae02-463abe5d433a"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:41:02 crc kubenswrapper[4811]: I0128 17:41:02.422122 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3c46619-f2da-4c61-ae02-463abe5d433a-kube-api-access-8n2jk" (OuterVolumeSpecName: "kube-api-access-8n2jk") pod "f3c46619-f2da-4c61-ae02-463abe5d433a" (UID: "f3c46619-f2da-4c61-ae02-463abe5d433a"). InnerVolumeSpecName "kube-api-access-8n2jk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:41:02 crc kubenswrapper[4811]: I0128 17:41:02.478087 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3c46619-f2da-4c61-ae02-463abe5d433a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f3c46619-f2da-4c61-ae02-463abe5d433a" (UID: "f3c46619-f2da-4c61-ae02-463abe5d433a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:41:02 crc kubenswrapper[4811]: I0128 17:41:02.519688 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3c46619-f2da-4c61-ae02-463abe5d433a-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 17:41:02 crc kubenswrapper[4811]: I0128 17:41:02.519944 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3c46619-f2da-4c61-ae02-463abe5d433a-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 17:41:02 crc kubenswrapper[4811]: I0128 17:41:02.520039 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8n2jk\" (UniqueName: \"kubernetes.io/projected/f3c46619-f2da-4c61-ae02-463abe5d433a-kube-api-access-8n2jk\") on node \"crc\" DevicePath \"\"" Jan 28 17:41:02 crc kubenswrapper[4811]: I0128 17:41:02.814057 4811 generic.go:334] "Generic (PLEG): container finished" podID="f3c46619-f2da-4c61-ae02-463abe5d433a" containerID="14560c67c1cd386b488de39e235ef10277bd62b8289af59f8ce0de37f2df3bf0" exitCode=0 Jan 28 17:41:02 crc kubenswrapper[4811]: I0128 17:41:02.814106 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-89dhj" Jan 28 17:41:02 crc kubenswrapper[4811]: I0128 17:41:02.814108 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-89dhj" event={"ID":"f3c46619-f2da-4c61-ae02-463abe5d433a","Type":"ContainerDied","Data":"14560c67c1cd386b488de39e235ef10277bd62b8289af59f8ce0de37f2df3bf0"} Jan 28 17:41:02 crc kubenswrapper[4811]: I0128 17:41:02.814163 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-89dhj" event={"ID":"f3c46619-f2da-4c61-ae02-463abe5d433a","Type":"ContainerDied","Data":"e35f47ad3786406106a4cf328027b67c94f57a42cf75b68b5731714344095e59"} Jan 28 17:41:02 crc kubenswrapper[4811]: I0128 17:41:02.814186 4811 scope.go:117] "RemoveContainer" containerID="14560c67c1cd386b488de39e235ef10277bd62b8289af59f8ce0de37f2df3bf0" Jan 28 17:41:02 crc kubenswrapper[4811]: I0128 17:41:02.837112 4811 scope.go:117] "RemoveContainer" containerID="fe3795f29c1937ff82ebf90e47b41ea0223903e482e20ba5bb65cf4102d459b5" Jan 28 17:41:02 crc kubenswrapper[4811]: I0128 17:41:02.855768 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-89dhj"] Jan 28 17:41:02 crc kubenswrapper[4811]: I0128 17:41:02.860244 4811 scope.go:117] "RemoveContainer" containerID="311b3e5c54f733a39c6b98b95c41bc71c1d058929cf5eee31a77ecb686bfa4b3" Jan 28 17:41:02 crc kubenswrapper[4811]: I0128 17:41:02.865559 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-89dhj"] Jan 28 17:41:02 crc kubenswrapper[4811]: I0128 17:41:02.914099 4811 scope.go:117] "RemoveContainer" containerID="14560c67c1cd386b488de39e235ef10277bd62b8289af59f8ce0de37f2df3bf0" Jan 28 17:41:02 crc kubenswrapper[4811]: E0128 17:41:02.914822 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14560c67c1cd386b488de39e235ef10277bd62b8289af59f8ce0de37f2df3bf0\": container with ID starting with 14560c67c1cd386b488de39e235ef10277bd62b8289af59f8ce0de37f2df3bf0 not found: ID does not exist" containerID="14560c67c1cd386b488de39e235ef10277bd62b8289af59f8ce0de37f2df3bf0" Jan 28 17:41:02 crc kubenswrapper[4811]: I0128 17:41:02.914871 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14560c67c1cd386b488de39e235ef10277bd62b8289af59f8ce0de37f2df3bf0"} err="failed to get container status \"14560c67c1cd386b488de39e235ef10277bd62b8289af59f8ce0de37f2df3bf0\": rpc error: code = NotFound desc = could not find container \"14560c67c1cd386b488de39e235ef10277bd62b8289af59f8ce0de37f2df3bf0\": container with ID starting with 14560c67c1cd386b488de39e235ef10277bd62b8289af59f8ce0de37f2df3bf0 not found: ID does not exist" Jan 28 17:41:02 crc kubenswrapper[4811]: I0128 17:41:02.914899 4811 scope.go:117] "RemoveContainer" containerID="fe3795f29c1937ff82ebf90e47b41ea0223903e482e20ba5bb65cf4102d459b5" Jan 28 17:41:02 crc kubenswrapper[4811]: E0128 17:41:02.915517 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe3795f29c1937ff82ebf90e47b41ea0223903e482e20ba5bb65cf4102d459b5\": container with ID starting with fe3795f29c1937ff82ebf90e47b41ea0223903e482e20ba5bb65cf4102d459b5 not found: ID does not exist" containerID="fe3795f29c1937ff82ebf90e47b41ea0223903e482e20ba5bb65cf4102d459b5" Jan 28 17:41:02 crc kubenswrapper[4811]: I0128 17:41:02.915540 4811 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe3795f29c1937ff82ebf90e47b41ea0223903e482e20ba5bb65cf4102d459b5"} err="failed to get container status \"fe3795f29c1937ff82ebf90e47b41ea0223903e482e20ba5bb65cf4102d459b5\": rpc error: code = NotFound desc = could not find container \"fe3795f29c1937ff82ebf90e47b41ea0223903e482e20ba5bb65cf4102d459b5\": container with ID starting with fe3795f29c1937ff82ebf90e47b41ea0223903e482e20ba5bb65cf4102d459b5 not found: ID does not exist" Jan 28 17:41:02 crc kubenswrapper[4811]: I0128 17:41:02.915558 4811 scope.go:117] "RemoveContainer" containerID="311b3e5c54f733a39c6b98b95c41bc71c1d058929cf5eee31a77ecb686bfa4b3" Jan 28 17:41:02 crc kubenswrapper[4811]: E0128 17:41:02.915911 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"311b3e5c54f733a39c6b98b95c41bc71c1d058929cf5eee31a77ecb686bfa4b3\": container with ID starting with 311b3e5c54f733a39c6b98b95c41bc71c1d058929cf5eee31a77ecb686bfa4b3 not found: ID does not exist" containerID="311b3e5c54f733a39c6b98b95c41bc71c1d058929cf5eee31a77ecb686bfa4b3" Jan 28 17:41:02 crc kubenswrapper[4811]: I0128 17:41:02.916036 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"311b3e5c54f733a39c6b98b95c41bc71c1d058929cf5eee31a77ecb686bfa4b3"} err="failed to get container status \"311b3e5c54f733a39c6b98b95c41bc71c1d058929cf5eee31a77ecb686bfa4b3\": rpc error: code = NotFound desc = could not find container \"311b3e5c54f733a39c6b98b95c41bc71c1d058929cf5eee31a77ecb686bfa4b3\": container with ID starting with 311b3e5c54f733a39c6b98b95c41bc71c1d058929cf5eee31a77ecb686bfa4b3 not found: ID does not exist" Jan 28 17:41:03 crc kubenswrapper[4811]: I0128 17:41:03.087955 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 17:41:03 crc kubenswrapper[4811]: I0128 17:41:03.088350 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 17:41:04 crc kubenswrapper[4811]: I0128 17:41:04.355545 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3c46619-f2da-4c61-ae02-463abe5d433a" path="/var/lib/kubelet/pods/f3c46619-f2da-4c61-ae02-463abe5d433a/volumes" Jan 28 17:41:18 crc kubenswrapper[4811]: I0128 17:41:18.050719 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-create-h89sv"] Jan 28 17:41:18 crc kubenswrapper[4811]: I0128 17:41:18.059951 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-create-h89sv"] Jan 28 17:41:18 crc kubenswrapper[4811]: I0128 17:41:18.352256 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5091e50e-aea2-4b79-aadc-275cd468319d" path="/var/lib/kubelet/pods/5091e50e-aea2-4b79-aadc-275cd468319d/volumes" Jan 28 17:41:19 crc kubenswrapper[4811]: I0128 17:41:19.043086 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-845d-account-create-update-wml6w"] Jan 28 17:41:19 crc kubenswrapper[4811]: I0128 
17:41:19.054107 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-845d-account-create-update-wml6w"] Jan 28 17:41:20 crc kubenswrapper[4811]: I0128 17:41:20.352243 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9fb696c6-0e6d-4bc8-9da2-67876886c2a7" path="/var/lib/kubelet/pods/9fb696c6-0e6d-4bc8-9da2-67876886c2a7/volumes" Jan 28 17:41:30 crc kubenswrapper[4811]: I0128 17:41:30.036834 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-sync-x45v9"] Jan 28 17:41:30 crc kubenswrapper[4811]: I0128 17:41:30.049048 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-sync-x45v9"] Jan 28 17:41:30 crc kubenswrapper[4811]: I0128 17:41:30.353934 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4432726-194b-43c5-8403-e1ecb8cda9c5" path="/var/lib/kubelet/pods/e4432726-194b-43c5-8403-e1ecb8cda9c5/volumes" Jan 28 17:41:33 crc kubenswrapper[4811]: I0128 17:41:33.086970 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 17:41:33 crc kubenswrapper[4811]: I0128 17:41:33.087575 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 17:41:36 crc kubenswrapper[4811]: I0128 17:41:36.554578 4811 scope.go:117] "RemoveContainer" containerID="06af11448ccdae6ae772862180ab6ca7b05ee3c967e0ddea2d88038ba6230ea1" Jan 28 17:41:36 crc kubenswrapper[4811]: I0128 17:41:36.587790 4811 scope.go:117] "RemoveContainer" containerID="e1287edb0431b6d37fc62e015bc34f1437896e472fd002e5b10f6e64bc516d01" Jan 28 17:41:36 crc kubenswrapper[4811]: I0128 17:41:36.644744 4811 scope.go:117] "RemoveContainer" containerID="6ed14d1d7d2a52efddac8669a018fb568046d82002466f957b945bed9b69205c" Jan 28 17:41:54 crc kubenswrapper[4811]: I0128 17:41:54.040318 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-d4f7-account-create-update-lnp5z"] Jan 28 17:41:54 crc kubenswrapper[4811]: I0128 17:41:54.050103 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-create-9vf2j"] Jan 28 17:41:54 crc kubenswrapper[4811]: I0128 17:41:54.061373 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-create-9vf2j"] Jan 28 17:41:54 crc kubenswrapper[4811]: I0128 17:41:54.070224 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-d4f7-account-create-update-lnp5z"] Jan 28 17:41:54 crc kubenswrapper[4811]: I0128 17:41:54.353307 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f548650-0d4f-4bb2-87e5-24f2e4291eeb" path="/var/lib/kubelet/pods/4f548650-0d4f-4bb2-87e5-24f2e4291eeb/volumes" Jan 28 17:41:54 crc kubenswrapper[4811]: I0128 17:41:54.354157 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af23e46d-fc06-409c-ab89-9da29e7c7eae" path="/var/lib/kubelet/pods/af23e46d-fc06-409c-ab89-9da29e7c7eae/volumes" Jan 28 17:42:03 crc kubenswrapper[4811]: I0128 17:42:03.087221 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon 
Jan 28 17:42:03 crc kubenswrapper[4811]: I0128 17:42:03.087868 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 17:42:03 crc kubenswrapper[4811]: I0128 17:42:03.087921 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6"
Jan 28 17:42:03 crc kubenswrapper[4811]: I0128 17:42:03.089037 4811 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1bcd36267aeacda468c2d2ee05b7c0018a25e8599ca1388f8f8967c101d5f7c7"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 28 17:42:03 crc kubenswrapper[4811]: I0128 17:42:03.089377 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://1bcd36267aeacda468c2d2ee05b7c0018a25e8599ca1388f8f8967c101d5f7c7" gracePeriod=600
Jan 28 17:42:03 crc kubenswrapper[4811]: E0128 17:42:03.208688 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:42:03 crc kubenswrapper[4811]: I0128 17:42:03.367618 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="1bcd36267aeacda468c2d2ee05b7c0018a25e8599ca1388f8f8967c101d5f7c7" exitCode=0
Jan 28 17:42:03 crc kubenswrapper[4811]: I0128 17:42:03.367677 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"1bcd36267aeacda468c2d2ee05b7c0018a25e8599ca1388f8f8967c101d5f7c7"}
Jan 28 17:42:03 crc kubenswrapper[4811]: I0128 17:42:03.367721 4811 scope.go:117] "RemoveContainer" containerID="2008bc8938819da747e2484f03f5e9a84bd57754f7787484853ede70e0f45bd7"
Jan 28 17:42:03 crc kubenswrapper[4811]: I0128 17:42:03.368455 4811 scope.go:117] "RemoveContainer" containerID="1bcd36267aeacda468c2d2ee05b7c0018a25e8599ca1388f8f8967c101d5f7c7"
Jan 28 17:42:03 crc kubenswrapper[4811]: E0128 17:42:03.368739 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
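From here the log settles into the CrashLoopBackOff pattern: a "RemoveContainer" attempt followed by the "back-off 5m0s restarting failed container" error, first every 10-15 seconds and then every few minutes. The kubelet backs off restarts exponentially, doubling the delay (10s initial delay upstream) up to the 5m cap quoted in the message. A small sketch of that schedule (illustrative only, not the kubelet's actual implementation):

    package main

    import (
        "fmt"
        "time"
    )

    // crashLoopDelay returns an exponential restart delay: base doubles per
    // consecutive failure and is clamped at limit. With a 10s base and a 5m
    // limit this matches the "back-off 5m0s" messages above.
    func crashLoopDelay(failures int, base, limit time.Duration) time.Duration {
        d := base
        for i := 1; i < failures; i++ {
            d *= 2
            if d >= limit {
                return limit
            }
        }
        return d
    }

    func main() {
        for n := 1; n <= 7; n++ {
            fmt.Println(n, crashLoopDelay(n, 10*time.Second, 5*time.Minute))
        }
        // 10s 20s 40s 1m20s 2m40s 5m0s 5m0s -- once the cap is hit, retries
        // stay 5m apart, which is why the same message repeats below.
    }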
Jan 28 17:42:15 crc kubenswrapper[4811]: I0128 17:42:15.340102 4811 scope.go:117] "RemoveContainer" containerID="1bcd36267aeacda468c2d2ee05b7c0018a25e8599ca1388f8f8967c101d5f7c7"
Jan 28 17:42:15 crc kubenswrapper[4811]: E0128 17:42:15.341199 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:42:17 crc kubenswrapper[4811]: I0128 17:42:17.042084 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-sync-wbztr"]
Jan 28 17:42:17 crc kubenswrapper[4811]: I0128 17:42:17.053086 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-sync-wbztr"]
Jan 28 17:42:18 crc kubenswrapper[4811]: I0128 17:42:18.355355 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d47a9735-0b67-4c06-812c-07bd37e9002d" path="/var/lib/kubelet/pods/d47a9735-0b67-4c06-812c-07bd37e9002d/volumes"
Jan 28 17:42:29 crc kubenswrapper[4811]: I0128 17:42:29.340701 4811 scope.go:117] "RemoveContainer" containerID="1bcd36267aeacda468c2d2ee05b7c0018a25e8599ca1388f8f8967c101d5f7c7"
Jan 28 17:42:29 crc kubenswrapper[4811]: E0128 17:42:29.341382 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:42:36 crc kubenswrapper[4811]: I0128 17:42:36.848479 4811 scope.go:117] "RemoveContainer" containerID="eb7e9bc5c819711b5ae24de1b9c8d76ad77a47a050e17813ff1235c17b2c8b83"
Jan 28 17:42:36 crc kubenswrapper[4811]: I0128 17:42:36.880416 4811 scope.go:117] "RemoveContainer" containerID="651b973e5990a6b7476d0762d53a2a169d8a1e830c576151f159af22bb494fe1"
Jan 28 17:42:36 crc kubenswrapper[4811]: I0128 17:42:36.927810 4811 scope.go:117] "RemoveContainer" containerID="0d1ea42c2c9771661ba140247df0064e66e8e14ac8ac180f2f157e89c7e01d5f"
Jan 28 17:42:40 crc kubenswrapper[4811]: I0128 17:42:40.342164 4811 scope.go:117] "RemoveContainer" containerID="1bcd36267aeacda468c2d2ee05b7c0018a25e8599ca1388f8f8967c101d5f7c7"
Jan 28 17:42:40 crc kubenswrapper[4811]: E0128 17:42:40.343038 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:42:51 crc kubenswrapper[4811]: I0128 17:42:51.339582 4811 scope.go:117] "RemoveContainer" containerID="1bcd36267aeacda468c2d2ee05b7c0018a25e8599ca1388f8f8967c101d5f7c7"
Jan 28 17:42:51 crc kubenswrapper[4811]: E0128 17:42:51.340359 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:43:06 crc kubenswrapper[4811]: I0128 17:43:06.340850 4811 scope.go:117] "RemoveContainer" containerID="1bcd36267aeacda468c2d2ee05b7c0018a25e8599ca1388f8f8967c101d5f7c7"
Jan 28 17:43:06 crc kubenswrapper[4811]: E0128 17:43:06.341861 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:43:14 crc kubenswrapper[4811]: I0128 17:43:14.838694 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rnxq4"]
Jan 28 17:43:14 crc kubenswrapper[4811]: E0128 17:43:14.840137 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3c46619-f2da-4c61-ae02-463abe5d433a" containerName="extract-utilities"
Jan 28 17:43:14 crc kubenswrapper[4811]: I0128 17:43:14.840151 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3c46619-f2da-4c61-ae02-463abe5d433a" containerName="extract-utilities"
Jan 28 17:43:14 crc kubenswrapper[4811]: E0128 17:43:14.840193 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3c46619-f2da-4c61-ae02-463abe5d433a" containerName="registry-server"
Jan 28 17:43:14 crc kubenswrapper[4811]: I0128 17:43:14.840201 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3c46619-f2da-4c61-ae02-463abe5d433a" containerName="registry-server"
Jan 28 17:43:14 crc kubenswrapper[4811]: E0128 17:43:14.840218 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7245ae6b-c852-4379-b449-d8020c8df3be" containerName="extract-content"
Jan 28 17:43:14 crc kubenswrapper[4811]: I0128 17:43:14.840227 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="7245ae6b-c852-4379-b449-d8020c8df3be" containerName="extract-content"
Jan 28 17:43:14 crc kubenswrapper[4811]: E0128 17:43:14.840236 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7245ae6b-c852-4379-b449-d8020c8df3be" containerName="extract-utilities"
Jan 28 17:43:14 crc kubenswrapper[4811]: I0128 17:43:14.840244 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="7245ae6b-c852-4379-b449-d8020c8df3be" containerName="extract-utilities"
Jan 28 17:43:14 crc kubenswrapper[4811]: E0128 17:43:14.840275 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7245ae6b-c852-4379-b449-d8020c8df3be" containerName="registry-server"
Jan 28 17:43:14 crc kubenswrapper[4811]: I0128 17:43:14.840281 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="7245ae6b-c852-4379-b449-d8020c8df3be" containerName="registry-server"
Jan 28 17:43:14 crc kubenswrapper[4811]: E0128 17:43:14.840293 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3c46619-f2da-4c61-ae02-463abe5d433a" containerName="extract-content"
Jan 28 17:43:14 crc kubenswrapper[4811]: I0128 17:43:14.840300 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3c46619-f2da-4c61-ae02-463abe5d433a" containerName="extract-content"
Jan 28 17:43:14 crc kubenswrapper[4811]: I0128 17:43:14.840755 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3c46619-f2da-4c61-ae02-463abe5d433a" containerName="registry-server"
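The cpu_manager/state_mem/memory_manager triplets above fire when a new pod is admitted: resource-manager state is keyed by (podUID, containerName), and entries belonging to pods that no longer exist are dropped before the new pod's containers are assigned. A toy sketch of that keyed cleanup (types and names here are illustrative, not the kubelet's):

    package main

    import "fmt"

    type key struct{ podUID, container string }

    // removeStaleState drops every per-container assignment whose pod is no
    // longer active -- the same pattern as the "RemoveStaleState: removing
    // container" / "Deleted CPUSet assignment" pairs in the log above.
    func removeStaleState(state map[key]string, active map[string]bool) {
        for k := range state { // deleting while ranging over a map is safe in Go
            if !active[k.podUID] {
                fmt.Printf("removing stale assignment %s/%s\n", k.podUID, k.container)
                delete(state, k)
            }
        }
    }

    func main() {
        state := map[key]string{
            {"f3c46619", "registry-server"}: "cpus 0-3",
            {"7245ae6b", "extract-content"}: "cpus 0-3",
        }
        removeStaleState(state, map[string]bool{}) // no pods active
        fmt.Println(len(state))                    // 0
    }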
Jan 28 17:43:14 crc kubenswrapper[4811]: I0128 17:43:14.840801 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="7245ae6b-c852-4379-b449-d8020c8df3be" containerName="registry-server"
Jan 28 17:43:14 crc kubenswrapper[4811]: I0128 17:43:14.846223 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rnxq4"
Jan 28 17:43:14 crc kubenswrapper[4811]: I0128 17:43:14.858342 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rnxq4"]
Jan 28 17:43:14 crc kubenswrapper[4811]: I0128 17:43:14.940379 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a-utilities\") pod \"redhat-operators-rnxq4\" (UID: \"69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a\") " pod="openshift-marketplace/redhat-operators-rnxq4"
Jan 28 17:43:14 crc kubenswrapper[4811]: I0128 17:43:14.940456 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a-catalog-content\") pod \"redhat-operators-rnxq4\" (UID: \"69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a\") " pod="openshift-marketplace/redhat-operators-rnxq4"
Jan 28 17:43:14 crc kubenswrapper[4811]: I0128 17:43:14.940685 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgdth\" (UniqueName: \"kubernetes.io/projected/69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a-kube-api-access-dgdth\") pod \"redhat-operators-rnxq4\" (UID: \"69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a\") " pod="openshift-marketplace/redhat-operators-rnxq4"
Jan 28 17:43:15 crc kubenswrapper[4811]: I0128 17:43:15.042803 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgdth\" (UniqueName: \"kubernetes.io/projected/69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a-kube-api-access-dgdth\") pod \"redhat-operators-rnxq4\" (UID: \"69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a\") " pod="openshift-marketplace/redhat-operators-rnxq4"
Jan 28 17:43:15 crc kubenswrapper[4811]: I0128 17:43:15.042908 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a-utilities\") pod \"redhat-operators-rnxq4\" (UID: \"69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a\") " pod="openshift-marketplace/redhat-operators-rnxq4"
Jan 28 17:43:15 crc kubenswrapper[4811]: I0128 17:43:15.042936 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a-catalog-content\") pod \"redhat-operators-rnxq4\" (UID: \"69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a\") " pod="openshift-marketplace/redhat-operators-rnxq4"
Jan 28 17:43:15 crc kubenswrapper[4811]: I0128 17:43:15.043389 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a-catalog-content\") pod \"redhat-operators-rnxq4\" (UID: \"69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a\") " pod="openshift-marketplace/redhat-operators-rnxq4"
Jan 28 17:43:15 crc kubenswrapper[4811]: I0128 17:43:15.043622 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a-utilities\") pod \"redhat-operators-rnxq4\" (UID: \"69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a\") " pod="openshift-marketplace/redhat-operators-rnxq4"
Jan 28 17:43:15 crc kubenswrapper[4811]: I0128 17:43:15.064023 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgdth\" (UniqueName: \"kubernetes.io/projected/69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a-kube-api-access-dgdth\") pod \"redhat-operators-rnxq4\" (UID: \"69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a\") " pod="openshift-marketplace/redhat-operators-rnxq4"
Jan 28 17:43:15 crc kubenswrapper[4811]: I0128 17:43:15.178487 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rnxq4"
Jan 28 17:43:15 crc kubenswrapper[4811]: I0128 17:43:15.662515 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rnxq4"]
Jan 28 17:43:16 crc kubenswrapper[4811]: I0128 17:43:16.090790 4811 generic.go:334] "Generic (PLEG): container finished" podID="69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a" containerID="dbae183ae7daece0d7bb0c319d716d10d3b6e86e0b70e603cba3c405aab35054" exitCode=0
Jan 28 17:43:16 crc kubenswrapper[4811]: I0128 17:43:16.090910 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rnxq4" event={"ID":"69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a","Type":"ContainerDied","Data":"dbae183ae7daece0d7bb0c319d716d10d3b6e86e0b70e603cba3c405aab35054"}
Jan 28 17:43:16 crc kubenswrapper[4811]: I0128 17:43:16.091125 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rnxq4" event={"ID":"69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a","Type":"ContainerStarted","Data":"2032ba3806595b7e175b366475dbcd8b797881a4a05703d0a66b897f9c46c021"}
Jan 28 17:43:16 crc kubenswrapper[4811]: I0128 17:43:16.093197 4811 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 28 17:43:17 crc kubenswrapper[4811]: I0128 17:43:17.103258 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rnxq4" event={"ID":"69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a","Type":"ContainerStarted","Data":"6b89e8e4ce46ab81e605bae0676666b57d08d24cc0a5fc5ed2d961488a5f20ef"}
Jan 28 17:43:17 crc kubenswrapper[4811]: I0128 17:43:17.339914 4811 scope.go:117] "RemoveContainer" containerID="1bcd36267aeacda468c2d2ee05b7c0018a25e8599ca1388f8f8967c101d5f7c7"
Jan 28 17:43:17 crc kubenswrapper[4811]: E0128 17:43:17.340152 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:43:23 crc kubenswrapper[4811]: I0128 17:43:23.159977 4811 generic.go:334] "Generic (PLEG): container finished" podID="69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a" containerID="6b89e8e4ce46ab81e605bae0676666b57d08d24cc0a5fc5ed2d961488a5f20ef" exitCode=0
Jan 28 17:43:23 crc kubenswrapper[4811]: I0128 17:43:23.160056 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rnxq4" event={"ID":"69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a","Type":"ContainerDied","Data":"6b89e8e4ce46ab81e605bae0676666b57d08d24cc0a5fc5ed2d961488a5f20ef"}
event={"ID":"69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a","Type":"ContainerDied","Data":"6b89e8e4ce46ab81e605bae0676666b57d08d24cc0a5fc5ed2d961488a5f20ef"} Jan 28 17:43:24 crc kubenswrapper[4811]: I0128 17:43:24.172864 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rnxq4" event={"ID":"69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a","Type":"ContainerStarted","Data":"21ca9853b041f3a75832dab2264db2c9572aafc3b978bcac0f6a5064b99370dd"} Jan 28 17:43:24 crc kubenswrapper[4811]: I0128 17:43:24.192451 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rnxq4" podStartSLOduration=2.715464318 podStartE2EDuration="10.192413938s" podCreationTimestamp="2026-01-28 17:43:14 +0000 UTC" firstStartedPulling="2026-01-28 17:43:16.092950876 +0000 UTC m=+7088.847314459" lastFinishedPulling="2026-01-28 17:43:23.569900486 +0000 UTC m=+7096.324264079" observedRunningTime="2026-01-28 17:43:24.191033131 +0000 UTC m=+7096.945396714" watchObservedRunningTime="2026-01-28 17:43:24.192413938 +0000 UTC m=+7096.946777521" Jan 28 17:43:25 crc kubenswrapper[4811]: I0128 17:43:25.179029 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rnxq4" Jan 28 17:43:25 crc kubenswrapper[4811]: I0128 17:43:25.179355 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rnxq4" Jan 28 17:43:26 crc kubenswrapper[4811]: I0128 17:43:26.227934 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-rnxq4" podUID="69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a" containerName="registry-server" probeResult="failure" output=< Jan 28 17:43:26 crc kubenswrapper[4811]: timeout: failed to connect service ":50051" within 1s Jan 28 17:43:26 crc kubenswrapper[4811]: > Jan 28 17:43:32 crc kubenswrapper[4811]: I0128 17:43:32.339954 4811 scope.go:117] "RemoveContainer" containerID="1bcd36267aeacda468c2d2ee05b7c0018a25e8599ca1388f8f8967c101d5f7c7" Jan 28 17:43:32 crc kubenswrapper[4811]: E0128 17:43:32.341123 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:43:35 crc kubenswrapper[4811]: I0128 17:43:35.230078 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rnxq4" Jan 28 17:43:35 crc kubenswrapper[4811]: I0128 17:43:35.294602 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rnxq4" Jan 28 17:43:35 crc kubenswrapper[4811]: I0128 17:43:35.466191 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rnxq4"] Jan 28 17:43:36 crc kubenswrapper[4811]: I0128 17:43:36.278367 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rnxq4" podUID="69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a" containerName="registry-server" containerID="cri-o://21ca9853b041f3a75832dab2264db2c9572aafc3b978bcac0f6a5064b99370dd" gracePeriod=2 Jan 28 17:43:36 crc kubenswrapper[4811]: I0128 17:43:36.826723 4811 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rnxq4" Jan 28 17:43:37 crc kubenswrapper[4811]: I0128 17:43:37.009109 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dgdth\" (UniqueName: \"kubernetes.io/projected/69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a-kube-api-access-dgdth\") pod \"69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a\" (UID: \"69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a\") " Jan 28 17:43:37 crc kubenswrapper[4811]: I0128 17:43:37.009155 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a-catalog-content\") pod \"69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a\" (UID: \"69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a\") " Jan 28 17:43:37 crc kubenswrapper[4811]: I0128 17:43:37.009309 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a-utilities\") pod \"69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a\" (UID: \"69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a\") " Jan 28 17:43:37 crc kubenswrapper[4811]: I0128 17:43:37.010198 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a-utilities" (OuterVolumeSpecName: "utilities") pod "69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a" (UID: "69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:43:37 crc kubenswrapper[4811]: I0128 17:43:37.021724 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a-kube-api-access-dgdth" (OuterVolumeSpecName: "kube-api-access-dgdth") pod "69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a" (UID: "69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a"). InnerVolumeSpecName "kube-api-access-dgdth". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:43:37 crc kubenswrapper[4811]: I0128 17:43:37.112257 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 17:43:37 crc kubenswrapper[4811]: I0128 17:43:37.112640 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dgdth\" (UniqueName: \"kubernetes.io/projected/69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a-kube-api-access-dgdth\") on node \"crc\" DevicePath \"\"" Jan 28 17:43:37 crc kubenswrapper[4811]: I0128 17:43:37.129472 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a" (UID: "69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:43:37 crc kubenswrapper[4811]: I0128 17:43:37.215161 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 17:43:37 crc kubenswrapper[4811]: I0128 17:43:37.544282 4811 generic.go:334] "Generic (PLEG): container finished" podID="69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a" containerID="21ca9853b041f3a75832dab2264db2c9572aafc3b978bcac0f6a5064b99370dd" exitCode=0 Jan 28 17:43:37 crc kubenswrapper[4811]: I0128 17:43:37.544553 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rnxq4" event={"ID":"69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a","Type":"ContainerDied","Data":"21ca9853b041f3a75832dab2264db2c9572aafc3b978bcac0f6a5064b99370dd"} Jan 28 17:43:37 crc kubenswrapper[4811]: I0128 17:43:37.544809 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rnxq4" event={"ID":"69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a","Type":"ContainerDied","Data":"2032ba3806595b7e175b366475dbcd8b797881a4a05703d0a66b897f9c46c021"} Jan 28 17:43:37 crc kubenswrapper[4811]: I0128 17:43:37.544840 4811 scope.go:117] "RemoveContainer" containerID="21ca9853b041f3a75832dab2264db2c9572aafc3b978bcac0f6a5064b99370dd" Jan 28 17:43:37 crc kubenswrapper[4811]: I0128 17:43:37.544655 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rnxq4" Jan 28 17:43:37 crc kubenswrapper[4811]: I0128 17:43:37.590984 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rnxq4"] Jan 28 17:43:37 crc kubenswrapper[4811]: I0128 17:43:37.591710 4811 scope.go:117] "RemoveContainer" containerID="6b89e8e4ce46ab81e605bae0676666b57d08d24cc0a5fc5ed2d961488a5f20ef" Jan 28 17:43:37 crc kubenswrapper[4811]: I0128 17:43:37.600027 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rnxq4"] Jan 28 17:43:37 crc kubenswrapper[4811]: I0128 17:43:37.620917 4811 scope.go:117] "RemoveContainer" containerID="dbae183ae7daece0d7bb0c319d716d10d3b6e86e0b70e603cba3c405aab35054" Jan 28 17:43:37 crc kubenswrapper[4811]: I0128 17:43:37.666307 4811 scope.go:117] "RemoveContainer" containerID="21ca9853b041f3a75832dab2264db2c9572aafc3b978bcac0f6a5064b99370dd" Jan 28 17:43:37 crc kubenswrapper[4811]: E0128 17:43:37.666807 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21ca9853b041f3a75832dab2264db2c9572aafc3b978bcac0f6a5064b99370dd\": container with ID starting with 21ca9853b041f3a75832dab2264db2c9572aafc3b978bcac0f6a5064b99370dd not found: ID does not exist" containerID="21ca9853b041f3a75832dab2264db2c9572aafc3b978bcac0f6a5064b99370dd" Jan 28 17:43:37 crc kubenswrapper[4811]: I0128 17:43:37.666860 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21ca9853b041f3a75832dab2264db2c9572aafc3b978bcac0f6a5064b99370dd"} err="failed to get container status \"21ca9853b041f3a75832dab2264db2c9572aafc3b978bcac0f6a5064b99370dd\": rpc error: code = NotFound desc = could not find container \"21ca9853b041f3a75832dab2264db2c9572aafc3b978bcac0f6a5064b99370dd\": container with ID starting with 21ca9853b041f3a75832dab2264db2c9572aafc3b978bcac0f6a5064b99370dd not found: ID does not exist" Jan 28 17:43:37 crc 
Jan 28 17:43:37 crc kubenswrapper[4811]: E0128 17:43:37.667494 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b89e8e4ce46ab81e605bae0676666b57d08d24cc0a5fc5ed2d961488a5f20ef\": container with ID starting with 6b89e8e4ce46ab81e605bae0676666b57d08d24cc0a5fc5ed2d961488a5f20ef not found: ID does not exist" containerID="6b89e8e4ce46ab81e605bae0676666b57d08d24cc0a5fc5ed2d961488a5f20ef"
Jan 28 17:43:37 crc kubenswrapper[4811]: I0128 17:43:37.667530 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b89e8e4ce46ab81e605bae0676666b57d08d24cc0a5fc5ed2d961488a5f20ef"} err="failed to get container status \"6b89e8e4ce46ab81e605bae0676666b57d08d24cc0a5fc5ed2d961488a5f20ef\": rpc error: code = NotFound desc = could not find container \"6b89e8e4ce46ab81e605bae0676666b57d08d24cc0a5fc5ed2d961488a5f20ef\": container with ID starting with 6b89e8e4ce46ab81e605bae0676666b57d08d24cc0a5fc5ed2d961488a5f20ef not found: ID does not exist"
Jan 28 17:43:37 crc kubenswrapper[4811]: I0128 17:43:37.667562 4811 scope.go:117] "RemoveContainer" containerID="dbae183ae7daece0d7bb0c319d716d10d3b6e86e0b70e603cba3c405aab35054"
Jan 28 17:43:37 crc kubenswrapper[4811]: E0128 17:43:37.667820 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dbae183ae7daece0d7bb0c319d716d10d3b6e86e0b70e603cba3c405aab35054\": container with ID starting with dbae183ae7daece0d7bb0c319d716d10d3b6e86e0b70e603cba3c405aab35054 not found: ID does not exist" containerID="dbae183ae7daece0d7bb0c319d716d10d3b6e86e0b70e603cba3c405aab35054"
Jan 28 17:43:37 crc kubenswrapper[4811]: I0128 17:43:37.667853 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dbae183ae7daece0d7bb0c319d716d10d3b6e86e0b70e603cba3c405aab35054"} err="failed to get container status \"dbae183ae7daece0d7bb0c319d716d10d3b6e86e0b70e603cba3c405aab35054\": rpc error: code = NotFound desc = could not find container \"dbae183ae7daece0d7bb0c319d716d10d3b6e86e0b70e603cba3c405aab35054\": container with ID starting with dbae183ae7daece0d7bb0c319d716d10d3b6e86e0b70e603cba3c405aab35054 not found: ID does not exist"
Jan 28 17:43:38 crc kubenswrapper[4811]: I0128 17:43:38.353486 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a" path="/var/lib/kubelet/pods/69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a/volumes"
Jan 28 17:43:43 crc kubenswrapper[4811]: I0128 17:43:43.340454 4811 scope.go:117] "RemoveContainer" containerID="1bcd36267aeacda468c2d2ee05b7c0018a25e8599ca1388f8f8967c101d5f7c7"
Jan 28 17:43:43 crc kubenswrapper[4811]: E0128 17:43:43.341013 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:43:54 crc kubenswrapper[4811]: I0128 17:43:54.340405 4811 scope.go:117] "RemoveContainer" containerID="1bcd36267aeacda468c2d2ee05b7c0018a25e8599ca1388f8f8967c101d5f7c7"
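Once the API object is gone (SyncLoop REMOVE) and all containers are deleted, housekeeping removes the pod's leftover on-disk state, hence "Cleaned up orphaned pod volumes dir" with path /var/lib/kubelet/pods/<podUID>/volumes. A read-only sketch of how such orphans could be found (hypothetical checker; listOrphanedVolumeDirs is not a kubelet function):

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    // listOrphanedVolumeDirs returns pod volume dirs under root whose pod UID
    // is not in the active set -- the condition under which the kubelet logs
    // "Cleaned up orphaned pod volumes dir" and removes the directory.
    func listOrphanedVolumeDirs(root string, active map[string]bool) ([]string, error) {
        entries, err := os.ReadDir(root)
        if err != nil {
            return nil, err
        }
        var orphans []string
        for _, e := range entries {
            if e.IsDir() && !active[e.Name()] { // directory names are pod UIDs
                orphans = append(orphans, filepath.Join(root, e.Name(), "volumes"))
            }
        }
        return orphans, nil
    }

    func main() {
        orphans, err := listOrphanedVolumeDirs("/var/lib/kubelet/pods", map[string]bool{})
        if err != nil {
            fmt.Println(err)
            return
        }
        for _, p := range orphans {
            fmt.Println("orphaned:", p)
        }
    }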
Jan 28 17:43:54 crc kubenswrapper[4811]: E0128 17:43:54.343107 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:44:07 crc kubenswrapper[4811]: I0128 17:44:07.340200 4811 scope.go:117] "RemoveContainer" containerID="1bcd36267aeacda468c2d2ee05b7c0018a25e8599ca1388f8f8967c101d5f7c7"
Jan 28 17:44:07 crc kubenswrapper[4811]: E0128 17:44:07.341050 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:44:22 crc kubenswrapper[4811]: I0128 17:44:22.339504 4811 scope.go:117] "RemoveContainer" containerID="1bcd36267aeacda468c2d2ee05b7c0018a25e8599ca1388f8f8967c101d5f7c7"
Jan 28 17:44:22 crc kubenswrapper[4811]: E0128 17:44:22.340665 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:44:33 crc kubenswrapper[4811]: I0128 17:44:33.339814 4811 scope.go:117] "RemoveContainer" containerID="1bcd36267aeacda468c2d2ee05b7c0018a25e8599ca1388f8f8967c101d5f7c7"
Jan 28 17:44:33 crc kubenswrapper[4811]: E0128 17:44:33.340792 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:44:47 crc kubenswrapper[4811]: I0128 17:44:47.339782 4811 scope.go:117] "RemoveContainer" containerID="1bcd36267aeacda468c2d2ee05b7c0018a25e8599ca1388f8f8967c101d5f7c7"
Jan 28 17:44:47 crc kubenswrapper[4811]: E0128 17:44:47.340701 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:44:58 crc kubenswrapper[4811]: I0128 17:44:58.348408 4811 scope.go:117] "RemoveContainer" containerID="1bcd36267aeacda468c2d2ee05b7c0018a25e8599ca1388f8f8967c101d5f7c7"
Jan 28 17:44:58 crc kubenswrapper[4811]: E0128 17:44:58.349521 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:45:00 crc kubenswrapper[4811]: I0128 17:45:00.143702 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493705-lm598"]
Jan 28 17:45:00 crc kubenswrapper[4811]: E0128 17:45:00.144516 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a" containerName="extract-utilities"
Jan 28 17:45:00 crc kubenswrapper[4811]: I0128 17:45:00.144538 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a" containerName="extract-utilities"
Jan 28 17:45:00 crc kubenswrapper[4811]: E0128 17:45:00.144569 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a" containerName="extract-content"
Jan 28 17:45:00 crc kubenswrapper[4811]: I0128 17:45:00.144575 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a" containerName="extract-content"
Jan 28 17:45:00 crc kubenswrapper[4811]: E0128 17:45:00.144594 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a" containerName="registry-server"
Jan 28 17:45:00 crc kubenswrapper[4811]: I0128 17:45:00.144602 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a" containerName="registry-server"
Jan 28 17:45:00 crc kubenswrapper[4811]: I0128 17:45:00.144817 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="69fcc1ce-3528-4a6e-989a-81d5ab3c6d2a" containerName="registry-server"
Jan 28 17:45:00 crc kubenswrapper[4811]: I0128 17:45:00.145653 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493705-lm598"
Jan 28 17:45:00 crc kubenswrapper[4811]: I0128 17:45:00.149028 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Jan 28 17:45:00 crc kubenswrapper[4811]: I0128 17:45:00.149141 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Jan 28 17:45:00 crc kubenswrapper[4811]: I0128 17:45:00.164082 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493705-lm598"]
Jan 28 17:45:00 crc kubenswrapper[4811]: I0128 17:45:00.249016 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmwqt\" (UniqueName: \"kubernetes.io/projected/be69614c-cb2d-44c4-9ff1-bec81201f8b2-kube-api-access-nmwqt\") pod \"collect-profiles-29493705-lm598\" (UID: \"be69614c-cb2d-44c4-9ff1-bec81201f8b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493705-lm598"
Jan 28 17:45:00 crc kubenswrapper[4811]: I0128 17:45:00.249367 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/be69614c-cb2d-44c4-9ff1-bec81201f8b2-config-volume\") pod \"collect-profiles-29493705-lm598\" (UID: \"be69614c-cb2d-44c4-9ff1-bec81201f8b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493705-lm598"
Jan 28 17:45:00 crc kubenswrapper[4811]: I0128 17:45:00.249514 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/be69614c-cb2d-44c4-9ff1-bec81201f8b2-secret-volume\") pod \"collect-profiles-29493705-lm598\" (UID: \"be69614c-cb2d-44c4-9ff1-bec81201f8b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493705-lm598"
Jan 28 17:45:00 crc kubenswrapper[4811]: I0128 17:45:00.353636 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/be69614c-cb2d-44c4-9ff1-bec81201f8b2-config-volume\") pod \"collect-profiles-29493705-lm598\" (UID: \"be69614c-cb2d-44c4-9ff1-bec81201f8b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493705-lm598"
Jan 28 17:45:00 crc kubenswrapper[4811]: I0128 17:45:00.353791 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/be69614c-cb2d-44c4-9ff1-bec81201f8b2-secret-volume\") pod \"collect-profiles-29493705-lm598\" (UID: \"be69614c-cb2d-44c4-9ff1-bec81201f8b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493705-lm598"
Jan 28 17:45:00 crc kubenswrapper[4811]: I0128 17:45:00.354095 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmwqt\" (UniqueName: \"kubernetes.io/projected/be69614c-cb2d-44c4-9ff1-bec81201f8b2-kube-api-access-nmwqt\") pod \"collect-profiles-29493705-lm598\" (UID: \"be69614c-cb2d-44c4-9ff1-bec81201f8b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493705-lm598"
Jan 28 17:45:00 crc kubenswrapper[4811]: I0128 17:45:00.354883 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/be69614c-cb2d-44c4-9ff1-bec81201f8b2-config-volume\") pod \"collect-profiles-29493705-lm598\" (UID: \"be69614c-cb2d-44c4-9ff1-bec81201f8b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493705-lm598"
\"collect-profiles-29493705-lm598\" (UID: \"be69614c-cb2d-44c4-9ff1-bec81201f8b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493705-lm598" Jan 28 17:45:00 crc kubenswrapper[4811]: I0128 17:45:00.364384 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/be69614c-cb2d-44c4-9ff1-bec81201f8b2-secret-volume\") pod \"collect-profiles-29493705-lm598\" (UID: \"be69614c-cb2d-44c4-9ff1-bec81201f8b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493705-lm598" Jan 28 17:45:00 crc kubenswrapper[4811]: I0128 17:45:00.378011 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmwqt\" (UniqueName: \"kubernetes.io/projected/be69614c-cb2d-44c4-9ff1-bec81201f8b2-kube-api-access-nmwqt\") pod \"collect-profiles-29493705-lm598\" (UID: \"be69614c-cb2d-44c4-9ff1-bec81201f8b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493705-lm598" Jan 28 17:45:00 crc kubenswrapper[4811]: I0128 17:45:00.471380 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493705-lm598" Jan 28 17:45:00 crc kubenswrapper[4811]: I0128 17:45:00.970918 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493705-lm598"] Jan 28 17:45:01 crc kubenswrapper[4811]: I0128 17:45:01.315037 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493705-lm598" event={"ID":"be69614c-cb2d-44c4-9ff1-bec81201f8b2","Type":"ContainerStarted","Data":"5877777ba5409a8778414ab059aeb96fa5c0038bdb8c37c733fb75ea9a9624c8"} Jan 28 17:45:01 crc kubenswrapper[4811]: I0128 17:45:01.315356 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493705-lm598" event={"ID":"be69614c-cb2d-44c4-9ff1-bec81201f8b2","Type":"ContainerStarted","Data":"9486def1d9c1cfc08a4899176634133373e1cc9352b09dde133cc07802740dac"} Jan 28 17:45:01 crc kubenswrapper[4811]: I0128 17:45:01.344339 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29493705-lm598" podStartSLOduration=1.3443157110000001 podStartE2EDuration="1.344315711s" podCreationTimestamp="2026-01-28 17:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 17:45:01.332264145 +0000 UTC m=+7194.086627728" watchObservedRunningTime="2026-01-28 17:45:01.344315711 +0000 UTC m=+7194.098679294" Jan 28 17:45:02 crc kubenswrapper[4811]: I0128 17:45:02.382583 4811 generic.go:334] "Generic (PLEG): container finished" podID="be69614c-cb2d-44c4-9ff1-bec81201f8b2" containerID="5877777ba5409a8778414ab059aeb96fa5c0038bdb8c37c733fb75ea9a9624c8" exitCode=0 Jan 28 17:45:02 crc kubenswrapper[4811]: I0128 17:45:02.385866 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493705-lm598" event={"ID":"be69614c-cb2d-44c4-9ff1-bec81201f8b2","Type":"ContainerDied","Data":"5877777ba5409a8778414ab059aeb96fa5c0038bdb8c37c733fb75ea9a9624c8"} Jan 28 17:45:03 crc kubenswrapper[4811]: I0128 17:45:03.783663 4811 util.go:48] "No ready sandbox for pod can be found. 
Jan 28 17:45:03 crc kubenswrapper[4811]: I0128 17:45:03.942106 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmwqt\" (UniqueName: \"kubernetes.io/projected/be69614c-cb2d-44c4-9ff1-bec81201f8b2-kube-api-access-nmwqt\") pod \"be69614c-cb2d-44c4-9ff1-bec81201f8b2\" (UID: \"be69614c-cb2d-44c4-9ff1-bec81201f8b2\") "
Jan 28 17:45:03 crc kubenswrapper[4811]: I0128 17:45:03.942519 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/be69614c-cb2d-44c4-9ff1-bec81201f8b2-config-volume\") pod \"be69614c-cb2d-44c4-9ff1-bec81201f8b2\" (UID: \"be69614c-cb2d-44c4-9ff1-bec81201f8b2\") "
Jan 28 17:45:03 crc kubenswrapper[4811]: I0128 17:45:03.942793 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/be69614c-cb2d-44c4-9ff1-bec81201f8b2-secret-volume\") pod \"be69614c-cb2d-44c4-9ff1-bec81201f8b2\" (UID: \"be69614c-cb2d-44c4-9ff1-bec81201f8b2\") "
Jan 28 17:45:03 crc kubenswrapper[4811]: I0128 17:45:03.943205 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/be69614c-cb2d-44c4-9ff1-bec81201f8b2-config-volume" (OuterVolumeSpecName: "config-volume") pod "be69614c-cb2d-44c4-9ff1-bec81201f8b2" (UID: "be69614c-cb2d-44c4-9ff1-bec81201f8b2"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 17:45:03 crc kubenswrapper[4811]: I0128 17:45:03.944040 4811 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/be69614c-cb2d-44c4-9ff1-bec81201f8b2-config-volume\") on node \"crc\" DevicePath \"\""
Jan 28 17:45:03 crc kubenswrapper[4811]: I0128 17:45:03.951684 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be69614c-cb2d-44c4-9ff1-bec81201f8b2-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "be69614c-cb2d-44c4-9ff1-bec81201f8b2" (UID: "be69614c-cb2d-44c4-9ff1-bec81201f8b2"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:45:03 crc kubenswrapper[4811]: I0128 17:45:03.951753 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be69614c-cb2d-44c4-9ff1-bec81201f8b2-kube-api-access-nmwqt" (OuterVolumeSpecName: "kube-api-access-nmwqt") pod "be69614c-cb2d-44c4-9ff1-bec81201f8b2" (UID: "be69614c-cb2d-44c4-9ff1-bec81201f8b2"). InnerVolumeSpecName "kube-api-access-nmwqt". PluginName "kubernetes.io/projected", VolumeGidValue ""
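The UnmountVolume started / TearDown succeeded / Volume detached progression above is the volume manager's reconciler converging actual state to desired state: when the pod is deleted, every volume that is still mounted but no longer desired is unmounted and then reported detached. A toy reconcile loop with the same shape (illustrative only, not the kubelet's implementation):

    package main

    import "fmt"

    // reconcile is a toy version of the loop visible in the log: anything in
    // actual state but absent from desired state is unmounted, then reported
    // detached.
    func reconcile(desired, actual map[string]bool) {
        for vol := range actual {
            if !desired[vol] {
                fmt.Printf("operationExecutor.UnmountVolume started for volume %q\n", vol)
                delete(actual, vol) // TearDown
                fmt.Printf("Volume detached for volume %q\n", vol)
            }
        }
    }

    func main() {
        actual := map[string]bool{
            "config-volume":         true,
            "secret-volume":         true,
            "kube-api-access-nmwqt": true,
        }
        reconcile(map[string]bool{}, actual) // pod deleted: nothing is desired any more
    }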
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:45:04 crc kubenswrapper[4811]: I0128 17:45:04.045978 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmwqt\" (UniqueName: \"kubernetes.io/projected/be69614c-cb2d-44c4-9ff1-bec81201f8b2-kube-api-access-nmwqt\") on node \"crc\" DevicePath \"\"" Jan 28 17:45:04 crc kubenswrapper[4811]: I0128 17:45:04.046016 4811 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/be69614c-cb2d-44c4-9ff1-bec81201f8b2-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 28 17:45:04 crc kubenswrapper[4811]: I0128 17:45:04.410671 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493705-lm598" event={"ID":"be69614c-cb2d-44c4-9ff1-bec81201f8b2","Type":"ContainerDied","Data":"9486def1d9c1cfc08a4899176634133373e1cc9352b09dde133cc07802740dac"} Jan 28 17:45:04 crc kubenswrapper[4811]: I0128 17:45:04.410940 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9486def1d9c1cfc08a4899176634133373e1cc9352b09dde133cc07802740dac" Jan 28 17:45:04 crc kubenswrapper[4811]: I0128 17:45:04.410722 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493705-lm598" Jan 28 17:45:04 crc kubenswrapper[4811]: I0128 17:45:04.419827 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493660-fq594"] Jan 28 17:45:04 crc kubenswrapper[4811]: I0128 17:45:04.428680 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493660-fq594"] Jan 28 17:45:06 crc kubenswrapper[4811]: I0128 17:45:06.359178 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e9aa33-6d76-454b-8bec-5bdebdc5affc" path="/var/lib/kubelet/pods/25e9aa33-6d76-454b-8bec-5bdebdc5affc/volumes" Jan 28 17:45:09 crc kubenswrapper[4811]: I0128 17:45:09.340634 4811 scope.go:117] "RemoveContainer" containerID="1bcd36267aeacda468c2d2ee05b7c0018a25e8599ca1388f8f8967c101d5f7c7" Jan 28 17:45:09 crc kubenswrapper[4811]: E0128 17:45:09.341560 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:45:15 crc kubenswrapper[4811]: I0128 17:45:15.518727 4811 generic.go:334] "Generic (PLEG): container finished" podID="69e817eb-c2f6-4173-be32-04233e446173" containerID="c19992a59b5fc0e2384f016de8d51b6d3dd46af92881d2308d54779edf866f8c" exitCode=0 Jan 28 17:45:15 crc kubenswrapper[4811]: I0128 17:45:15.519245 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb" event={"ID":"69e817eb-c2f6-4173-be32-04233e446173","Type":"ContainerDied","Data":"c19992a59b5fc0e2384f016de8d51b6d3dd46af92881d2308d54779edf866f8c"} Jan 28 17:45:17 crc kubenswrapper[4811]: I0128 17:45:17.026721 4811 util.go:48] "No ready sandbox for pod can be found. 
Jan 28 17:45:17 crc kubenswrapper[4811]: I0128 17:45:17.228496 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69e817eb-c2f6-4173-be32-04233e446173-tripleo-cleanup-combined-ca-bundle\") pod \"69e817eb-c2f6-4173-be32-04233e446173\" (UID: \"69e817eb-c2f6-4173-be32-04233e446173\") "
Jan 28 17:45:17 crc kubenswrapper[4811]: I0128 17:45:17.228649 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69e817eb-c2f6-4173-be32-04233e446173-inventory\") pod \"69e817eb-c2f6-4173-be32-04233e446173\" (UID: \"69e817eb-c2f6-4173-be32-04233e446173\") "
Jan 28 17:45:17 crc kubenswrapper[4811]: I0128 17:45:17.228788 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2swpk\" (UniqueName: \"kubernetes.io/projected/69e817eb-c2f6-4173-be32-04233e446173-kube-api-access-2swpk\") pod \"69e817eb-c2f6-4173-be32-04233e446173\" (UID: \"69e817eb-c2f6-4173-be32-04233e446173\") "
Jan 28 17:45:17 crc kubenswrapper[4811]: I0128 17:45:17.228832 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/69e817eb-c2f6-4173-be32-04233e446173-ceph\") pod \"69e817eb-c2f6-4173-be32-04233e446173\" (UID: \"69e817eb-c2f6-4173-be32-04233e446173\") "
Jan 28 17:45:17 crc kubenswrapper[4811]: I0128 17:45:17.228857 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/69e817eb-c2f6-4173-be32-04233e446173-ssh-key-openstack-cell1\") pod \"69e817eb-c2f6-4173-be32-04233e446173\" (UID: \"69e817eb-c2f6-4173-be32-04233e446173\") "
Jan 28 17:45:17 crc kubenswrapper[4811]: I0128 17:45:17.235009 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69e817eb-c2f6-4173-be32-04233e446173-ceph" (OuterVolumeSpecName: "ceph") pod "69e817eb-c2f6-4173-be32-04233e446173" (UID: "69e817eb-c2f6-4173-be32-04233e446173"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:45:17 crc kubenswrapper[4811]: I0128 17:45:17.235065 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69e817eb-c2f6-4173-be32-04233e446173-tripleo-cleanup-combined-ca-bundle" (OuterVolumeSpecName: "tripleo-cleanup-combined-ca-bundle") pod "69e817eb-c2f6-4173-be32-04233e446173" (UID: "69e817eb-c2f6-4173-be32-04233e446173"). InnerVolumeSpecName "tripleo-cleanup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:45:17 crc kubenswrapper[4811]: I0128 17:45:17.238726 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69e817eb-c2f6-4173-be32-04233e446173-kube-api-access-2swpk" (OuterVolumeSpecName: "kube-api-access-2swpk") pod "69e817eb-c2f6-4173-be32-04233e446173" (UID: "69e817eb-c2f6-4173-be32-04233e446173"). InnerVolumeSpecName "kube-api-access-2swpk". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:45:17 crc kubenswrapper[4811]: I0128 17:45:17.262344 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69e817eb-c2f6-4173-be32-04233e446173-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "69e817eb-c2f6-4173-be32-04233e446173" (UID: "69e817eb-c2f6-4173-be32-04233e446173"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:45:17 crc kubenswrapper[4811]: I0128 17:45:17.263452 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69e817eb-c2f6-4173-be32-04233e446173-inventory" (OuterVolumeSpecName: "inventory") pod "69e817eb-c2f6-4173-be32-04233e446173" (UID: "69e817eb-c2f6-4173-be32-04233e446173"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:45:17 crc kubenswrapper[4811]: I0128 17:45:17.331816 4811 reconciler_common.go:293] "Volume detached for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69e817eb-c2f6-4173-be32-04233e446173-tripleo-cleanup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 17:45:17 crc kubenswrapper[4811]: I0128 17:45:17.331860 4811 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69e817eb-c2f6-4173-be32-04233e446173-inventory\") on node \"crc\" DevicePath \"\"" Jan 28 17:45:17 crc kubenswrapper[4811]: I0128 17:45:17.331874 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2swpk\" (UniqueName: \"kubernetes.io/projected/69e817eb-c2f6-4173-be32-04233e446173-kube-api-access-2swpk\") on node \"crc\" DevicePath \"\"" Jan 28 17:45:17 crc kubenswrapper[4811]: I0128 17:45:17.331885 4811 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/69e817eb-c2f6-4173-be32-04233e446173-ceph\") on node \"crc\" DevicePath \"\"" Jan 28 17:45:17 crc kubenswrapper[4811]: I0128 17:45:17.331896 4811 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/69e817eb-c2f6-4173-be32-04233e446173-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 28 17:45:17 crc kubenswrapper[4811]: I0128 17:45:17.541519 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb" event={"ID":"69e817eb-c2f6-4173-be32-04233e446173","Type":"ContainerDied","Data":"f640f9284d51542b9a7b4864b25517d186fa48d41412afa8dbfa5a6aba1fdf78"} Jan 28 17:45:17 crc kubenswrapper[4811]: I0128 17:45:17.541579 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f640f9284d51542b9a7b4864b25517d186fa48d41412afa8dbfa5a6aba1fdf78" Jan 28 17:45:17 crc kubenswrapper[4811]: I0128 17:45:17.541653 4811 util.go:48] "No ready sandbox for pod can be found. 
Jan 28 17:45:22 crc kubenswrapper[4811]: I0128 17:45:22.227025 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-n5sjm"]
Jan 28 17:45:22 crc kubenswrapper[4811]: E0128 17:45:22.227928 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be69614c-cb2d-44c4-9ff1-bec81201f8b2" containerName="collect-profiles"
Jan 28 17:45:22 crc kubenswrapper[4811]: I0128 17:45:22.227939 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="be69614c-cb2d-44c4-9ff1-bec81201f8b2" containerName="collect-profiles"
Jan 28 17:45:22 crc kubenswrapper[4811]: E0128 17:45:22.227972 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69e817eb-c2f6-4173-be32-04233e446173" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1"
Jan 28 17:45:22 crc kubenswrapper[4811]: I0128 17:45:22.227980 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="69e817eb-c2f6-4173-be32-04233e446173" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1"
Jan 28 17:45:22 crc kubenswrapper[4811]: I0128 17:45:22.228176 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="69e817eb-c2f6-4173-be32-04233e446173" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1"
Jan 28 17:45:22 crc kubenswrapper[4811]: I0128 17:45:22.228198 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="be69614c-cb2d-44c4-9ff1-bec81201f8b2" containerName="collect-profiles"
Jan 28 17:45:22 crc kubenswrapper[4811]: I0128 17:45:22.228949 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-n5sjm"
Jan 28 17:45:22 crc kubenswrapper[4811]: I0128 17:45:22.232776 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-856xk"
Jan 28 17:45:22 crc kubenswrapper[4811]: I0128 17:45:22.232837 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Jan 28 17:45:22 crc kubenswrapper[4811]: I0128 17:45:22.232934 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Jan 28 17:45:22 crc kubenswrapper[4811]: I0128 17:45:22.233105 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Jan 28 17:45:22 crc kubenswrapper[4811]: I0128 17:45:22.244608 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-n5sjm"]
Jan 28 17:45:22 crc kubenswrapper[4811]: I0128 17:45:22.341981 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0f2767ed-1bc7-4828-8084-29e418f02fbb-inventory\") pod \"bootstrap-openstack-openstack-cell1-n5sjm\" (UID: \"0f2767ed-1bc7-4828-8084-29e418f02fbb\") " pod="openstack/bootstrap-openstack-openstack-cell1-n5sjm"
Jan 28 17:45:22 crc kubenswrapper[4811]: I0128 17:45:22.342050 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f2767ed-1bc7-4828-8084-29e418f02fbb-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-n5sjm\" (UID: \"0f2767ed-1bc7-4828-8084-29e418f02fbb\") " pod="openstack/bootstrap-openstack-openstack-cell1-n5sjm"
Jan 28 17:45:22 crc kubenswrapper[4811]: I0128 17:45:22.342298 4811 scope.go:117] "RemoveContainer" containerID="1bcd36267aeacda468c2d2ee05b7c0018a25e8599ca1388f8f8967c101d5f7c7"
Jan 28 17:45:22 crc kubenswrapper[4811]: I0128 17:45:22.342298 4811 scope.go:117] "RemoveContainer" containerID="1bcd36267aeacda468c2d2ee05b7c0018a25e8599ca1388f8f8967c101d5f7c7"
Jan 28 17:45:22 crc kubenswrapper[4811]: E0128 17:45:22.343066 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:45:22 crc kubenswrapper[4811]: I0128 17:45:22.343265 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tt2cb\" (UniqueName: \"kubernetes.io/projected/0f2767ed-1bc7-4828-8084-29e418f02fbb-kube-api-access-tt2cb\") pod \"bootstrap-openstack-openstack-cell1-n5sjm\" (UID: \"0f2767ed-1bc7-4828-8084-29e418f02fbb\") " pod="openstack/bootstrap-openstack-openstack-cell1-n5sjm"
Jan 28 17:45:22 crc kubenswrapper[4811]: I0128 17:45:22.343406 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/0f2767ed-1bc7-4828-8084-29e418f02fbb-ssh-key-openstack-cell1\") pod \"bootstrap-openstack-openstack-cell1-n5sjm\" (UID: \"0f2767ed-1bc7-4828-8084-29e418f02fbb\") " pod="openstack/bootstrap-openstack-openstack-cell1-n5sjm"
Jan 28 17:45:22 crc kubenswrapper[4811]: I0128 17:45:22.343642 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0f2767ed-1bc7-4828-8084-29e418f02fbb-ceph\") pod \"bootstrap-openstack-openstack-cell1-n5sjm\" (UID: \"0f2767ed-1bc7-4828-8084-29e418f02fbb\") " pod="openstack/bootstrap-openstack-openstack-cell1-n5sjm"
Jan 28 17:45:22 crc kubenswrapper[4811]: I0128 17:45:22.445627 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0f2767ed-1bc7-4828-8084-29e418f02fbb-ceph\") pod \"bootstrap-openstack-openstack-cell1-n5sjm\" (UID: \"0f2767ed-1bc7-4828-8084-29e418f02fbb\") " pod="openstack/bootstrap-openstack-openstack-cell1-n5sjm"
Jan 28 17:45:22 crc kubenswrapper[4811]: I0128 17:45:22.447007 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0f2767ed-1bc7-4828-8084-29e418f02fbb-inventory\") pod \"bootstrap-openstack-openstack-cell1-n5sjm\" (UID: \"0f2767ed-1bc7-4828-8084-29e418f02fbb\") " pod="openstack/bootstrap-openstack-openstack-cell1-n5sjm"
Jan 28 17:45:22 crc kubenswrapper[4811]: I0128 17:45:22.447134 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f2767ed-1bc7-4828-8084-29e418f02fbb-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-n5sjm\" (UID: \"0f2767ed-1bc7-4828-8084-29e418f02fbb\") " pod="openstack/bootstrap-openstack-openstack-cell1-n5sjm"
\"0f2767ed-1bc7-4828-8084-29e418f02fbb\") " pod="openstack/bootstrap-openstack-openstack-cell1-n5sjm" Jan 28 17:45:22 crc kubenswrapper[4811]: I0128 17:45:22.447326 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/0f2767ed-1bc7-4828-8084-29e418f02fbb-ssh-key-openstack-cell1\") pod \"bootstrap-openstack-openstack-cell1-n5sjm\" (UID: \"0f2767ed-1bc7-4828-8084-29e418f02fbb\") " pod="openstack/bootstrap-openstack-openstack-cell1-n5sjm" Jan 28 17:45:22 crc kubenswrapper[4811]: I0128 17:45:22.455015 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/0f2767ed-1bc7-4828-8084-29e418f02fbb-ssh-key-openstack-cell1\") pod \"bootstrap-openstack-openstack-cell1-n5sjm\" (UID: \"0f2767ed-1bc7-4828-8084-29e418f02fbb\") " pod="openstack/bootstrap-openstack-openstack-cell1-n5sjm" Jan 28 17:45:22 crc kubenswrapper[4811]: I0128 17:45:22.455328 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0f2767ed-1bc7-4828-8084-29e418f02fbb-inventory\") pod \"bootstrap-openstack-openstack-cell1-n5sjm\" (UID: \"0f2767ed-1bc7-4828-8084-29e418f02fbb\") " pod="openstack/bootstrap-openstack-openstack-cell1-n5sjm" Jan 28 17:45:22 crc kubenswrapper[4811]: I0128 17:45:22.455340 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0f2767ed-1bc7-4828-8084-29e418f02fbb-ceph\") pod \"bootstrap-openstack-openstack-cell1-n5sjm\" (UID: \"0f2767ed-1bc7-4828-8084-29e418f02fbb\") " pod="openstack/bootstrap-openstack-openstack-cell1-n5sjm" Jan 28 17:45:22 crc kubenswrapper[4811]: I0128 17:45:22.455394 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f2767ed-1bc7-4828-8084-29e418f02fbb-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-n5sjm\" (UID: \"0f2767ed-1bc7-4828-8084-29e418f02fbb\") " pod="openstack/bootstrap-openstack-openstack-cell1-n5sjm" Jan 28 17:45:22 crc kubenswrapper[4811]: I0128 17:45:22.463726 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tt2cb\" (UniqueName: \"kubernetes.io/projected/0f2767ed-1bc7-4828-8084-29e418f02fbb-kube-api-access-tt2cb\") pod \"bootstrap-openstack-openstack-cell1-n5sjm\" (UID: \"0f2767ed-1bc7-4828-8084-29e418f02fbb\") " pod="openstack/bootstrap-openstack-openstack-cell1-n5sjm" Jan 28 17:45:22 crc kubenswrapper[4811]: I0128 17:45:22.560044 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-n5sjm" Jan 28 17:45:23 crc kubenswrapper[4811]: I0128 17:45:23.295590 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-n5sjm"] Jan 28 17:45:23 crc kubenswrapper[4811]: I0128 17:45:23.605747 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-n5sjm" event={"ID":"0f2767ed-1bc7-4828-8084-29e418f02fbb","Type":"ContainerStarted","Data":"2cfd88ea1d5e18748ae0233d8df3d5d87fdb9d0648341c523941ac457139ea11"} Jan 28 17:45:24 crc kubenswrapper[4811]: I0128 17:45:24.615141 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-n5sjm" event={"ID":"0f2767ed-1bc7-4828-8084-29e418f02fbb","Type":"ContainerStarted","Data":"3f3f3fb81b72fcdd84ddbd01957ab06290d3850501c1a9abc202e40c1579d5d5"} Jan 28 17:45:24 crc kubenswrapper[4811]: I0128 17:45:24.632193 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-openstack-openstack-cell1-n5sjm" podStartSLOduration=2.173039322 podStartE2EDuration="2.632174469s" podCreationTimestamp="2026-01-28 17:45:22 +0000 UTC" firstStartedPulling="2026-01-28 17:45:23.309193748 +0000 UTC m=+7216.063557331" lastFinishedPulling="2026-01-28 17:45:23.768328905 +0000 UTC m=+7216.522692478" observedRunningTime="2026-01-28 17:45:24.630137945 +0000 UTC m=+7217.384501528" watchObservedRunningTime="2026-01-28 17:45:24.632174469 +0000 UTC m=+7217.386538042" Jan 28 17:45:37 crc kubenswrapper[4811]: I0128 17:45:37.081127 4811 scope.go:117] "RemoveContainer" containerID="9ada8ef4070520e747e5ea970a70a63de5b86abd9776f11fd0ddf6608bb52b39" Jan 28 17:45:37 crc kubenswrapper[4811]: I0128 17:45:37.339362 4811 scope.go:117] "RemoveContainer" containerID="1bcd36267aeacda468c2d2ee05b7c0018a25e8599ca1388f8f8967c101d5f7c7" Jan 28 17:45:37 crc kubenswrapper[4811]: E0128 17:45:37.340041 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:45:51 crc kubenswrapper[4811]: I0128 17:45:51.339666 4811 scope.go:117] "RemoveContainer" containerID="1bcd36267aeacda468c2d2ee05b7c0018a25e8599ca1388f8f8967c101d5f7c7" Jan 28 17:45:51 crc kubenswrapper[4811]: E0128 17:45:51.340455 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:46:03 crc kubenswrapper[4811]: I0128 17:46:03.341493 4811 scope.go:117] "RemoveContainer" containerID="1bcd36267aeacda468c2d2ee05b7c0018a25e8599ca1388f8f8967c101d5f7c7" Jan 28 17:46:03 crc kubenswrapper[4811]: E0128 17:46:03.342925 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
Jan 28 17:46:03 crc kubenswrapper[4811]: E0128 17:46:03.342925 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:46:17 crc kubenswrapper[4811]: I0128 17:46:17.339713 4811 scope.go:117] "RemoveContainer" containerID="1bcd36267aeacda468c2d2ee05b7c0018a25e8599ca1388f8f8967c101d5f7c7"
Jan 28 17:46:17 crc kubenswrapper[4811]: E0128 17:46:17.340361 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:46:28 crc kubenswrapper[4811]: I0128 17:46:28.339319 4811 scope.go:117] "RemoveContainer" containerID="1bcd36267aeacda468c2d2ee05b7c0018a25e8599ca1388f8f8967c101d5f7c7"
Jan 28 17:46:28 crc kubenswrapper[4811]: E0128 17:46:28.340132 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:46:40 crc kubenswrapper[4811]: I0128 17:46:40.340100 4811 scope.go:117] "RemoveContainer" containerID="1bcd36267aeacda468c2d2ee05b7c0018a25e8599ca1388f8f8967c101d5f7c7"
Jan 28 17:46:40 crc kubenswrapper[4811]: E0128 17:46:40.341077 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:46:51 crc kubenswrapper[4811]: I0128 17:46:51.339596 4811 scope.go:117] "RemoveContainer" containerID="1bcd36267aeacda468c2d2ee05b7c0018a25e8599ca1388f8f8967c101d5f7c7"
Jan 28 17:46:51 crc kubenswrapper[4811]: E0128 17:46:51.342180 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:46:55 crc kubenswrapper[4811]: I0128 17:46:55.922281 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qt8kj"]
Need to start a new one" pod="openshift-marketplace/certified-operators-qt8kj" Jan 28 17:46:55 crc kubenswrapper[4811]: I0128 17:46:55.942403 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qt8kj"] Jan 28 17:46:55 crc kubenswrapper[4811]: I0128 17:46:55.960338 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c23a90c8-fcd4-4065-bccc-7d8a022bef0a-utilities\") pod \"certified-operators-qt8kj\" (UID: \"c23a90c8-fcd4-4065-bccc-7d8a022bef0a\") " pod="openshift-marketplace/certified-operators-qt8kj" Jan 28 17:46:55 crc kubenswrapper[4811]: I0128 17:46:55.960431 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lb8st\" (UniqueName: \"kubernetes.io/projected/c23a90c8-fcd4-4065-bccc-7d8a022bef0a-kube-api-access-lb8st\") pod \"certified-operators-qt8kj\" (UID: \"c23a90c8-fcd4-4065-bccc-7d8a022bef0a\") " pod="openshift-marketplace/certified-operators-qt8kj" Jan 28 17:46:55 crc kubenswrapper[4811]: I0128 17:46:55.960739 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c23a90c8-fcd4-4065-bccc-7d8a022bef0a-catalog-content\") pod \"certified-operators-qt8kj\" (UID: \"c23a90c8-fcd4-4065-bccc-7d8a022bef0a\") " pod="openshift-marketplace/certified-operators-qt8kj" Jan 28 17:46:56 crc kubenswrapper[4811]: I0128 17:46:56.063172 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lb8st\" (UniqueName: \"kubernetes.io/projected/c23a90c8-fcd4-4065-bccc-7d8a022bef0a-kube-api-access-lb8st\") pod \"certified-operators-qt8kj\" (UID: \"c23a90c8-fcd4-4065-bccc-7d8a022bef0a\") " pod="openshift-marketplace/certified-operators-qt8kj" Jan 28 17:46:56 crc kubenswrapper[4811]: I0128 17:46:56.063374 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c23a90c8-fcd4-4065-bccc-7d8a022bef0a-catalog-content\") pod \"certified-operators-qt8kj\" (UID: \"c23a90c8-fcd4-4065-bccc-7d8a022bef0a\") " pod="openshift-marketplace/certified-operators-qt8kj" Jan 28 17:46:56 crc kubenswrapper[4811]: I0128 17:46:56.063610 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c23a90c8-fcd4-4065-bccc-7d8a022bef0a-utilities\") pod \"certified-operators-qt8kj\" (UID: \"c23a90c8-fcd4-4065-bccc-7d8a022bef0a\") " pod="openshift-marketplace/certified-operators-qt8kj" Jan 28 17:46:56 crc kubenswrapper[4811]: I0128 17:46:56.064085 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c23a90c8-fcd4-4065-bccc-7d8a022bef0a-catalog-content\") pod \"certified-operators-qt8kj\" (UID: \"c23a90c8-fcd4-4065-bccc-7d8a022bef0a\") " pod="openshift-marketplace/certified-operators-qt8kj" Jan 28 17:46:56 crc kubenswrapper[4811]: I0128 17:46:56.064132 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c23a90c8-fcd4-4065-bccc-7d8a022bef0a-utilities\") pod \"certified-operators-qt8kj\" (UID: \"c23a90c8-fcd4-4065-bccc-7d8a022bef0a\") " pod="openshift-marketplace/certified-operators-qt8kj" Jan 28 17:46:56 crc kubenswrapper[4811]: I0128 17:46:56.082224 4811 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-lb8st\" (UniqueName: \"kubernetes.io/projected/c23a90c8-fcd4-4065-bccc-7d8a022bef0a-kube-api-access-lb8st\") pod \"certified-operators-qt8kj\" (UID: \"c23a90c8-fcd4-4065-bccc-7d8a022bef0a\") " pod="openshift-marketplace/certified-operators-qt8kj" Jan 28 17:46:56 crc kubenswrapper[4811]: I0128 17:46:56.246597 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qt8kj" Jan 28 17:46:56 crc kubenswrapper[4811]: I0128 17:46:56.748509 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qt8kj"] Jan 28 17:46:57 crc kubenswrapper[4811]: I0128 17:46:57.536407 4811 generic.go:334] "Generic (PLEG): container finished" podID="c23a90c8-fcd4-4065-bccc-7d8a022bef0a" containerID="7124fa4fc88fad7f4b98dbdd9912ad08c829229cbb71bc9ec09284b343cf9402" exitCode=0 Jan 28 17:46:57 crc kubenswrapper[4811]: I0128 17:46:57.536490 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qt8kj" event={"ID":"c23a90c8-fcd4-4065-bccc-7d8a022bef0a","Type":"ContainerDied","Data":"7124fa4fc88fad7f4b98dbdd9912ad08c829229cbb71bc9ec09284b343cf9402"} Jan 28 17:46:57 crc kubenswrapper[4811]: I0128 17:46:57.536798 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qt8kj" event={"ID":"c23a90c8-fcd4-4065-bccc-7d8a022bef0a","Type":"ContainerStarted","Data":"3fccebd762d77521db1cd35aa71cbd542a689ba40f31ae090f8953998057d412"} Jan 28 17:46:58 crc kubenswrapper[4811]: I0128 17:46:58.548281 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qt8kj" event={"ID":"c23a90c8-fcd4-4065-bccc-7d8a022bef0a","Type":"ContainerStarted","Data":"11a6729989c8562574b3c82ddb6b614753afbd145cf16e85ce2867c0e68dd2fb"} Jan 28 17:47:00 crc kubenswrapper[4811]: I0128 17:47:00.569632 4811 generic.go:334] "Generic (PLEG): container finished" podID="c23a90c8-fcd4-4065-bccc-7d8a022bef0a" containerID="11a6729989c8562574b3c82ddb6b614753afbd145cf16e85ce2867c0e68dd2fb" exitCode=0 Jan 28 17:47:00 crc kubenswrapper[4811]: I0128 17:47:00.569706 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qt8kj" event={"ID":"c23a90c8-fcd4-4065-bccc-7d8a022bef0a","Type":"ContainerDied","Data":"11a6729989c8562574b3c82ddb6b614753afbd145cf16e85ce2867c0e68dd2fb"} Jan 28 17:47:01 crc kubenswrapper[4811]: I0128 17:47:01.579598 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qt8kj" event={"ID":"c23a90c8-fcd4-4065-bccc-7d8a022bef0a","Type":"ContainerStarted","Data":"9a20007d4f58c4e29b6a2abb34e93787da6213fd148f3fff00efc686b85d0996"} Jan 28 17:47:01 crc kubenswrapper[4811]: I0128 17:47:01.606067 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qt8kj" podStartSLOduration=3.116753653 podStartE2EDuration="6.606051774s" podCreationTimestamp="2026-01-28 17:46:55 +0000 UTC" firstStartedPulling="2026-01-28 17:46:57.538119836 +0000 UTC m=+7310.292483419" lastFinishedPulling="2026-01-28 17:47:01.027417957 +0000 UTC m=+7313.781781540" observedRunningTime="2026-01-28 17:47:01.598696995 +0000 UTC m=+7314.353060578" watchObservedRunningTime="2026-01-28 17:47:01.606051774 +0000 UTC m=+7314.360415357" Jan 28 17:47:03 crc kubenswrapper[4811]: I0128 17:47:03.339776 4811 scope.go:117] "RemoveContainer" 
containerID="1bcd36267aeacda468c2d2ee05b7c0018a25e8599ca1388f8f8967c101d5f7c7" Jan 28 17:47:03 crc kubenswrapper[4811]: I0128 17:47:03.602117 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"b2b40db7ba92973c78c86379b0152ed793614e93e3ce55a3e2a3a4ff04fcc7c9"} Jan 28 17:47:06 crc kubenswrapper[4811]: I0128 17:47:06.246975 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qt8kj" Jan 28 17:47:06 crc kubenswrapper[4811]: I0128 17:47:06.247538 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qt8kj" Jan 28 17:47:06 crc kubenswrapper[4811]: I0128 17:47:06.312546 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qt8kj" Jan 28 17:47:06 crc kubenswrapper[4811]: I0128 17:47:06.725909 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qt8kj" Jan 28 17:47:06 crc kubenswrapper[4811]: I0128 17:47:06.775755 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qt8kj"] Jan 28 17:47:08 crc kubenswrapper[4811]: I0128 17:47:08.648512 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qt8kj" podUID="c23a90c8-fcd4-4065-bccc-7d8a022bef0a" containerName="registry-server" containerID="cri-o://9a20007d4f58c4e29b6a2abb34e93787da6213fd148f3fff00efc686b85d0996" gracePeriod=2 Jan 28 17:47:09 crc kubenswrapper[4811]: I0128 17:47:09.166275 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qt8kj" Jan 28 17:47:09 crc kubenswrapper[4811]: I0128 17:47:09.264200 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c23a90c8-fcd4-4065-bccc-7d8a022bef0a-catalog-content\") pod \"c23a90c8-fcd4-4065-bccc-7d8a022bef0a\" (UID: \"c23a90c8-fcd4-4065-bccc-7d8a022bef0a\") " Jan 28 17:47:09 crc kubenswrapper[4811]: I0128 17:47:09.264307 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lb8st\" (UniqueName: \"kubernetes.io/projected/c23a90c8-fcd4-4065-bccc-7d8a022bef0a-kube-api-access-lb8st\") pod \"c23a90c8-fcd4-4065-bccc-7d8a022bef0a\" (UID: \"c23a90c8-fcd4-4065-bccc-7d8a022bef0a\") " Jan 28 17:47:09 crc kubenswrapper[4811]: I0128 17:47:09.264369 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c23a90c8-fcd4-4065-bccc-7d8a022bef0a-utilities\") pod \"c23a90c8-fcd4-4065-bccc-7d8a022bef0a\" (UID: \"c23a90c8-fcd4-4065-bccc-7d8a022bef0a\") " Jan 28 17:47:09 crc kubenswrapper[4811]: I0128 17:47:09.266043 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c23a90c8-fcd4-4065-bccc-7d8a022bef0a-utilities" (OuterVolumeSpecName: "utilities") pod "c23a90c8-fcd4-4065-bccc-7d8a022bef0a" (UID: "c23a90c8-fcd4-4065-bccc-7d8a022bef0a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:47:09 crc kubenswrapper[4811]: I0128 17:47:09.271291 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c23a90c8-fcd4-4065-bccc-7d8a022bef0a-kube-api-access-lb8st" (OuterVolumeSpecName: "kube-api-access-lb8st") pod "c23a90c8-fcd4-4065-bccc-7d8a022bef0a" (UID: "c23a90c8-fcd4-4065-bccc-7d8a022bef0a"). InnerVolumeSpecName "kube-api-access-lb8st". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:47:09 crc kubenswrapper[4811]: I0128 17:47:09.320795 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c23a90c8-fcd4-4065-bccc-7d8a022bef0a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c23a90c8-fcd4-4065-bccc-7d8a022bef0a" (UID: "c23a90c8-fcd4-4065-bccc-7d8a022bef0a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:47:09 crc kubenswrapper[4811]: I0128 17:47:09.366693 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c23a90c8-fcd4-4065-bccc-7d8a022bef0a-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 17:47:09 crc kubenswrapper[4811]: I0128 17:47:09.366741 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lb8st\" (UniqueName: \"kubernetes.io/projected/c23a90c8-fcd4-4065-bccc-7d8a022bef0a-kube-api-access-lb8st\") on node \"crc\" DevicePath \"\"" Jan 28 17:47:09 crc kubenswrapper[4811]: I0128 17:47:09.366755 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c23a90c8-fcd4-4065-bccc-7d8a022bef0a-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 17:47:09 crc kubenswrapper[4811]: I0128 17:47:09.659394 4811 generic.go:334] "Generic (PLEG): container finished" podID="c23a90c8-fcd4-4065-bccc-7d8a022bef0a" containerID="9a20007d4f58c4e29b6a2abb34e93787da6213fd148f3fff00efc686b85d0996" exitCode=0 Jan 28 17:47:09 crc kubenswrapper[4811]: I0128 17:47:09.659470 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qt8kj" Jan 28 17:47:09 crc kubenswrapper[4811]: I0128 17:47:09.659485 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qt8kj" event={"ID":"c23a90c8-fcd4-4065-bccc-7d8a022bef0a","Type":"ContainerDied","Data":"9a20007d4f58c4e29b6a2abb34e93787da6213fd148f3fff00efc686b85d0996"} Jan 28 17:47:09 crc kubenswrapper[4811]: I0128 17:47:09.660808 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qt8kj" event={"ID":"c23a90c8-fcd4-4065-bccc-7d8a022bef0a","Type":"ContainerDied","Data":"3fccebd762d77521db1cd35aa71cbd542a689ba40f31ae090f8953998057d412"} Jan 28 17:47:09 crc kubenswrapper[4811]: I0128 17:47:09.660902 4811 scope.go:117] "RemoveContainer" containerID="9a20007d4f58c4e29b6a2abb34e93787da6213fd148f3fff00efc686b85d0996" Jan 28 17:47:09 crc kubenswrapper[4811]: I0128 17:47:09.702808 4811 scope.go:117] "RemoveContainer" containerID="11a6729989c8562574b3c82ddb6b614753afbd145cf16e85ce2867c0e68dd2fb" Jan 28 17:47:09 crc kubenswrapper[4811]: I0128 17:47:09.707624 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qt8kj"] Jan 28 17:47:09 crc kubenswrapper[4811]: I0128 17:47:09.719114 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qt8kj"] Jan 28 17:47:09 crc kubenswrapper[4811]: I0128 17:47:09.726337 4811 scope.go:117] "RemoveContainer" containerID="7124fa4fc88fad7f4b98dbdd9912ad08c829229cbb71bc9ec09284b343cf9402" Jan 28 17:47:09 crc kubenswrapper[4811]: I0128 17:47:09.770480 4811 scope.go:117] "RemoveContainer" containerID="9a20007d4f58c4e29b6a2abb34e93787da6213fd148f3fff00efc686b85d0996" Jan 28 17:47:09 crc kubenswrapper[4811]: E0128 17:47:09.770850 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a20007d4f58c4e29b6a2abb34e93787da6213fd148f3fff00efc686b85d0996\": container with ID starting with 9a20007d4f58c4e29b6a2abb34e93787da6213fd148f3fff00efc686b85d0996 not found: ID does not exist" containerID="9a20007d4f58c4e29b6a2abb34e93787da6213fd148f3fff00efc686b85d0996" Jan 28 17:47:09 crc kubenswrapper[4811]: I0128 17:47:09.770893 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a20007d4f58c4e29b6a2abb34e93787da6213fd148f3fff00efc686b85d0996"} err="failed to get container status \"9a20007d4f58c4e29b6a2abb34e93787da6213fd148f3fff00efc686b85d0996\": rpc error: code = NotFound desc = could not find container \"9a20007d4f58c4e29b6a2abb34e93787da6213fd148f3fff00efc686b85d0996\": container with ID starting with 9a20007d4f58c4e29b6a2abb34e93787da6213fd148f3fff00efc686b85d0996 not found: ID does not exist" Jan 28 17:47:09 crc kubenswrapper[4811]: I0128 17:47:09.770918 4811 scope.go:117] "RemoveContainer" containerID="11a6729989c8562574b3c82ddb6b614753afbd145cf16e85ce2867c0e68dd2fb" Jan 28 17:47:09 crc kubenswrapper[4811]: E0128 17:47:09.771455 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"11a6729989c8562574b3c82ddb6b614753afbd145cf16e85ce2867c0e68dd2fb\": container with ID starting with 11a6729989c8562574b3c82ddb6b614753afbd145cf16e85ce2867c0e68dd2fb not found: ID does not exist" containerID="11a6729989c8562574b3c82ddb6b614753afbd145cf16e85ce2867c0e68dd2fb" Jan 28 17:47:09 crc kubenswrapper[4811]: I0128 17:47:09.771509 4811 
Jan 28 17:47:09 crc kubenswrapper[4811]: I0128 17:47:09.771509 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11a6729989c8562574b3c82ddb6b614753afbd145cf16e85ce2867c0e68dd2fb"} err="failed to get container status \"11a6729989c8562574b3c82ddb6b614753afbd145cf16e85ce2867c0e68dd2fb\": rpc error: code = NotFound desc = could not find container \"11a6729989c8562574b3c82ddb6b614753afbd145cf16e85ce2867c0e68dd2fb\": container with ID starting with 11a6729989c8562574b3c82ddb6b614753afbd145cf16e85ce2867c0e68dd2fb not found: ID does not exist"
Jan 28 17:47:09 crc kubenswrapper[4811]: I0128 17:47:09.771558 4811 scope.go:117] "RemoveContainer" containerID="7124fa4fc88fad7f4b98dbdd9912ad08c829229cbb71bc9ec09284b343cf9402"
Jan 28 17:47:09 crc kubenswrapper[4811]: E0128 17:47:09.771851 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7124fa4fc88fad7f4b98dbdd9912ad08c829229cbb71bc9ec09284b343cf9402\": container with ID starting with 7124fa4fc88fad7f4b98dbdd9912ad08c829229cbb71bc9ec09284b343cf9402 not found: ID does not exist" containerID="7124fa4fc88fad7f4b98dbdd9912ad08c829229cbb71bc9ec09284b343cf9402"
Jan 28 17:47:09 crc kubenswrapper[4811]: I0128 17:47:09.771882 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7124fa4fc88fad7f4b98dbdd9912ad08c829229cbb71bc9ec09284b343cf9402"} err="failed to get container status \"7124fa4fc88fad7f4b98dbdd9912ad08c829229cbb71bc9ec09284b343cf9402\": rpc error: code = NotFound desc = could not find container \"7124fa4fc88fad7f4b98dbdd9912ad08c829229cbb71bc9ec09284b343cf9402\": container with ID starting with 7124fa4fc88fad7f4b98dbdd9912ad08c829229cbb71bc9ec09284b343cf9402 not found: ID does not exist"
Jan 28 17:47:10 crc kubenswrapper[4811]: I0128 17:47:10.350408 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c23a90c8-fcd4-4065-bccc-7d8a022bef0a" path="/var/lib/kubelet/pods/c23a90c8-fcd4-4065-bccc-7d8a022bef0a/volumes"
Jan 28 17:48:32 crc kubenswrapper[4811]: I0128 17:48:32.457614 4811 generic.go:334] "Generic (PLEG): container finished" podID="0f2767ed-1bc7-4828-8084-29e418f02fbb" containerID="3f3f3fb81b72fcdd84ddbd01957ab06290d3850501c1a9abc202e40c1579d5d5" exitCode=0
Jan 28 17:48:32 crc kubenswrapper[4811]: I0128 17:48:32.459129 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-n5sjm" event={"ID":"0f2767ed-1bc7-4828-8084-29e418f02fbb","Type":"ContainerDied","Data":"3f3f3fb81b72fcdd84ddbd01957ab06290d3850501c1a9abc202e40c1579d5d5"}
Jan 28 17:48:33 crc kubenswrapper[4811]: I0128 17:48:33.975891 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-n5sjm"
Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.160179 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/0f2767ed-1bc7-4828-8084-29e418f02fbb-ssh-key-openstack-cell1\") pod \"0f2767ed-1bc7-4828-8084-29e418f02fbb\" (UID: \"0f2767ed-1bc7-4828-8084-29e418f02fbb\") "
Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.160253 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f2767ed-1bc7-4828-8084-29e418f02fbb-bootstrap-combined-ca-bundle\") pod \"0f2767ed-1bc7-4828-8084-29e418f02fbb\" (UID: \"0f2767ed-1bc7-4828-8084-29e418f02fbb\") "
Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.160296 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0f2767ed-1bc7-4828-8084-29e418f02fbb-inventory\") pod \"0f2767ed-1bc7-4828-8084-29e418f02fbb\" (UID: \"0f2767ed-1bc7-4828-8084-29e418f02fbb\") "
Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.160410 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tt2cb\" (UniqueName: \"kubernetes.io/projected/0f2767ed-1bc7-4828-8084-29e418f02fbb-kube-api-access-tt2cb\") pod \"0f2767ed-1bc7-4828-8084-29e418f02fbb\" (UID: \"0f2767ed-1bc7-4828-8084-29e418f02fbb\") "
Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.160517 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0f2767ed-1bc7-4828-8084-29e418f02fbb-ceph\") pod \"0f2767ed-1bc7-4828-8084-29e418f02fbb\" (UID: \"0f2767ed-1bc7-4828-8084-29e418f02fbb\") "
Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.166555 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f2767ed-1bc7-4828-8084-29e418f02fbb-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "0f2767ed-1bc7-4828-8084-29e418f02fbb" (UID: "0f2767ed-1bc7-4828-8084-29e418f02fbb"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.168698 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f2767ed-1bc7-4828-8084-29e418f02fbb-ceph" (OuterVolumeSpecName: "ceph") pod "0f2767ed-1bc7-4828-8084-29e418f02fbb" (UID: "0f2767ed-1bc7-4828-8084-29e418f02fbb"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.192590 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f2767ed-1bc7-4828-8084-29e418f02fbb-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "0f2767ed-1bc7-4828-8084-29e418f02fbb" (UID: "0f2767ed-1bc7-4828-8084-29e418f02fbb"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.196042 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f2767ed-1bc7-4828-8084-29e418f02fbb-inventory" (OuterVolumeSpecName: "inventory") pod "0f2767ed-1bc7-4828-8084-29e418f02fbb" (UID: "0f2767ed-1bc7-4828-8084-29e418f02fbb"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.265516 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tt2cb\" (UniqueName: \"kubernetes.io/projected/0f2767ed-1bc7-4828-8084-29e418f02fbb-kube-api-access-tt2cb\") on node \"crc\" DevicePath \"\"" Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.265558 4811 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0f2767ed-1bc7-4828-8084-29e418f02fbb-ceph\") on node \"crc\" DevicePath \"\"" Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.265570 4811 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/0f2767ed-1bc7-4828-8084-29e418f02fbb-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.265582 4811 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f2767ed-1bc7-4828-8084-29e418f02fbb-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.265595 4811 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0f2767ed-1bc7-4828-8084-29e418f02fbb-inventory\") on node \"crc\" DevicePath \"\"" Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.478617 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-n5sjm" event={"ID":"0f2767ed-1bc7-4828-8084-29e418f02fbb","Type":"ContainerDied","Data":"2cfd88ea1d5e18748ae0233d8df3d5d87fdb9d0648341c523941ac457139ea11"} Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.478660 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2cfd88ea1d5e18748ae0233d8df3d5d87fdb9d0648341c523941ac457139ea11" Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.478710 4811 util.go:48] "No ready sandbox for pod can be found. 
Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.478710 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-n5sjm"
Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.580405 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-v5bff"]
Jan 28 17:48:34 crc kubenswrapper[4811]: E0128 17:48:34.581005 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c23a90c8-fcd4-4065-bccc-7d8a022bef0a" containerName="extract-utilities"
Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.581037 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="c23a90c8-fcd4-4065-bccc-7d8a022bef0a" containerName="extract-utilities"
Jan 28 17:48:34 crc kubenswrapper[4811]: E0128 17:48:34.581071 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f2767ed-1bc7-4828-8084-29e418f02fbb" containerName="bootstrap-openstack-openstack-cell1"
Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.581080 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f2767ed-1bc7-4828-8084-29e418f02fbb" containerName="bootstrap-openstack-openstack-cell1"
Jan 28 17:48:34 crc kubenswrapper[4811]: E0128 17:48:34.581098 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c23a90c8-fcd4-4065-bccc-7d8a022bef0a" containerName="registry-server"
Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.581107 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="c23a90c8-fcd4-4065-bccc-7d8a022bef0a" containerName="registry-server"
Jan 28 17:48:34 crc kubenswrapper[4811]: E0128 17:48:34.581139 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c23a90c8-fcd4-4065-bccc-7d8a022bef0a" containerName="extract-content"
Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.581146 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="c23a90c8-fcd4-4065-bccc-7d8a022bef0a" containerName="extract-content"
Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.581372 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="c23a90c8-fcd4-4065-bccc-7d8a022bef0a" containerName="registry-server"
Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.581414 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f2767ed-1bc7-4828-8084-29e418f02fbb" containerName="bootstrap-openstack-openstack-cell1"
Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.582471 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-v5bff"
Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.586085 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.586646 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.586885 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-856xk"
Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.587286 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.592045 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-v5bff"]
Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.672377 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2758f1a1-4474-4964-98eb-5046d0474346-ceph\") pod \"download-cache-openstack-openstack-cell1-v5bff\" (UID: \"2758f1a1-4474-4964-98eb-5046d0474346\") " pod="openstack/download-cache-openstack-openstack-cell1-v5bff"
Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.672550 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtqhg\" (UniqueName: \"kubernetes.io/projected/2758f1a1-4474-4964-98eb-5046d0474346-kube-api-access-xtqhg\") pod \"download-cache-openstack-openstack-cell1-v5bff\" (UID: \"2758f1a1-4474-4964-98eb-5046d0474346\") " pod="openstack/download-cache-openstack-openstack-cell1-v5bff"
Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.672638 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2758f1a1-4474-4964-98eb-5046d0474346-inventory\") pod \"download-cache-openstack-openstack-cell1-v5bff\" (UID: \"2758f1a1-4474-4964-98eb-5046d0474346\") " pod="openstack/download-cache-openstack-openstack-cell1-v5bff"
Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.672685 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/2758f1a1-4474-4964-98eb-5046d0474346-ssh-key-openstack-cell1\") pod \"download-cache-openstack-openstack-cell1-v5bff\" (UID: \"2758f1a1-4474-4964-98eb-5046d0474346\") " pod="openstack/download-cache-openstack-openstack-cell1-v5bff"
Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.775543 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2758f1a1-4474-4964-98eb-5046d0474346-inventory\") pod \"download-cache-openstack-openstack-cell1-v5bff\" (UID: \"2758f1a1-4474-4964-98eb-5046d0474346\") " pod="openstack/download-cache-openstack-openstack-cell1-v5bff"
pod="openstack/download-cache-openstack-openstack-cell1-v5bff" Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.776238 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2758f1a1-4474-4964-98eb-5046d0474346-ceph\") pod \"download-cache-openstack-openstack-cell1-v5bff\" (UID: \"2758f1a1-4474-4964-98eb-5046d0474346\") " pod="openstack/download-cache-openstack-openstack-cell1-v5bff" Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.776414 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtqhg\" (UniqueName: \"kubernetes.io/projected/2758f1a1-4474-4964-98eb-5046d0474346-kube-api-access-xtqhg\") pod \"download-cache-openstack-openstack-cell1-v5bff\" (UID: \"2758f1a1-4474-4964-98eb-5046d0474346\") " pod="openstack/download-cache-openstack-openstack-cell1-v5bff" Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.786404 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2758f1a1-4474-4964-98eb-5046d0474346-inventory\") pod \"download-cache-openstack-openstack-cell1-v5bff\" (UID: \"2758f1a1-4474-4964-98eb-5046d0474346\") " pod="openstack/download-cache-openstack-openstack-cell1-v5bff" Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.788137 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2758f1a1-4474-4964-98eb-5046d0474346-ceph\") pod \"download-cache-openstack-openstack-cell1-v5bff\" (UID: \"2758f1a1-4474-4964-98eb-5046d0474346\") " pod="openstack/download-cache-openstack-openstack-cell1-v5bff" Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.795824 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/2758f1a1-4474-4964-98eb-5046d0474346-ssh-key-openstack-cell1\") pod \"download-cache-openstack-openstack-cell1-v5bff\" (UID: \"2758f1a1-4474-4964-98eb-5046d0474346\") " pod="openstack/download-cache-openstack-openstack-cell1-v5bff" Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.813235 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtqhg\" (UniqueName: \"kubernetes.io/projected/2758f1a1-4474-4964-98eb-5046d0474346-kube-api-access-xtqhg\") pod \"download-cache-openstack-openstack-cell1-v5bff\" (UID: \"2758f1a1-4474-4964-98eb-5046d0474346\") " pod="openstack/download-cache-openstack-openstack-cell1-v5bff" Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.911989 4811 util.go:30] "No sandbox for pod can be found. 
Jan 28 17:48:34 crc kubenswrapper[4811]: I0128 17:48:34.911989 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-v5bff"
Jan 28 17:48:35 crc kubenswrapper[4811]: I0128 17:48:35.600268 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-v5bff"]
Jan 28 17:48:35 crc kubenswrapper[4811]: I0128 17:48:35.613761 4811 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 28 17:48:36 crc kubenswrapper[4811]: I0128 17:48:36.506038 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-v5bff" event={"ID":"2758f1a1-4474-4964-98eb-5046d0474346","Type":"ContainerStarted","Data":"006c98b022738421a18eeb25b09c1bf7b22c262d828e10d21affd3967b12a5c1"}
Jan 28 17:48:36 crc kubenswrapper[4811]: I0128 17:48:36.506401 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-v5bff" event={"ID":"2758f1a1-4474-4964-98eb-5046d0474346","Type":"ContainerStarted","Data":"1ba967a4288364aab99e4f914e61bbd6fbb2f73bd1f6cbfe752fa7fe7948fff2"}
Jan 28 17:48:36 crc kubenswrapper[4811]: I0128 17:48:36.527893 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-openstack-openstack-cell1-v5bff" podStartSLOduration=2.119044309 podStartE2EDuration="2.527873979s" podCreationTimestamp="2026-01-28 17:48:34 +0000 UTC" firstStartedPulling="2026-01-28 17:48:35.613521972 +0000 UTC m=+7408.367885555" lastFinishedPulling="2026-01-28 17:48:36.022351642 +0000 UTC m=+7408.776715225" observedRunningTime="2026-01-28 17:48:36.526546513 +0000 UTC m=+7409.280910096" watchObservedRunningTime="2026-01-28 17:48:36.527873979 +0000 UTC m=+7409.282237562"
Jan 28 17:49:03 crc kubenswrapper[4811]: I0128 17:49:03.087147 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 17:49:03 crc kubenswrapper[4811]: I0128 17:49:03.087829 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 17:49:33 crc kubenswrapper[4811]: I0128 17:49:33.087710 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 17:49:33 crc kubenswrapper[4811]: I0128 17:49:33.088355 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 17:50:03 crc kubenswrapper[4811]: I0128 17:50:03.087927 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 17:50:03 crc kubenswrapper[4811]: I0128 17:50:03.089443 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 17:50:03 crc kubenswrapper[4811]: I0128 17:50:03.089510 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6"
Jan 28 17:50:03 crc kubenswrapper[4811]: I0128 17:50:03.090472 4811 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b2b40db7ba92973c78c86379b0152ed793614e93e3ce55a3e2a3a4ff04fcc7c9"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 28 17:50:03 crc kubenswrapper[4811]: I0128 17:50:03.090542 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://b2b40db7ba92973c78c86379b0152ed793614e93e3ce55a3e2a3a4ff04fcc7c9" gracePeriod=600
Jan 28 17:50:03 crc kubenswrapper[4811]: I0128 17:50:03.357666 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="b2b40db7ba92973c78c86379b0152ed793614e93e3ce55a3e2a3a4ff04fcc7c9" exitCode=0
Jan 28 17:50:03 crc kubenswrapper[4811]: I0128 17:50:03.357745 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"b2b40db7ba92973c78c86379b0152ed793614e93e3ce55a3e2a3a4ff04fcc7c9"}
Jan 28 17:50:03 crc kubenswrapper[4811]: I0128 17:50:03.358013 4811 scope.go:117] "RemoveContainer" containerID="1bcd36267aeacda468c2d2ee05b7c0018a25e8599ca1388f8f8967c101d5f7c7"
Jan 28 17:50:04 crc kubenswrapper[4811]: I0128 17:50:04.370959 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"90375971f2c5a7938073dc9ee633c0e5f78e2d3a0ea787115d60e65aaaecf886"}
Jan 28 17:50:09 crc kubenswrapper[4811]: I0128 17:50:09.420579 4811 generic.go:334] "Generic (PLEG): container finished" podID="2758f1a1-4474-4964-98eb-5046d0474346" containerID="006c98b022738421a18eeb25b09c1bf7b22c262d828e10d21affd3967b12a5c1" exitCode=0
Jan 28 17:50:09 crc kubenswrapper[4811]: I0128 17:50:09.420663 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-v5bff" event={"ID":"2758f1a1-4474-4964-98eb-5046d0474346","Type":"ContainerDied","Data":"006c98b022738421a18eeb25b09c1bf7b22c262d828e10d21affd3967b12a5c1"}
Jan 28 17:50:10 crc kubenswrapper[4811]: I0128 17:50:10.884368 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-v5bff"
Jan 28 17:50:10 crc kubenswrapper[4811]: I0128 17:50:10.979515 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2758f1a1-4474-4964-98eb-5046d0474346-inventory\") pod \"2758f1a1-4474-4964-98eb-5046d0474346\" (UID: \"2758f1a1-4474-4964-98eb-5046d0474346\") "
Jan 28 17:50:10 crc kubenswrapper[4811]: I0128 17:50:10.979654 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xtqhg\" (UniqueName: \"kubernetes.io/projected/2758f1a1-4474-4964-98eb-5046d0474346-kube-api-access-xtqhg\") pod \"2758f1a1-4474-4964-98eb-5046d0474346\" (UID: \"2758f1a1-4474-4964-98eb-5046d0474346\") "
Jan 28 17:50:10 crc kubenswrapper[4811]: I0128 17:50:10.979804 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/2758f1a1-4474-4964-98eb-5046d0474346-ssh-key-openstack-cell1\") pod \"2758f1a1-4474-4964-98eb-5046d0474346\" (UID: \"2758f1a1-4474-4964-98eb-5046d0474346\") "
Jan 28 17:50:10 crc kubenswrapper[4811]: I0128 17:50:10.979885 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2758f1a1-4474-4964-98eb-5046d0474346-ceph\") pod \"2758f1a1-4474-4964-98eb-5046d0474346\" (UID: \"2758f1a1-4474-4964-98eb-5046d0474346\") "
Jan 28 17:50:10 crc kubenswrapper[4811]: I0128 17:50:10.985058 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2758f1a1-4474-4964-98eb-5046d0474346-kube-api-access-xtqhg" (OuterVolumeSpecName: "kube-api-access-xtqhg") pod "2758f1a1-4474-4964-98eb-5046d0474346" (UID: "2758f1a1-4474-4964-98eb-5046d0474346"). InnerVolumeSpecName "kube-api-access-xtqhg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 17:50:10 crc kubenswrapper[4811]: I0128 17:50:10.996815 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2758f1a1-4474-4964-98eb-5046d0474346-ceph" (OuterVolumeSpecName: "ceph") pod "2758f1a1-4474-4964-98eb-5046d0474346" (UID: "2758f1a1-4474-4964-98eb-5046d0474346"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:50:11 crc kubenswrapper[4811]: I0128 17:50:11.012102 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2758f1a1-4474-4964-98eb-5046d0474346-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "2758f1a1-4474-4964-98eb-5046d0474346" (UID: "2758f1a1-4474-4964-98eb-5046d0474346"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:50:11 crc kubenswrapper[4811]: I0128 17:50:11.082560 4811 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2758f1a1-4474-4964-98eb-5046d0474346-inventory\") on node \"crc\" DevicePath \"\"" Jan 28 17:50:11 crc kubenswrapper[4811]: I0128 17:50:11.082610 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xtqhg\" (UniqueName: \"kubernetes.io/projected/2758f1a1-4474-4964-98eb-5046d0474346-kube-api-access-xtqhg\") on node \"crc\" DevicePath \"\"" Jan 28 17:50:11 crc kubenswrapper[4811]: I0128 17:50:11.082626 4811 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/2758f1a1-4474-4964-98eb-5046d0474346-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 28 17:50:11 crc kubenswrapper[4811]: I0128 17:50:11.082638 4811 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2758f1a1-4474-4964-98eb-5046d0474346-ceph\") on node \"crc\" DevicePath \"\"" Jan 28 17:50:11 crc kubenswrapper[4811]: I0128 17:50:11.441204 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-v5bff" event={"ID":"2758f1a1-4474-4964-98eb-5046d0474346","Type":"ContainerDied","Data":"1ba967a4288364aab99e4f914e61bbd6fbb2f73bd1f6cbfe752fa7fe7948fff2"} Jan 28 17:50:11 crc kubenswrapper[4811]: I0128 17:50:11.441525 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1ba967a4288364aab99e4f914e61bbd6fbb2f73bd1f6cbfe752fa7fe7948fff2" Jan 28 17:50:11 crc kubenswrapper[4811]: I0128 17:50:11.441277 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-v5bff" Jan 28 17:50:11 crc kubenswrapper[4811]: I0128 17:50:11.523778 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-sdml6"] Jan 28 17:50:11 crc kubenswrapper[4811]: E0128 17:50:11.524324 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2758f1a1-4474-4964-98eb-5046d0474346" containerName="download-cache-openstack-openstack-cell1" Jan 28 17:50:11 crc kubenswrapper[4811]: I0128 17:50:11.524346 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="2758f1a1-4474-4964-98eb-5046d0474346" containerName="download-cache-openstack-openstack-cell1" Jan 28 17:50:11 crc kubenswrapper[4811]: I0128 17:50:11.524658 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="2758f1a1-4474-4964-98eb-5046d0474346" containerName="download-cache-openstack-openstack-cell1" Jan 28 17:50:11 crc kubenswrapper[4811]: I0128 17:50:11.525539 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-sdml6" Jan 28 17:50:11 crc kubenswrapper[4811]: I0128 17:50:11.529223 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 28 17:50:11 crc kubenswrapper[4811]: I0128 17:50:11.529471 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 28 17:50:11 crc kubenswrapper[4811]: I0128 17:50:11.529489 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-856xk" Jan 28 17:50:11 crc kubenswrapper[4811]: I0128 17:50:11.529567 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 28 17:50:11 crc kubenswrapper[4811]: I0128 17:50:11.538850 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-sdml6"] Jan 28 17:50:11 crc kubenswrapper[4811]: I0128 17:50:11.596157 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8ttx\" (UniqueName: \"kubernetes.io/projected/15a95376-2b63-44f6-aca3-aef9692a3fdd-kube-api-access-w8ttx\") pod \"configure-network-openstack-openstack-cell1-sdml6\" (UID: \"15a95376-2b63-44f6-aca3-aef9692a3fdd\") " pod="openstack/configure-network-openstack-openstack-cell1-sdml6" Jan 28 17:50:11 crc kubenswrapper[4811]: I0128 17:50:11.596311 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/15a95376-2b63-44f6-aca3-aef9692a3fdd-ceph\") pod \"configure-network-openstack-openstack-cell1-sdml6\" (UID: \"15a95376-2b63-44f6-aca3-aef9692a3fdd\") " pod="openstack/configure-network-openstack-openstack-cell1-sdml6" Jan 28 17:50:11 crc kubenswrapper[4811]: I0128 17:50:11.596407 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15a95376-2b63-44f6-aca3-aef9692a3fdd-inventory\") pod \"configure-network-openstack-openstack-cell1-sdml6\" (UID: \"15a95376-2b63-44f6-aca3-aef9692a3fdd\") " pod="openstack/configure-network-openstack-openstack-cell1-sdml6" Jan 28 17:50:11 crc kubenswrapper[4811]: I0128 17:50:11.596537 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/15a95376-2b63-44f6-aca3-aef9692a3fdd-ssh-key-openstack-cell1\") pod \"configure-network-openstack-openstack-cell1-sdml6\" (UID: \"15a95376-2b63-44f6-aca3-aef9692a3fdd\") " pod="openstack/configure-network-openstack-openstack-cell1-sdml6" Jan 28 17:50:11 crc kubenswrapper[4811]: I0128 17:50:11.698885 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/15a95376-2b63-44f6-aca3-aef9692a3fdd-ceph\") pod \"configure-network-openstack-openstack-cell1-sdml6\" (UID: \"15a95376-2b63-44f6-aca3-aef9692a3fdd\") " pod="openstack/configure-network-openstack-openstack-cell1-sdml6" Jan 28 17:50:11 crc kubenswrapper[4811]: I0128 17:50:11.699009 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15a95376-2b63-44f6-aca3-aef9692a3fdd-inventory\") pod \"configure-network-openstack-openstack-cell1-sdml6\" (UID: \"15a95376-2b63-44f6-aca3-aef9692a3fdd\") " 
pod="openstack/configure-network-openstack-openstack-cell1-sdml6" Jan 28 17:50:11 crc kubenswrapper[4811]: I0128 17:50:11.699119 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/15a95376-2b63-44f6-aca3-aef9692a3fdd-ssh-key-openstack-cell1\") pod \"configure-network-openstack-openstack-cell1-sdml6\" (UID: \"15a95376-2b63-44f6-aca3-aef9692a3fdd\") " pod="openstack/configure-network-openstack-openstack-cell1-sdml6" Jan 28 17:50:11 crc kubenswrapper[4811]: I0128 17:50:11.699267 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8ttx\" (UniqueName: \"kubernetes.io/projected/15a95376-2b63-44f6-aca3-aef9692a3fdd-kube-api-access-w8ttx\") pod \"configure-network-openstack-openstack-cell1-sdml6\" (UID: \"15a95376-2b63-44f6-aca3-aef9692a3fdd\") " pod="openstack/configure-network-openstack-openstack-cell1-sdml6" Jan 28 17:50:11 crc kubenswrapper[4811]: I0128 17:50:11.703825 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/15a95376-2b63-44f6-aca3-aef9692a3fdd-ssh-key-openstack-cell1\") pod \"configure-network-openstack-openstack-cell1-sdml6\" (UID: \"15a95376-2b63-44f6-aca3-aef9692a3fdd\") " pod="openstack/configure-network-openstack-openstack-cell1-sdml6" Jan 28 17:50:11 crc kubenswrapper[4811]: I0128 17:50:11.704215 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15a95376-2b63-44f6-aca3-aef9692a3fdd-inventory\") pod \"configure-network-openstack-openstack-cell1-sdml6\" (UID: \"15a95376-2b63-44f6-aca3-aef9692a3fdd\") " pod="openstack/configure-network-openstack-openstack-cell1-sdml6" Jan 28 17:50:11 crc kubenswrapper[4811]: I0128 17:50:11.709706 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/15a95376-2b63-44f6-aca3-aef9692a3fdd-ceph\") pod \"configure-network-openstack-openstack-cell1-sdml6\" (UID: \"15a95376-2b63-44f6-aca3-aef9692a3fdd\") " pod="openstack/configure-network-openstack-openstack-cell1-sdml6" Jan 28 17:50:11 crc kubenswrapper[4811]: I0128 17:50:11.718085 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8ttx\" (UniqueName: \"kubernetes.io/projected/15a95376-2b63-44f6-aca3-aef9692a3fdd-kube-api-access-w8ttx\") pod \"configure-network-openstack-openstack-cell1-sdml6\" (UID: \"15a95376-2b63-44f6-aca3-aef9692a3fdd\") " pod="openstack/configure-network-openstack-openstack-cell1-sdml6" Jan 28 17:50:11 crc kubenswrapper[4811]: I0128 17:50:11.851970 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-sdml6" Jan 28 17:50:12 crc kubenswrapper[4811]: I0128 17:50:12.451315 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-sdml6"] Jan 28 17:50:13 crc kubenswrapper[4811]: I0128 17:50:13.494316 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-sdml6" event={"ID":"15a95376-2b63-44f6-aca3-aef9692a3fdd","Type":"ContainerStarted","Data":"3d67d22d6a6d5ea4bb103a063405171464d540280ff929827a1261ab672d1dc4"} Jan 28 17:50:13 crc kubenswrapper[4811]: I0128 17:50:13.494865 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-sdml6" event={"ID":"15a95376-2b63-44f6-aca3-aef9692a3fdd","Type":"ContainerStarted","Data":"e39ac405e297fd3e00a506c88e160747c091a5fb16af62b4ea033d3647a53f4a"} Jan 28 17:50:13 crc kubenswrapper[4811]: I0128 17:50:13.513039 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-openstack-openstack-cell1-sdml6" podStartSLOduration=1.885453272 podStartE2EDuration="2.513017669s" podCreationTimestamp="2026-01-28 17:50:11 +0000 UTC" firstStartedPulling="2026-01-28 17:50:12.493880799 +0000 UTC m=+7505.248244382" lastFinishedPulling="2026-01-28 17:50:13.121445196 +0000 UTC m=+7505.875808779" observedRunningTime="2026-01-28 17:50:13.511265492 +0000 UTC m=+7506.265629085" watchObservedRunningTime="2026-01-28 17:50:13.513017669 +0000 UTC m=+7506.267381252" Jan 28 17:51:35 crc kubenswrapper[4811]: I0128 17:51:35.288535 4811 generic.go:334] "Generic (PLEG): container finished" podID="15a95376-2b63-44f6-aca3-aef9692a3fdd" containerID="3d67d22d6a6d5ea4bb103a063405171464d540280ff929827a1261ab672d1dc4" exitCode=0 Jan 28 17:51:35 crc kubenswrapper[4811]: I0128 17:51:35.288764 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-sdml6" event={"ID":"15a95376-2b63-44f6-aca3-aef9692a3fdd","Type":"ContainerDied","Data":"3d67d22d6a6d5ea4bb103a063405171464d540280ff929827a1261ab672d1dc4"} Jan 28 17:51:35 crc kubenswrapper[4811]: I0128 17:51:35.887259 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ttwzb"] Jan 28 17:51:35 crc kubenswrapper[4811]: I0128 17:51:35.890205 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ttwzb" Jan 28 17:51:35 crc kubenswrapper[4811]: I0128 17:51:35.906354 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ttwzb"] Jan 28 17:51:35 crc kubenswrapper[4811]: I0128 17:51:35.981662 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a70bc9d5-a38b-4078-8d84-0cb1e60c3055-utilities\") pod \"community-operators-ttwzb\" (UID: \"a70bc9d5-a38b-4078-8d84-0cb1e60c3055\") " pod="openshift-marketplace/community-operators-ttwzb" Jan 28 17:51:35 crc kubenswrapper[4811]: I0128 17:51:35.981753 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a70bc9d5-a38b-4078-8d84-0cb1e60c3055-catalog-content\") pod \"community-operators-ttwzb\" (UID: \"a70bc9d5-a38b-4078-8d84-0cb1e60c3055\") " pod="openshift-marketplace/community-operators-ttwzb" Jan 28 17:51:35 crc kubenswrapper[4811]: I0128 17:51:35.981932 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-np26v\" (UniqueName: \"kubernetes.io/projected/a70bc9d5-a38b-4078-8d84-0cb1e60c3055-kube-api-access-np26v\") pod \"community-operators-ttwzb\" (UID: \"a70bc9d5-a38b-4078-8d84-0cb1e60c3055\") " pod="openshift-marketplace/community-operators-ttwzb" Jan 28 17:51:36 crc kubenswrapper[4811]: I0128 17:51:36.083813 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a70bc9d5-a38b-4078-8d84-0cb1e60c3055-utilities\") pod \"community-operators-ttwzb\" (UID: \"a70bc9d5-a38b-4078-8d84-0cb1e60c3055\") " pod="openshift-marketplace/community-operators-ttwzb" Jan 28 17:51:36 crc kubenswrapper[4811]: I0128 17:51:36.084168 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a70bc9d5-a38b-4078-8d84-0cb1e60c3055-catalog-content\") pod \"community-operators-ttwzb\" (UID: \"a70bc9d5-a38b-4078-8d84-0cb1e60c3055\") " pod="openshift-marketplace/community-operators-ttwzb" Jan 28 17:51:36 crc kubenswrapper[4811]: I0128 17:51:36.084400 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a70bc9d5-a38b-4078-8d84-0cb1e60c3055-utilities\") pod \"community-operators-ttwzb\" (UID: \"a70bc9d5-a38b-4078-8d84-0cb1e60c3055\") " pod="openshift-marketplace/community-operators-ttwzb" Jan 28 17:51:36 crc kubenswrapper[4811]: I0128 17:51:36.084410 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-np26v\" (UniqueName: \"kubernetes.io/projected/a70bc9d5-a38b-4078-8d84-0cb1e60c3055-kube-api-access-np26v\") pod \"community-operators-ttwzb\" (UID: \"a70bc9d5-a38b-4078-8d84-0cb1e60c3055\") " pod="openshift-marketplace/community-operators-ttwzb" Jan 28 17:51:36 crc kubenswrapper[4811]: I0128 17:51:36.084787 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a70bc9d5-a38b-4078-8d84-0cb1e60c3055-catalog-content\") pod \"community-operators-ttwzb\" (UID: \"a70bc9d5-a38b-4078-8d84-0cb1e60c3055\") " pod="openshift-marketplace/community-operators-ttwzb" Jan 28 17:51:36 crc kubenswrapper[4811]: I0128 17:51:36.107561 4811 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-np26v\" (UniqueName: \"kubernetes.io/projected/a70bc9d5-a38b-4078-8d84-0cb1e60c3055-kube-api-access-np26v\") pod \"community-operators-ttwzb\" (UID: \"a70bc9d5-a38b-4078-8d84-0cb1e60c3055\") " pod="openshift-marketplace/community-operators-ttwzb" Jan 28 17:51:36 crc kubenswrapper[4811]: I0128 17:51:36.251524 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ttwzb" Jan 28 17:51:36 crc kubenswrapper[4811]: I0128 17:51:36.823221 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ttwzb"] Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.022392 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-sdml6" Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.113529 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15a95376-2b63-44f6-aca3-aef9692a3fdd-inventory\") pod \"15a95376-2b63-44f6-aca3-aef9692a3fdd\" (UID: \"15a95376-2b63-44f6-aca3-aef9692a3fdd\") " Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.114326 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/15a95376-2b63-44f6-aca3-aef9692a3fdd-ceph\") pod \"15a95376-2b63-44f6-aca3-aef9692a3fdd\" (UID: \"15a95376-2b63-44f6-aca3-aef9692a3fdd\") " Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.114403 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/15a95376-2b63-44f6-aca3-aef9692a3fdd-ssh-key-openstack-cell1\") pod \"15a95376-2b63-44f6-aca3-aef9692a3fdd\" (UID: \"15a95376-2b63-44f6-aca3-aef9692a3fdd\") " Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.114458 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w8ttx\" (UniqueName: \"kubernetes.io/projected/15a95376-2b63-44f6-aca3-aef9692a3fdd-kube-api-access-w8ttx\") pod \"15a95376-2b63-44f6-aca3-aef9692a3fdd\" (UID: \"15a95376-2b63-44f6-aca3-aef9692a3fdd\") " Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.120292 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15a95376-2b63-44f6-aca3-aef9692a3fdd-ceph" (OuterVolumeSpecName: "ceph") pod "15a95376-2b63-44f6-aca3-aef9692a3fdd" (UID: "15a95376-2b63-44f6-aca3-aef9692a3fdd"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.121214 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15a95376-2b63-44f6-aca3-aef9692a3fdd-kube-api-access-w8ttx" (OuterVolumeSpecName: "kube-api-access-w8ttx") pod "15a95376-2b63-44f6-aca3-aef9692a3fdd" (UID: "15a95376-2b63-44f6-aca3-aef9692a3fdd"). InnerVolumeSpecName "kube-api-access-w8ttx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.151315 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15a95376-2b63-44f6-aca3-aef9692a3fdd-inventory" (OuterVolumeSpecName: "inventory") pod "15a95376-2b63-44f6-aca3-aef9692a3fdd" (UID: "15a95376-2b63-44f6-aca3-aef9692a3fdd"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.152286 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15a95376-2b63-44f6-aca3-aef9692a3fdd-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "15a95376-2b63-44f6-aca3-aef9692a3fdd" (UID: "15a95376-2b63-44f6-aca3-aef9692a3fdd"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.216860 4811 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/15a95376-2b63-44f6-aca3-aef9692a3fdd-ceph\") on node \"crc\" DevicePath \"\"" Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.216905 4811 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/15a95376-2b63-44f6-aca3-aef9692a3fdd-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.216922 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w8ttx\" (UniqueName: \"kubernetes.io/projected/15a95376-2b63-44f6-aca3-aef9692a3fdd-kube-api-access-w8ttx\") on node \"crc\" DevicePath \"\"" Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.216935 4811 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15a95376-2b63-44f6-aca3-aef9692a3fdd-inventory\") on node \"crc\" DevicePath \"\"" Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.334585 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-sdml6" event={"ID":"15a95376-2b63-44f6-aca3-aef9692a3fdd","Type":"ContainerDied","Data":"e39ac405e297fd3e00a506c88e160747c091a5fb16af62b4ea033d3647a53f4a"} Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.334854 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e39ac405e297fd3e00a506c88e160747c091a5fb16af62b4ea033d3647a53f4a" Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.334690 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-sdml6" Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.349241 4811 generic.go:334] "Generic (PLEG): container finished" podID="a70bc9d5-a38b-4078-8d84-0cb1e60c3055" containerID="75aaaa39149cda659a9a6e41829c9791903c38c44cdc92cb1b301fc532d88a8f" exitCode=0 Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.349309 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ttwzb" event={"ID":"a70bc9d5-a38b-4078-8d84-0cb1e60c3055","Type":"ContainerDied","Data":"75aaaa39149cda659a9a6e41829c9791903c38c44cdc92cb1b301fc532d88a8f"} Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.349357 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ttwzb" event={"ID":"a70bc9d5-a38b-4078-8d84-0cb1e60c3055","Type":"ContainerStarted","Data":"0965d61baece15e3b7ace0afee163c0a86f8321554f65d7298a8f70cca8b0b12"} Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.405295 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-4mwzh"] Jan 28 17:51:37 crc kubenswrapper[4811]: E0128 17:51:37.405731 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15a95376-2b63-44f6-aca3-aef9692a3fdd" containerName="configure-network-openstack-openstack-cell1" Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.405748 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="15a95376-2b63-44f6-aca3-aef9692a3fdd" containerName="configure-network-openstack-openstack-cell1" Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.405941 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="15a95376-2b63-44f6-aca3-aef9692a3fdd" containerName="configure-network-openstack-openstack-cell1" Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.406714 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-4mwzh" Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.411876 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.412005 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.412338 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-856xk" Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.412348 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.429549 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-4mwzh"] Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.522978 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/d86a0acd-0347-442a-b9dc-ccbd1ef895e7-ssh-key-openstack-cell1\") pod \"validate-network-openstack-openstack-cell1-4mwzh\" (UID: \"d86a0acd-0347-442a-b9dc-ccbd1ef895e7\") " pod="openstack/validate-network-openstack-openstack-cell1-4mwzh" Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.523504 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hvk7\" (UniqueName: \"kubernetes.io/projected/d86a0acd-0347-442a-b9dc-ccbd1ef895e7-kube-api-access-8hvk7\") pod \"validate-network-openstack-openstack-cell1-4mwzh\" (UID: \"d86a0acd-0347-442a-b9dc-ccbd1ef895e7\") " pod="openstack/validate-network-openstack-openstack-cell1-4mwzh" Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.523531 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d86a0acd-0347-442a-b9dc-ccbd1ef895e7-inventory\") pod \"validate-network-openstack-openstack-cell1-4mwzh\" (UID: \"d86a0acd-0347-442a-b9dc-ccbd1ef895e7\") " pod="openstack/validate-network-openstack-openstack-cell1-4mwzh" Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.523549 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d86a0acd-0347-442a-b9dc-ccbd1ef895e7-ceph\") pod \"validate-network-openstack-openstack-cell1-4mwzh\" (UID: \"d86a0acd-0347-442a-b9dc-ccbd1ef895e7\") " pod="openstack/validate-network-openstack-openstack-cell1-4mwzh" Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.626136 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hvk7\" (UniqueName: \"kubernetes.io/projected/d86a0acd-0347-442a-b9dc-ccbd1ef895e7-kube-api-access-8hvk7\") pod \"validate-network-openstack-openstack-cell1-4mwzh\" (UID: \"d86a0acd-0347-442a-b9dc-ccbd1ef895e7\") " pod="openstack/validate-network-openstack-openstack-cell1-4mwzh" Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.626202 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d86a0acd-0347-442a-b9dc-ccbd1ef895e7-inventory\") pod \"validate-network-openstack-openstack-cell1-4mwzh\" (UID: \"d86a0acd-0347-442a-b9dc-ccbd1ef895e7\") " 
pod="openstack/validate-network-openstack-openstack-cell1-4mwzh" Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.626228 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d86a0acd-0347-442a-b9dc-ccbd1ef895e7-ceph\") pod \"validate-network-openstack-openstack-cell1-4mwzh\" (UID: \"d86a0acd-0347-442a-b9dc-ccbd1ef895e7\") " pod="openstack/validate-network-openstack-openstack-cell1-4mwzh" Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.626366 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/d86a0acd-0347-442a-b9dc-ccbd1ef895e7-ssh-key-openstack-cell1\") pod \"validate-network-openstack-openstack-cell1-4mwzh\" (UID: \"d86a0acd-0347-442a-b9dc-ccbd1ef895e7\") " pod="openstack/validate-network-openstack-openstack-cell1-4mwzh" Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.632393 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d86a0acd-0347-442a-b9dc-ccbd1ef895e7-inventory\") pod \"validate-network-openstack-openstack-cell1-4mwzh\" (UID: \"d86a0acd-0347-442a-b9dc-ccbd1ef895e7\") " pod="openstack/validate-network-openstack-openstack-cell1-4mwzh" Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.632454 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/d86a0acd-0347-442a-b9dc-ccbd1ef895e7-ssh-key-openstack-cell1\") pod \"validate-network-openstack-openstack-cell1-4mwzh\" (UID: \"d86a0acd-0347-442a-b9dc-ccbd1ef895e7\") " pod="openstack/validate-network-openstack-openstack-cell1-4mwzh" Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.632942 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d86a0acd-0347-442a-b9dc-ccbd1ef895e7-ceph\") pod \"validate-network-openstack-openstack-cell1-4mwzh\" (UID: \"d86a0acd-0347-442a-b9dc-ccbd1ef895e7\") " pod="openstack/validate-network-openstack-openstack-cell1-4mwzh" Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.643323 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hvk7\" (UniqueName: \"kubernetes.io/projected/d86a0acd-0347-442a-b9dc-ccbd1ef895e7-kube-api-access-8hvk7\") pod \"validate-network-openstack-openstack-cell1-4mwzh\" (UID: \"d86a0acd-0347-442a-b9dc-ccbd1ef895e7\") " pod="openstack/validate-network-openstack-openstack-cell1-4mwzh" Jan 28 17:51:37 crc kubenswrapper[4811]: I0128 17:51:37.726490 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-4mwzh" Jan 28 17:51:38 crc kubenswrapper[4811]: I0128 17:51:38.596307 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-4mwzh"] Jan 28 17:51:39 crc kubenswrapper[4811]: I0128 17:51:39.403263 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-4mwzh" event={"ID":"d86a0acd-0347-442a-b9dc-ccbd1ef895e7","Type":"ContainerStarted","Data":"22dec83ade7a8ecff8235871d5a571e0f1daf2190e2a963e201a3737692ef146"} Jan 28 17:51:39 crc kubenswrapper[4811]: I0128 17:51:39.407494 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ttwzb" event={"ID":"a70bc9d5-a38b-4078-8d84-0cb1e60c3055","Type":"ContainerStarted","Data":"884488fbbeec822791d836463e1470abf23c557addf56f45713c1d735ee5aadd"} Jan 28 17:51:40 crc kubenswrapper[4811]: I0128 17:51:40.419469 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-4mwzh" event={"ID":"d86a0acd-0347-442a-b9dc-ccbd1ef895e7","Type":"ContainerStarted","Data":"e63322f631050a49413e10eda3cd04665d68bc1983b2c24cb521125609db87e4"} Jan 28 17:51:40 crc kubenswrapper[4811]: I0128 17:51:40.443316 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-openstack-openstack-cell1-4mwzh" podStartSLOduration=2.460566179 podStartE2EDuration="3.443299905s" podCreationTimestamp="2026-01-28 17:51:37 +0000 UTC" firstStartedPulling="2026-01-28 17:51:38.604849529 +0000 UTC m=+7591.359213122" lastFinishedPulling="2026-01-28 17:51:39.587583265 +0000 UTC m=+7592.341946848" observedRunningTime="2026-01-28 17:51:40.440966401 +0000 UTC m=+7593.195329984" watchObservedRunningTime="2026-01-28 17:51:40.443299905 +0000 UTC m=+7593.197663488" Jan 28 17:51:41 crc kubenswrapper[4811]: I0128 17:51:41.430695 4811 generic.go:334] "Generic (PLEG): container finished" podID="a70bc9d5-a38b-4078-8d84-0cb1e60c3055" containerID="884488fbbeec822791d836463e1470abf23c557addf56f45713c1d735ee5aadd" exitCode=0 Jan 28 17:51:41 crc kubenswrapper[4811]: I0128 17:51:41.430846 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ttwzb" event={"ID":"a70bc9d5-a38b-4078-8d84-0cb1e60c3055","Type":"ContainerDied","Data":"884488fbbeec822791d836463e1470abf23c557addf56f45713c1d735ee5aadd"} Jan 28 17:51:44 crc kubenswrapper[4811]: I0128 17:51:44.474339 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ttwzb" event={"ID":"a70bc9d5-a38b-4078-8d84-0cb1e60c3055","Type":"ContainerStarted","Data":"d7785d58d46ea9a1d080fd4b1c809caaadd0d9d0fc51f24759a7d1ab07925d9c"} Jan 28 17:51:44 crc kubenswrapper[4811]: I0128 17:51:44.501040 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ttwzb" podStartSLOduration=3.411413446 podStartE2EDuration="9.501019809s" podCreationTimestamp="2026-01-28 17:51:35 +0000 UTC" firstStartedPulling="2026-01-28 17:51:37.35105183 +0000 UTC m=+7590.105415413" lastFinishedPulling="2026-01-28 17:51:43.440658203 +0000 UTC m=+7596.195021776" observedRunningTime="2026-01-28 17:51:44.490993478 +0000 UTC m=+7597.245357071" watchObservedRunningTime="2026-01-28 17:51:44.501019809 +0000 UTC m=+7597.255383392" Jan 28 17:51:45 crc kubenswrapper[4811]: I0128 17:51:45.485291 4811 generic.go:334] "Generic (PLEG): 
container finished" podID="d86a0acd-0347-442a-b9dc-ccbd1ef895e7" containerID="e63322f631050a49413e10eda3cd04665d68bc1983b2c24cb521125609db87e4" exitCode=0 Jan 28 17:51:45 crc kubenswrapper[4811]: I0128 17:51:45.485390 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-4mwzh" event={"ID":"d86a0acd-0347-442a-b9dc-ccbd1ef895e7","Type":"ContainerDied","Data":"e63322f631050a49413e10eda3cd04665d68bc1983b2c24cb521125609db87e4"} Jan 28 17:51:46 crc kubenswrapper[4811]: I0128 17:51:46.252302 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ttwzb" Jan 28 17:51:46 crc kubenswrapper[4811]: I0128 17:51:46.252780 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ttwzb" Jan 28 17:51:46 crc kubenswrapper[4811]: I0128 17:51:46.296575 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ttwzb" Jan 28 17:51:46 crc kubenswrapper[4811]: I0128 17:51:46.922121 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-4mwzh" Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.052724 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d86a0acd-0347-442a-b9dc-ccbd1ef895e7-inventory\") pod \"d86a0acd-0347-442a-b9dc-ccbd1ef895e7\" (UID: \"d86a0acd-0347-442a-b9dc-ccbd1ef895e7\") " Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.052878 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d86a0acd-0347-442a-b9dc-ccbd1ef895e7-ceph\") pod \"d86a0acd-0347-442a-b9dc-ccbd1ef895e7\" (UID: \"d86a0acd-0347-442a-b9dc-ccbd1ef895e7\") " Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.052988 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/d86a0acd-0347-442a-b9dc-ccbd1ef895e7-ssh-key-openstack-cell1\") pod \"d86a0acd-0347-442a-b9dc-ccbd1ef895e7\" (UID: \"d86a0acd-0347-442a-b9dc-ccbd1ef895e7\") " Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.053285 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8hvk7\" (UniqueName: \"kubernetes.io/projected/d86a0acd-0347-442a-b9dc-ccbd1ef895e7-kube-api-access-8hvk7\") pod \"d86a0acd-0347-442a-b9dc-ccbd1ef895e7\" (UID: \"d86a0acd-0347-442a-b9dc-ccbd1ef895e7\") " Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.059933 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d86a0acd-0347-442a-b9dc-ccbd1ef895e7-ceph" (OuterVolumeSpecName: "ceph") pod "d86a0acd-0347-442a-b9dc-ccbd1ef895e7" (UID: "d86a0acd-0347-442a-b9dc-ccbd1ef895e7"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.061566 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d86a0acd-0347-442a-b9dc-ccbd1ef895e7-kube-api-access-8hvk7" (OuterVolumeSpecName: "kube-api-access-8hvk7") pod "d86a0acd-0347-442a-b9dc-ccbd1ef895e7" (UID: "d86a0acd-0347-442a-b9dc-ccbd1ef895e7"). InnerVolumeSpecName "kube-api-access-8hvk7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.090320 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d86a0acd-0347-442a-b9dc-ccbd1ef895e7-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "d86a0acd-0347-442a-b9dc-ccbd1ef895e7" (UID: "d86a0acd-0347-442a-b9dc-ccbd1ef895e7"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.090351 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d86a0acd-0347-442a-b9dc-ccbd1ef895e7-inventory" (OuterVolumeSpecName: "inventory") pod "d86a0acd-0347-442a-b9dc-ccbd1ef895e7" (UID: "d86a0acd-0347-442a-b9dc-ccbd1ef895e7"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.155413 4811 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d86a0acd-0347-442a-b9dc-ccbd1ef895e7-ceph\") on node \"crc\" DevicePath \"\"" Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.155470 4811 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/d86a0acd-0347-442a-b9dc-ccbd1ef895e7-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.155485 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8hvk7\" (UniqueName: \"kubernetes.io/projected/d86a0acd-0347-442a-b9dc-ccbd1ef895e7-kube-api-access-8hvk7\") on node \"crc\" DevicePath \"\"" Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.155494 4811 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d86a0acd-0347-442a-b9dc-ccbd1ef895e7-inventory\") on node \"crc\" DevicePath \"\"" Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.507371 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-4mwzh" event={"ID":"d86a0acd-0347-442a-b9dc-ccbd1ef895e7","Type":"ContainerDied","Data":"22dec83ade7a8ecff8235871d5a571e0f1daf2190e2a963e201a3737692ef146"} Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.507422 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="22dec83ade7a8ecff8235871d5a571e0f1daf2190e2a963e201a3737692ef146" Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.507388 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-4mwzh" Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.633749 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-openstack-openstack-cell1-q85zb"] Jan 28 17:51:47 crc kubenswrapper[4811]: E0128 17:51:47.634329 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d86a0acd-0347-442a-b9dc-ccbd1ef895e7" containerName="validate-network-openstack-openstack-cell1" Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.634352 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d86a0acd-0347-442a-b9dc-ccbd1ef895e7" containerName="validate-network-openstack-openstack-cell1" Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.634612 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d86a0acd-0347-442a-b9dc-ccbd1ef895e7" containerName="validate-network-openstack-openstack-cell1" Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.635576 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-q85zb" Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.638758 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.639031 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.639077 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-856xk" Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.639257 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.647730 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-openstack-openstack-cell1-q85zb"] Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.769941 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a-ceph\") pod \"install-os-openstack-openstack-cell1-q85zb\" (UID: \"93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a\") " pod="openstack/install-os-openstack-openstack-cell1-q85zb" Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.770218 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bfg8j\" (UniqueName: \"kubernetes.io/projected/93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a-kube-api-access-bfg8j\") pod \"install-os-openstack-openstack-cell1-q85zb\" (UID: \"93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a\") " pod="openstack/install-os-openstack-openstack-cell1-q85zb" Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.770305 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a-ssh-key-openstack-cell1\") pod \"install-os-openstack-openstack-cell1-q85zb\" (UID: \"93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a\") " pod="openstack/install-os-openstack-openstack-cell1-q85zb" Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.770366 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a-inventory\") pod \"install-os-openstack-openstack-cell1-q85zb\" (UID: \"93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a\") " pod="openstack/install-os-openstack-openstack-cell1-q85zb" Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.873089 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a-ceph\") pod \"install-os-openstack-openstack-cell1-q85zb\" (UID: \"93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a\") " pod="openstack/install-os-openstack-openstack-cell1-q85zb" Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.873454 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bfg8j\" (UniqueName: \"kubernetes.io/projected/93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a-kube-api-access-bfg8j\") pod \"install-os-openstack-openstack-cell1-q85zb\" (UID: \"93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a\") " pod="openstack/install-os-openstack-openstack-cell1-q85zb" Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.873701 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a-ssh-key-openstack-cell1\") pod \"install-os-openstack-openstack-cell1-q85zb\" (UID: \"93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a\") " pod="openstack/install-os-openstack-openstack-cell1-q85zb" Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.873876 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a-inventory\") pod \"install-os-openstack-openstack-cell1-q85zb\" (UID: \"93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a\") " pod="openstack/install-os-openstack-openstack-cell1-q85zb" Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.877371 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a-inventory\") pod \"install-os-openstack-openstack-cell1-q85zb\" (UID: \"93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a\") " pod="openstack/install-os-openstack-openstack-cell1-q85zb" Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.877618 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a-ssh-key-openstack-cell1\") pod \"install-os-openstack-openstack-cell1-q85zb\" (UID: \"93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a\") " pod="openstack/install-os-openstack-openstack-cell1-q85zb" Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.877972 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a-ceph\") pod \"install-os-openstack-openstack-cell1-q85zb\" (UID: \"93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a\") " pod="openstack/install-os-openstack-openstack-cell1-q85zb" Jan 28 17:51:47 crc kubenswrapper[4811]: I0128 17:51:47.891033 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bfg8j\" (UniqueName: \"kubernetes.io/projected/93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a-kube-api-access-bfg8j\") pod \"install-os-openstack-openstack-cell1-q85zb\" (UID: \"93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a\") " pod="openstack/install-os-openstack-openstack-cell1-q85zb" Jan 28 17:51:47 crc 
kubenswrapper[4811]: I0128 17:51:47.953498 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-q85zb" Jan 28 17:51:48 crc kubenswrapper[4811]: I0128 17:51:48.527475 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-openstack-openstack-cell1-q85zb"] Jan 28 17:51:49 crc kubenswrapper[4811]: I0128 17:51:49.530772 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-q85zb" event={"ID":"93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a","Type":"ContainerStarted","Data":"e3d52f9e3a6a46078d08b4fe3551754fd60ba82a21d3d6d70f3461b94312a2a9"} Jan 28 17:51:50 crc kubenswrapper[4811]: I0128 17:51:50.541415 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-q85zb" event={"ID":"93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a","Type":"ContainerStarted","Data":"7b868848877200fbcc97064d98dbaef6fd27bc974eff28ed281dc3efc9d06667"} Jan 28 17:51:50 crc kubenswrapper[4811]: I0128 17:51:50.580696 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-openstack-openstack-cell1-q85zb" podStartSLOduration=2.869988026 podStartE2EDuration="3.580672482s" podCreationTimestamp="2026-01-28 17:51:47 +0000 UTC" firstStartedPulling="2026-01-28 17:51:48.542027841 +0000 UTC m=+7601.296391424" lastFinishedPulling="2026-01-28 17:51:49.252712297 +0000 UTC m=+7602.007075880" observedRunningTime="2026-01-28 17:51:50.559603202 +0000 UTC m=+7603.313966785" watchObservedRunningTime="2026-01-28 17:51:50.580672482 +0000 UTC m=+7603.335036065" Jan 28 17:51:56 crc kubenswrapper[4811]: I0128 17:51:56.301675 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ttwzb" Jan 28 17:51:56 crc kubenswrapper[4811]: I0128 17:51:56.370726 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ttwzb"] Jan 28 17:51:56 crc kubenswrapper[4811]: I0128 17:51:56.595800 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ttwzb" podUID="a70bc9d5-a38b-4078-8d84-0cb1e60c3055" containerName="registry-server" containerID="cri-o://d7785d58d46ea9a1d080fd4b1c809caaadd0d9d0fc51f24759a7d1ab07925d9c" gracePeriod=2 Jan 28 17:51:57 crc kubenswrapper[4811]: I0128 17:51:57.134026 4811 util.go:48] "No ready sandbox for pod can be found. 
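The "Killing container with a grace period ... gracePeriod=2" entry above follows the usual termination pattern: ask the process to exit, then force it once the grace window lapses. A generic Go sketch of the pattern; the kubelet actually does this through the CRI (cri-o here), not os/exec:

package main

import (
	"os/exec"
	"syscall"
	"time"
)

// killWithGrace sends SIGTERM, waits up to grace, then SIGKILLs: the
// generic shape of "Killing container with a grace period".
func killWithGrace(cmd *exec.Cmd, grace time.Duration) {
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()

	_ = cmd.Process.Signal(syscall.SIGTERM) // polite request first
	select {
	case <-done: // exited inside the grace window
	case <-time.After(grace):
		_ = cmd.Process.Kill() // grace expired: force
		<-done
	}
}

func main() {
	cmd := exec.Command("sleep", "60")
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	killWithGrace(cmd, 2*time.Second) // gracePeriod=2, as in the log
}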
Need to start a new one" pod="openshift-marketplace/community-operators-ttwzb" Jan 28 17:51:57 crc kubenswrapper[4811]: I0128 17:51:57.273503 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a70bc9d5-a38b-4078-8d84-0cb1e60c3055-utilities\") pod \"a70bc9d5-a38b-4078-8d84-0cb1e60c3055\" (UID: \"a70bc9d5-a38b-4078-8d84-0cb1e60c3055\") " Jan 28 17:51:57 crc kubenswrapper[4811]: I0128 17:51:57.273750 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a70bc9d5-a38b-4078-8d84-0cb1e60c3055-catalog-content\") pod \"a70bc9d5-a38b-4078-8d84-0cb1e60c3055\" (UID: \"a70bc9d5-a38b-4078-8d84-0cb1e60c3055\") " Jan 28 17:51:57 crc kubenswrapper[4811]: I0128 17:51:57.273836 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-np26v\" (UniqueName: \"kubernetes.io/projected/a70bc9d5-a38b-4078-8d84-0cb1e60c3055-kube-api-access-np26v\") pod \"a70bc9d5-a38b-4078-8d84-0cb1e60c3055\" (UID: \"a70bc9d5-a38b-4078-8d84-0cb1e60c3055\") " Jan 28 17:51:57 crc kubenswrapper[4811]: I0128 17:51:57.274495 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a70bc9d5-a38b-4078-8d84-0cb1e60c3055-utilities" (OuterVolumeSpecName: "utilities") pod "a70bc9d5-a38b-4078-8d84-0cb1e60c3055" (UID: "a70bc9d5-a38b-4078-8d84-0cb1e60c3055"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:51:57 crc kubenswrapper[4811]: I0128 17:51:57.281660 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a70bc9d5-a38b-4078-8d84-0cb1e60c3055-kube-api-access-np26v" (OuterVolumeSpecName: "kube-api-access-np26v") pod "a70bc9d5-a38b-4078-8d84-0cb1e60c3055" (UID: "a70bc9d5-a38b-4078-8d84-0cb1e60c3055"). InnerVolumeSpecName "kube-api-access-np26v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:51:57 crc kubenswrapper[4811]: I0128 17:51:57.331544 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a70bc9d5-a38b-4078-8d84-0cb1e60c3055-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a70bc9d5-a38b-4078-8d84-0cb1e60c3055" (UID: "a70bc9d5-a38b-4078-8d84-0cb1e60c3055"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 17:51:57 crc kubenswrapper[4811]: I0128 17:51:57.376587 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a70bc9d5-a38b-4078-8d84-0cb1e60c3055-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 17:51:57 crc kubenswrapper[4811]: I0128 17:51:57.376623 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-np26v\" (UniqueName: \"kubernetes.io/projected/a70bc9d5-a38b-4078-8d84-0cb1e60c3055-kube-api-access-np26v\") on node \"crc\" DevicePath \"\"" Jan 28 17:51:57 crc kubenswrapper[4811]: I0128 17:51:57.376637 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a70bc9d5-a38b-4078-8d84-0cb1e60c3055-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 17:51:57 crc kubenswrapper[4811]: I0128 17:51:57.604952 4811 generic.go:334] "Generic (PLEG): container finished" podID="a70bc9d5-a38b-4078-8d84-0cb1e60c3055" containerID="d7785d58d46ea9a1d080fd4b1c809caaadd0d9d0fc51f24759a7d1ab07925d9c" exitCode=0 Jan 28 17:51:57 crc kubenswrapper[4811]: I0128 17:51:57.604999 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ttwzb" event={"ID":"a70bc9d5-a38b-4078-8d84-0cb1e60c3055","Type":"ContainerDied","Data":"d7785d58d46ea9a1d080fd4b1c809caaadd0d9d0fc51f24759a7d1ab07925d9c"} Jan 28 17:51:57 crc kubenswrapper[4811]: I0128 17:51:57.605065 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ttwzb" event={"ID":"a70bc9d5-a38b-4078-8d84-0cb1e60c3055","Type":"ContainerDied","Data":"0965d61baece15e3b7ace0afee163c0a86f8321554f65d7298a8f70cca8b0b12"} Jan 28 17:51:57 crc kubenswrapper[4811]: I0128 17:51:57.605084 4811 scope.go:117] "RemoveContainer" containerID="d7785d58d46ea9a1d080fd4b1c809caaadd0d9d0fc51f24759a7d1ab07925d9c" Jan 28 17:51:57 crc kubenswrapper[4811]: I0128 17:51:57.605377 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ttwzb" Jan 28 17:51:57 crc kubenswrapper[4811]: I0128 17:51:57.632277 4811 scope.go:117] "RemoveContainer" containerID="884488fbbeec822791d836463e1470abf23c557addf56f45713c1d735ee5aadd" Jan 28 17:51:57 crc kubenswrapper[4811]: I0128 17:51:57.645684 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ttwzb"] Jan 28 17:51:57 crc kubenswrapper[4811]: E0128 17:51:57.645739 4811 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda70bc9d5_a38b_4078_8d84_0cb1e60c3055.slice\": RecentStats: unable to find data in memory cache]" Jan 28 17:51:57 crc kubenswrapper[4811]: I0128 17:51:57.658165 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ttwzb"] Jan 28 17:51:57 crc kubenswrapper[4811]: I0128 17:51:57.681002 4811 scope.go:117] "RemoveContainer" containerID="75aaaa39149cda659a9a6e41829c9791903c38c44cdc92cb1b301fc532d88a8f" Jan 28 17:51:57 crc kubenswrapper[4811]: I0128 17:51:57.731888 4811 scope.go:117] "RemoveContainer" containerID="d7785d58d46ea9a1d080fd4b1c809caaadd0d9d0fc51f24759a7d1ab07925d9c" Jan 28 17:51:57 crc kubenswrapper[4811]: E0128 17:51:57.733013 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7785d58d46ea9a1d080fd4b1c809caaadd0d9d0fc51f24759a7d1ab07925d9c\": container with ID starting with d7785d58d46ea9a1d080fd4b1c809caaadd0d9d0fc51f24759a7d1ab07925d9c not found: ID does not exist" containerID="d7785d58d46ea9a1d080fd4b1c809caaadd0d9d0fc51f24759a7d1ab07925d9c" Jan 28 17:51:57 crc kubenswrapper[4811]: I0128 17:51:57.733054 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7785d58d46ea9a1d080fd4b1c809caaadd0d9d0fc51f24759a7d1ab07925d9c"} err="failed to get container status \"d7785d58d46ea9a1d080fd4b1c809caaadd0d9d0fc51f24759a7d1ab07925d9c\": rpc error: code = NotFound desc = could not find container \"d7785d58d46ea9a1d080fd4b1c809caaadd0d9d0fc51f24759a7d1ab07925d9c\": container with ID starting with d7785d58d46ea9a1d080fd4b1c809caaadd0d9d0fc51f24759a7d1ab07925d9c not found: ID does not exist" Jan 28 17:51:57 crc kubenswrapper[4811]: I0128 17:51:57.733081 4811 scope.go:117] "RemoveContainer" containerID="884488fbbeec822791d836463e1470abf23c557addf56f45713c1d735ee5aadd" Jan 28 17:51:57 crc kubenswrapper[4811]: E0128 17:51:57.733445 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"884488fbbeec822791d836463e1470abf23c557addf56f45713c1d735ee5aadd\": container with ID starting with 884488fbbeec822791d836463e1470abf23c557addf56f45713c1d735ee5aadd not found: ID does not exist" containerID="884488fbbeec822791d836463e1470abf23c557addf56f45713c1d735ee5aadd" Jan 28 17:51:57 crc kubenswrapper[4811]: I0128 17:51:57.733472 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"884488fbbeec822791d836463e1470abf23c557addf56f45713c1d735ee5aadd"} err="failed to get container status \"884488fbbeec822791d836463e1470abf23c557addf56f45713c1d735ee5aadd\": rpc error: code = NotFound desc = could not find container \"884488fbbeec822791d836463e1470abf23c557addf56f45713c1d735ee5aadd\": container with ID starting with 
884488fbbeec822791d836463e1470abf23c557addf56f45713c1d735ee5aadd not found: ID does not exist" Jan 28 17:51:57 crc kubenswrapper[4811]: I0128 17:51:57.733489 4811 scope.go:117] "RemoveContainer" containerID="75aaaa39149cda659a9a6e41829c9791903c38c44cdc92cb1b301fc532d88a8f" Jan 28 17:51:57 crc kubenswrapper[4811]: E0128 17:51:57.733955 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75aaaa39149cda659a9a6e41829c9791903c38c44cdc92cb1b301fc532d88a8f\": container with ID starting with 75aaaa39149cda659a9a6e41829c9791903c38c44cdc92cb1b301fc532d88a8f not found: ID does not exist" containerID="75aaaa39149cda659a9a6e41829c9791903c38c44cdc92cb1b301fc532d88a8f" Jan 28 17:51:57 crc kubenswrapper[4811]: I0128 17:51:57.734001 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75aaaa39149cda659a9a6e41829c9791903c38c44cdc92cb1b301fc532d88a8f"} err="failed to get container status \"75aaaa39149cda659a9a6e41829c9791903c38c44cdc92cb1b301fc532d88a8f\": rpc error: code = NotFound desc = could not find container \"75aaaa39149cda659a9a6e41829c9791903c38c44cdc92cb1b301fc532d88a8f\": container with ID starting with 75aaaa39149cda659a9a6e41829c9791903c38c44cdc92cb1b301fc532d88a8f not found: ID does not exist" Jan 28 17:51:58 crc kubenswrapper[4811]: I0128 17:51:58.358352 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a70bc9d5-a38b-4078-8d84-0cb1e60c3055" path="/var/lib/kubelet/pods/a70bc9d5-a38b-4078-8d84-0cb1e60c3055/volumes" Jan 28 17:52:03 crc kubenswrapper[4811]: I0128 17:52:03.087255 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 17:52:03 crc kubenswrapper[4811]: I0128 17:52:03.087797 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 17:52:33 crc kubenswrapper[4811]: I0128 17:52:33.087476 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 17:52:33 crc kubenswrapper[4811]: I0128 17:52:33.087980 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 17:52:33 crc kubenswrapper[4811]: I0128 17:52:33.988596 4811 generic.go:334] "Generic (PLEG): container finished" podID="93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a" containerID="7b868848877200fbcc97064d98dbaef6fd27bc974eff28ed281dc3efc9d06667" exitCode=0 Jan 28 17:52:33 crc kubenswrapper[4811]: I0128 17:52:33.988828 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-q85zb" 
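The machine-config-daemon liveness failures above are ordinary HTTP GETs against http://127.0.0.1:8798/health that cannot connect. A sketch of one such probe attempt using only the standard library (the probe URL is taken from the log entry):

package main

import (
	"fmt"
	"net/http"
	"time"
)

// probeOnce performs a single HTTP liveness check; any transport error
// (here: connection refused) counts as a failure, as prober.go reports.
func probeOnce(url string) string {
	client := &http.Client{Timeout: time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return fmt.Sprintf("failure: %v", err)
	}
	defer resp.Body.Close()
	if resp.StatusCode >= 200 && resp.StatusCode < 400 {
		return "success"
	}
	return fmt.Sprintf("failure: HTTP %d", resp.StatusCode)
}

func main() {
	fmt.Println(probeOnce("http://127.0.0.1:8798/health"))
}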
Jan 28 17:52:33 crc kubenswrapper[4811]: I0128 17:52:33.988596 4811 generic.go:334] "Generic (PLEG): container finished" podID="93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a" containerID="7b868848877200fbcc97064d98dbaef6fd27bc974eff28ed281dc3efc9d06667" exitCode=0
Jan 28 17:52:33 crc kubenswrapper[4811]: I0128 17:52:33.988828 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-q85zb" event={"ID":"93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a","Type":"ContainerDied","Data":"7b868848877200fbcc97064d98dbaef6fd27bc974eff28ed281dc3efc9d06667"}
Jan 28 17:52:35 crc kubenswrapper[4811]: I0128 17:52:35.556110 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-q85zb"
Jan 28 17:52:35 crc kubenswrapper[4811]: I0128 17:52:35.729787 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-jpc7c"]
Jan 28 17:52:35 crc kubenswrapper[4811]: E0128 17:52:35.730340 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a70bc9d5-a38b-4078-8d84-0cb1e60c3055" containerName="registry-server"
Jan 28 17:52:35 crc kubenswrapper[4811]: I0128 17:52:35.730361 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="a70bc9d5-a38b-4078-8d84-0cb1e60c3055" containerName="registry-server"
Jan 28 17:52:35 crc kubenswrapper[4811]: E0128 17:52:35.730391 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a70bc9d5-a38b-4078-8d84-0cb1e60c3055" containerName="extract-content"
Jan 28 17:52:35 crc kubenswrapper[4811]: I0128 17:52:35.730400 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="a70bc9d5-a38b-4078-8d84-0cb1e60c3055" containerName="extract-content"
Jan 28 17:52:35 crc kubenswrapper[4811]: E0128 17:52:35.731467 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a" containerName="install-os-openstack-openstack-cell1"
Jan 28 17:52:35 crc kubenswrapper[4811]: I0128 17:52:35.731488 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a" containerName="install-os-openstack-openstack-cell1"
Jan 28 17:52:35 crc kubenswrapper[4811]: E0128 17:52:35.731507 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a70bc9d5-a38b-4078-8d84-0cb1e60c3055" containerName="extract-utilities"
Jan 28 17:52:35 crc kubenswrapper[4811]: I0128 17:52:35.731517 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="a70bc9d5-a38b-4078-8d84-0cb1e60c3055" containerName="extract-utilities"
Jan 28 17:52:35 crc kubenswrapper[4811]: I0128 17:52:35.731821 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a" containerName="install-os-openstack-openstack-cell1"
Jan 28 17:52:35 crc kubenswrapper[4811]: I0128 17:52:35.731850 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="a70bc9d5-a38b-4078-8d84-0cb1e60c3055" containerName="registry-server"
Jan 28 17:52:35 crc kubenswrapper[4811]: I0128 17:52:35.733704 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a-ssh-key-openstack-cell1\") pod \"93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a\" (UID: \"93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a\") "
Jan 28 17:52:35 crc kubenswrapper[4811]: I0128 17:52:35.733841 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a-inventory\") pod \"93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a\" (UID: \"93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a\") "
Jan 28 17:52:35 crc kubenswrapper[4811]: I0128 17:52:35.733879 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bfg8j\" (UniqueName: \"kubernetes.io/projected/93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a-kube-api-access-bfg8j\") pod \"93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a\" (UID: \"93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a\") "
Jan 28 17:52:35 crc kubenswrapper[4811]: I0128 17:52:35.734017 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a-ceph\") pod \"93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a\" (UID: \"93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a\") "
Jan 28 17:52:35 crc kubenswrapper[4811]: I0128 17:52:35.734825 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jpc7c"
Jan 28 17:52:35 crc kubenswrapper[4811]: I0128 17:52:35.739825 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a-kube-api-access-bfg8j" (OuterVolumeSpecName: "kube-api-access-bfg8j") pod "93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a" (UID: "93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a"). InnerVolumeSpecName "kube-api-access-bfg8j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 17:52:35 crc kubenswrapper[4811]: I0128 17:52:35.776615 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a-ceph" (OuterVolumeSpecName: "ceph") pod "93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a" (UID: "93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:52:35 crc kubenswrapper[4811]: I0128 17:52:35.777146 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jpc7c"]
Jan 28 17:52:35 crc kubenswrapper[4811]: I0128 17:52:35.805703 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a" (UID: "93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:52:35 crc kubenswrapper[4811]: I0128 17:52:35.836193 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67fdb606-03f0-47b1-9bba-73a00c14248d-catalog-content\") pod \"redhat-marketplace-jpc7c\" (UID: \"67fdb606-03f0-47b1-9bba-73a00c14248d\") " pod="openshift-marketplace/redhat-marketplace-jpc7c"
Jan 28 17:52:35 crc kubenswrapper[4811]: I0128 17:52:35.836309 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlv6m\" (UniqueName: \"kubernetes.io/projected/67fdb606-03f0-47b1-9bba-73a00c14248d-kube-api-access-zlv6m\") pod \"redhat-marketplace-jpc7c\" (UID: \"67fdb606-03f0-47b1-9bba-73a00c14248d\") " pod="openshift-marketplace/redhat-marketplace-jpc7c"
Jan 28 17:52:35 crc kubenswrapper[4811]: I0128 17:52:35.836347 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67fdb606-03f0-47b1-9bba-73a00c14248d-utilities\") pod \"redhat-marketplace-jpc7c\" (UID: \"67fdb606-03f0-47b1-9bba-73a00c14248d\") " pod="openshift-marketplace/redhat-marketplace-jpc7c"
Jan 28 17:52:35 crc kubenswrapper[4811]: I0128 17:52:35.836454 4811 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\""
Jan 28 17:52:35 crc kubenswrapper[4811]: I0128 17:52:35.836471 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bfg8j\" (UniqueName: \"kubernetes.io/projected/93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a-kube-api-access-bfg8j\") on node \"crc\" DevicePath \"\""
Jan 28 17:52:35 crc kubenswrapper[4811]: I0128 17:52:35.836483 4811 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a-ceph\") on node \"crc\" DevicePath \"\""
Jan 28 17:52:35 crc kubenswrapper[4811]: I0128 17:52:35.845494 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a-inventory" (OuterVolumeSpecName: "inventory") pod "93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a" (UID: "93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:52:35 crc kubenswrapper[4811]: I0128 17:52:35.937883 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67fdb606-03f0-47b1-9bba-73a00c14248d-catalog-content\") pod \"redhat-marketplace-jpc7c\" (UID: \"67fdb606-03f0-47b1-9bba-73a00c14248d\") " pod="openshift-marketplace/redhat-marketplace-jpc7c"
Jan 28 17:52:35 crc kubenswrapper[4811]: I0128 17:52:35.938008 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlv6m\" (UniqueName: \"kubernetes.io/projected/67fdb606-03f0-47b1-9bba-73a00c14248d-kube-api-access-zlv6m\") pod \"redhat-marketplace-jpc7c\" (UID: \"67fdb606-03f0-47b1-9bba-73a00c14248d\") " pod="openshift-marketplace/redhat-marketplace-jpc7c"
Jan 28 17:52:35 crc kubenswrapper[4811]: I0128 17:52:35.938063 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67fdb606-03f0-47b1-9bba-73a00c14248d-utilities\") pod \"redhat-marketplace-jpc7c\" (UID: \"67fdb606-03f0-47b1-9bba-73a00c14248d\") " pod="openshift-marketplace/redhat-marketplace-jpc7c"
Jan 28 17:52:35 crc kubenswrapper[4811]: I0128 17:52:35.938287 4811 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a-inventory\") on node \"crc\" DevicePath \"\""
Jan 28 17:52:35 crc kubenswrapper[4811]: I0128 17:52:35.938520 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67fdb606-03f0-47b1-9bba-73a00c14248d-catalog-content\") pod \"redhat-marketplace-jpc7c\" (UID: \"67fdb606-03f0-47b1-9bba-73a00c14248d\") " pod="openshift-marketplace/redhat-marketplace-jpc7c"
Jan 28 17:52:35 crc kubenswrapper[4811]: I0128 17:52:35.938536 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67fdb606-03f0-47b1-9bba-73a00c14248d-utilities\") pod \"redhat-marketplace-jpc7c\" (UID: \"67fdb606-03f0-47b1-9bba-73a00c14248d\") " pod="openshift-marketplace/redhat-marketplace-jpc7c"
Jan 28 17:52:35 crc kubenswrapper[4811]: I0128 17:52:35.955908 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zlv6m\" (UniqueName: \"kubernetes.io/projected/67fdb606-03f0-47b1-9bba-73a00c14248d-kube-api-access-zlv6m\") pod \"redhat-marketplace-jpc7c\" (UID: \"67fdb606-03f0-47b1-9bba-73a00c14248d\") " pod="openshift-marketplace/redhat-marketplace-jpc7c"
Jan 28 17:52:36 crc kubenswrapper[4811]: I0128 17:52:36.008867 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-q85zb" event={"ID":"93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a","Type":"ContainerDied","Data":"e3d52f9e3a6a46078d08b4fe3551754fd60ba82a21d3d6d70f3461b94312a2a9"}
Jan 28 17:52:36 crc kubenswrapper[4811]: I0128 17:52:36.008909 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e3d52f9e3a6a46078d08b4fe3551754fd60ba82a21d3d6d70f3461b94312a2a9"
Jan 28 17:52:36 crc kubenswrapper[4811]: I0128 17:52:36.008949 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-q85zb"
Jan 28 17:52:36 crc kubenswrapper[4811]: I0128 17:52:36.098528 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-jwm6n"]
Jan 28 17:52:36 crc kubenswrapper[4811]: I0128 17:52:36.100071 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-jwm6n"
Jan 28 17:52:36 crc kubenswrapper[4811]: I0128 17:52:36.102200 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Jan 28 17:52:36 crc kubenswrapper[4811]: I0128 17:52:36.102462 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-856xk"
Jan 28 17:52:36 crc kubenswrapper[4811]: I0128 17:52:36.102621 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Jan 28 17:52:36 crc kubenswrapper[4811]: I0128 17:52:36.102775 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Jan 28 17:52:36 crc kubenswrapper[4811]: I0128 17:52:36.108175 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-jwm6n"]
Jan 28 17:52:36 crc kubenswrapper[4811]: I0128 17:52:36.226845 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jpc7c"
Jan 28 17:52:36 crc kubenswrapper[4811]: I0128 17:52:36.246456 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/04a4df18-f769-4741-91e3-734245e3f14f-ceph\") pod \"configure-os-openstack-openstack-cell1-jwm6n\" (UID: \"04a4df18-f769-4741-91e3-734245e3f14f\") " pod="openstack/configure-os-openstack-openstack-cell1-jwm6n"
Jan 28 17:52:36 crc kubenswrapper[4811]: I0128 17:52:36.246570 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kvlh\" (UniqueName: \"kubernetes.io/projected/04a4df18-f769-4741-91e3-734245e3f14f-kube-api-access-6kvlh\") pod \"configure-os-openstack-openstack-cell1-jwm6n\" (UID: \"04a4df18-f769-4741-91e3-734245e3f14f\") " pod="openstack/configure-os-openstack-openstack-cell1-jwm6n"
Jan 28 17:52:36 crc kubenswrapper[4811]: I0128 17:52:36.246701 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/04a4df18-f769-4741-91e3-734245e3f14f-ssh-key-openstack-cell1\") pod \"configure-os-openstack-openstack-cell1-jwm6n\" (UID: \"04a4df18-f769-4741-91e3-734245e3f14f\") " pod="openstack/configure-os-openstack-openstack-cell1-jwm6n"
Jan 28 17:52:36 crc kubenswrapper[4811]: I0128 17:52:36.246734 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/04a4df18-f769-4741-91e3-734245e3f14f-inventory\") pod \"configure-os-openstack-openstack-cell1-jwm6n\" (UID: \"04a4df18-f769-4741-91e3-734245e3f14f\") " pod="openstack/configure-os-openstack-openstack-cell1-jwm6n"
Jan 28 17:52:36 crc kubenswrapper[4811]: I0128 17:52:36.348210 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/04a4df18-f769-4741-91e3-734245e3f14f-ssh-key-openstack-cell1\") pod \"configure-os-openstack-openstack-cell1-jwm6n\" (UID: \"04a4df18-f769-4741-91e3-734245e3f14f\") " pod="openstack/configure-os-openstack-openstack-cell1-jwm6n"
Jan 28 17:52:36 crc kubenswrapper[4811]: I0128 17:52:36.348632 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/04a4df18-f769-4741-91e3-734245e3f14f-inventory\") pod \"configure-os-openstack-openstack-cell1-jwm6n\" (UID: \"04a4df18-f769-4741-91e3-734245e3f14f\") " pod="openstack/configure-os-openstack-openstack-cell1-jwm6n"
Jan 28 17:52:36 crc kubenswrapper[4811]: I0128 17:52:36.348777 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/04a4df18-f769-4741-91e3-734245e3f14f-ceph\") pod \"configure-os-openstack-openstack-cell1-jwm6n\" (UID: \"04a4df18-f769-4741-91e3-734245e3f14f\") " pod="openstack/configure-os-openstack-openstack-cell1-jwm6n"
Jan 28 17:52:36 crc kubenswrapper[4811]: I0128 17:52:36.348933 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kvlh\" (UniqueName: \"kubernetes.io/projected/04a4df18-f769-4741-91e3-734245e3f14f-kube-api-access-6kvlh\") pod \"configure-os-openstack-openstack-cell1-jwm6n\" (UID: \"04a4df18-f769-4741-91e3-734245e3f14f\") " pod="openstack/configure-os-openstack-openstack-cell1-jwm6n"
Jan 28 17:52:36 crc kubenswrapper[4811]: I0128 17:52:36.353217 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/04a4df18-f769-4741-91e3-734245e3f14f-ssh-key-openstack-cell1\") pod \"configure-os-openstack-openstack-cell1-jwm6n\" (UID: \"04a4df18-f769-4741-91e3-734245e3f14f\") " pod="openstack/configure-os-openstack-openstack-cell1-jwm6n"
Jan 28 17:52:36 crc kubenswrapper[4811]: I0128 17:52:36.353818 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/04a4df18-f769-4741-91e3-734245e3f14f-ceph\") pod \"configure-os-openstack-openstack-cell1-jwm6n\" (UID: \"04a4df18-f769-4741-91e3-734245e3f14f\") " pod="openstack/configure-os-openstack-openstack-cell1-jwm6n"
Jan 28 17:52:36 crc kubenswrapper[4811]: I0128 17:52:36.353883 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/04a4df18-f769-4741-91e3-734245e3f14f-inventory\") pod \"configure-os-openstack-openstack-cell1-jwm6n\" (UID: \"04a4df18-f769-4741-91e3-734245e3f14f\") " pod="openstack/configure-os-openstack-openstack-cell1-jwm6n"
Jan 28 17:52:36 crc kubenswrapper[4811]: I0128 17:52:36.365526 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kvlh\" (UniqueName: \"kubernetes.io/projected/04a4df18-f769-4741-91e3-734245e3f14f-kube-api-access-6kvlh\") pod \"configure-os-openstack-openstack-cell1-jwm6n\" (UID: \"04a4df18-f769-4741-91e3-734245e3f14f\") " pod="openstack/configure-os-openstack-openstack-cell1-jwm6n"
Jan 28 17:52:36 crc kubenswrapper[4811]: I0128 17:52:36.417447 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-jwm6n"
Jan 28 17:52:36 crc kubenswrapper[4811]: I0128 17:52:36.692615 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jpc7c"]
Jan 28 17:52:37 crc kubenswrapper[4811]: I0128 17:52:37.015681 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-jwm6n"]
Jan 28 17:52:37 crc kubenswrapper[4811]: I0128 17:52:37.020767 4811 generic.go:334] "Generic (PLEG): container finished" podID="67fdb606-03f0-47b1-9bba-73a00c14248d" containerID="143fff6e446d47f8ef043f1eeba458c8c8c30b6aebd82820fe2eccdbd44478a8" exitCode=0
Jan 28 17:52:37 crc kubenswrapper[4811]: I0128 17:52:37.020815 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jpc7c" event={"ID":"67fdb606-03f0-47b1-9bba-73a00c14248d","Type":"ContainerDied","Data":"143fff6e446d47f8ef043f1eeba458c8c8c30b6aebd82820fe2eccdbd44478a8"}
Jan 28 17:52:37 crc kubenswrapper[4811]: I0128 17:52:37.020842 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jpc7c" event={"ID":"67fdb606-03f0-47b1-9bba-73a00c14248d","Type":"ContainerStarted","Data":"341ba3823484062f40b06c6f97ab61421fd76a3a59a3c2b87843042421e95e5d"}
Jan 28 17:52:37 crc kubenswrapper[4811]: W0128 17:52:37.047043 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod04a4df18_f769_4741_91e3_734245e3f14f.slice/crio-5b0bb8f0e5a0f6237776aed52c5101198daa7baaa95811f58de110bdcf90e7d0 WatchSource:0}: Error finding container 5b0bb8f0e5a0f6237776aed52c5101198daa7baaa95811f58de110bdcf90e7d0: Status 404 returned error can't find the container with id 5b0bb8f0e5a0f6237776aed52c5101198daa7baaa95811f58de110bdcf90e7d0
Jan 28 17:52:38 crc kubenswrapper[4811]: I0128 17:52:38.037419 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-jwm6n" event={"ID":"04a4df18-f769-4741-91e3-734245e3f14f","Type":"ContainerStarted","Data":"5b0bb8f0e5a0f6237776aed52c5101198daa7baaa95811f58de110bdcf90e7d0"}
Jan 28 17:52:39 crc kubenswrapper[4811]: I0128 17:52:39.047525 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-jwm6n" event={"ID":"04a4df18-f769-4741-91e3-734245e3f14f","Type":"ContainerStarted","Data":"108d08f8073375d1dab65a7e4ff77cff3464ffe87b71bbda4152950b8a099c11"}
Jan 28 17:52:39 crc kubenswrapper[4811]: I0128 17:52:39.050188 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jpc7c" event={"ID":"67fdb606-03f0-47b1-9bba-73a00c14248d","Type":"ContainerStarted","Data":"6625fe0bb74ac7f47cac81a9a893842e088fea120d53a0632e4939c1d6705362"}
Jan 28 17:52:39 crc kubenswrapper[4811]: I0128 17:52:39.075568 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-openstack-openstack-cell1-jwm6n" podStartSLOduration=2.215646478 podStartE2EDuration="3.075541231s" podCreationTimestamp="2026-01-28 17:52:36 +0000 UTC" firstStartedPulling="2026-01-28 17:52:37.049372167 +0000 UTC m=+7649.803735750" lastFinishedPulling="2026-01-28 17:52:37.90926691 +0000 UTC m=+7650.663630503" observedRunningTime="2026-01-28 17:52:39.069051845 +0000 UTC m=+7651.823415438" watchObservedRunningTime="2026-01-28 17:52:39.075541231 +0000 UTC m=+7651.829904814"
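[Editor's note] The "Observed pod startup duration" entry just above reports both podStartE2EDuration="3.075541231s" and podStartSLOduration=2.215646478; the SLO figure is the end-to-end duration minus the image-pull window bounded by firstStartedPulling and lastFinishedPulling. A short Go check of that arithmetic using only the timestamps printed in the entry (the subtraction is a reading of the tracker's output, not code taken from the kubelet):

    // sloduration.go - reproduce the podStartSLOduration arithmetic from
    // the "Observed pod startup duration" entry above.
    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        layout := "2006-01-02 15:04:05.999999999 -0700 MST"
        first, _ := time.Parse(layout, "2026-01-28 17:52:37.049372167 +0000 UTC")
        last, _ := time.Parse(layout, "2026-01-28 17:52:37.90926691 +0000 UTC")
        e2e := 3.075541231 // podStartE2EDuration in seconds, from the log
        pull := last.Sub(first).Seconds()
        fmt.Printf("E2E %.9fs - pull %.9fs = SLO %.9fs\n", e2e, pull, e2e-pull)
        // Prints SLO ~2.215646488s, matching podStartSLOduration=2.215646478
        // up to float rounding: the SLO clock excludes image-pull time.
    }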
Jan 28 17:52:41 crc kubenswrapper[4811]: I0128 17:52:41.071329 4811 generic.go:334] "Generic (PLEG): container finished" podID="67fdb606-03f0-47b1-9bba-73a00c14248d" containerID="6625fe0bb74ac7f47cac81a9a893842e088fea120d53a0632e4939c1d6705362" exitCode=0
Jan 28 17:52:41 crc kubenswrapper[4811]: I0128 17:52:41.071397 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jpc7c" event={"ID":"67fdb606-03f0-47b1-9bba-73a00c14248d","Type":"ContainerDied","Data":"6625fe0bb74ac7f47cac81a9a893842e088fea120d53a0632e4939c1d6705362"}
Jan 28 17:52:42 crc kubenswrapper[4811]: I0128 17:52:42.084424 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jpc7c" event={"ID":"67fdb606-03f0-47b1-9bba-73a00c14248d","Type":"ContainerStarted","Data":"4e710d6c576f26b55d0c119c620535d2ca7499c3d492e6538f5e939ea5556bf6"}
Jan 28 17:52:42 crc kubenswrapper[4811]: I0128 17:52:42.108771 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-jpc7c" podStartSLOduration=2.5658822199999998 podStartE2EDuration="7.108750268s" podCreationTimestamp="2026-01-28 17:52:35 +0000 UTC" firstStartedPulling="2026-01-28 17:52:37.022696455 +0000 UTC m=+7649.777060038" lastFinishedPulling="2026-01-28 17:52:41.565564503 +0000 UTC m=+7654.319928086" observedRunningTime="2026-01-28 17:52:42.103668061 +0000 UTC m=+7654.858031654" watchObservedRunningTime="2026-01-28 17:52:42.108750268 +0000 UTC m=+7654.863113851"
Jan 28 17:52:46 crc kubenswrapper[4811]: I0128 17:52:46.227559 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-jpc7c"
Jan 28 17:52:46 crc kubenswrapper[4811]: I0128 17:52:46.228162 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-jpc7c"
Jan 28 17:52:46 crc kubenswrapper[4811]: I0128 17:52:46.277016 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-jpc7c"
Jan 28 17:52:47 crc kubenswrapper[4811]: I0128 17:52:47.224884 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-jpc7c"
Jan 28 17:52:47 crc kubenswrapper[4811]: I0128 17:52:47.279942 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jpc7c"]
Jan 28 17:52:49 crc kubenswrapper[4811]: I0128 17:52:49.152019 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-jpc7c" podUID="67fdb606-03f0-47b1-9bba-73a00c14248d" containerName="registry-server" containerID="cri-o://4e710d6c576f26b55d0c119c620535d2ca7499c3d492e6538f5e939ea5556bf6" gracePeriod=2
Jan 28 17:52:49 crc kubenswrapper[4811]: I0128 17:52:49.812345 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jpc7c"
Jan 28 17:52:49 crc kubenswrapper[4811]: I0128 17:52:49.996688 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zlv6m\" (UniqueName: \"kubernetes.io/projected/67fdb606-03f0-47b1-9bba-73a00c14248d-kube-api-access-zlv6m\") pod \"67fdb606-03f0-47b1-9bba-73a00c14248d\" (UID: \"67fdb606-03f0-47b1-9bba-73a00c14248d\") "
Jan 28 17:52:49 crc kubenswrapper[4811]: I0128 17:52:49.998762 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67fdb606-03f0-47b1-9bba-73a00c14248d-utilities\") pod \"67fdb606-03f0-47b1-9bba-73a00c14248d\" (UID: \"67fdb606-03f0-47b1-9bba-73a00c14248d\") "
Jan 28 17:52:49 crc kubenswrapper[4811]: I0128 17:52:49.998945 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67fdb606-03f0-47b1-9bba-73a00c14248d-catalog-content\") pod \"67fdb606-03f0-47b1-9bba-73a00c14248d\" (UID: \"67fdb606-03f0-47b1-9bba-73a00c14248d\") "
Jan 28 17:52:50 crc kubenswrapper[4811]: I0128 17:52:50.000379 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67fdb606-03f0-47b1-9bba-73a00c14248d-utilities" (OuterVolumeSpecName: "utilities") pod "67fdb606-03f0-47b1-9bba-73a00c14248d" (UID: "67fdb606-03f0-47b1-9bba-73a00c14248d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 17:52:50 crc kubenswrapper[4811]: I0128 17:52:50.003346 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67fdb606-03f0-47b1-9bba-73a00c14248d-kube-api-access-zlv6m" (OuterVolumeSpecName: "kube-api-access-zlv6m") pod "67fdb606-03f0-47b1-9bba-73a00c14248d" (UID: "67fdb606-03f0-47b1-9bba-73a00c14248d"). InnerVolumeSpecName "kube-api-access-zlv6m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 17:52:50 crc kubenswrapper[4811]: I0128 17:52:50.024381 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67fdb606-03f0-47b1-9bba-73a00c14248d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "67fdb606-03f0-47b1-9bba-73a00c14248d" (UID: "67fdb606-03f0-47b1-9bba-73a00c14248d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 17:52:50 crc kubenswrapper[4811]: I0128 17:52:50.101956 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67fdb606-03f0-47b1-9bba-73a00c14248d-utilities\") on node \"crc\" DevicePath \"\""
Jan 28 17:52:50 crc kubenswrapper[4811]: I0128 17:52:50.102199 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67fdb606-03f0-47b1-9bba-73a00c14248d-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 28 17:52:50 crc kubenswrapper[4811]: I0128 17:52:50.102259 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zlv6m\" (UniqueName: \"kubernetes.io/projected/67fdb606-03f0-47b1-9bba-73a00c14248d-kube-api-access-zlv6m\") on node \"crc\" DevicePath \"\""
Jan 28 17:52:50 crc kubenswrapper[4811]: I0128 17:52:50.165900 4811 generic.go:334] "Generic (PLEG): container finished" podID="67fdb606-03f0-47b1-9bba-73a00c14248d" containerID="4e710d6c576f26b55d0c119c620535d2ca7499c3d492e6538f5e939ea5556bf6" exitCode=0
Jan 28 17:52:50 crc kubenswrapper[4811]: I0128 17:52:50.165971 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jpc7c" event={"ID":"67fdb606-03f0-47b1-9bba-73a00c14248d","Type":"ContainerDied","Data":"4e710d6c576f26b55d0c119c620535d2ca7499c3d492e6538f5e939ea5556bf6"}
Jan 28 17:52:50 crc kubenswrapper[4811]: I0128 17:52:50.165993 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jpc7c"
Jan 28 17:52:50 crc kubenswrapper[4811]: I0128 17:52:50.166054 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jpc7c" event={"ID":"67fdb606-03f0-47b1-9bba-73a00c14248d","Type":"ContainerDied","Data":"341ba3823484062f40b06c6f97ab61421fd76a3a59a3c2b87843042421e95e5d"}
Jan 28 17:52:50 crc kubenswrapper[4811]: I0128 17:52:50.166088 4811 scope.go:117] "RemoveContainer" containerID="4e710d6c576f26b55d0c119c620535d2ca7499c3d492e6538f5e939ea5556bf6"
Jan 28 17:52:50 crc kubenswrapper[4811]: I0128 17:52:50.194361 4811 scope.go:117] "RemoveContainer" containerID="6625fe0bb74ac7f47cac81a9a893842e088fea120d53a0632e4939c1d6705362"
Jan 28 17:52:50 crc kubenswrapper[4811]: I0128 17:52:50.206911 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jpc7c"]
Jan 28 17:52:50 crc kubenswrapper[4811]: I0128 17:52:50.215800 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-jpc7c"]
Jan 28 17:52:50 crc kubenswrapper[4811]: I0128 17:52:50.223385 4811 scope.go:117] "RemoveContainer" containerID="143fff6e446d47f8ef043f1eeba458c8c8c30b6aebd82820fe2eccdbd44478a8"
Jan 28 17:52:50 crc kubenswrapper[4811]: I0128 17:52:50.273018 4811 scope.go:117] "RemoveContainer" containerID="4e710d6c576f26b55d0c119c620535d2ca7499c3d492e6538f5e939ea5556bf6"
Jan 28 17:52:50 crc kubenswrapper[4811]: E0128 17:52:50.273913 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e710d6c576f26b55d0c119c620535d2ca7499c3d492e6538f5e939ea5556bf6\": container with ID starting with 4e710d6c576f26b55d0c119c620535d2ca7499c3d492e6538f5e939ea5556bf6 not found: ID does not exist" containerID="4e710d6c576f26b55d0c119c620535d2ca7499c3d492e6538f5e939ea5556bf6"
Jan 28 17:52:50 crc kubenswrapper[4811]: I0128 17:52:50.273949 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e710d6c576f26b55d0c119c620535d2ca7499c3d492e6538f5e939ea5556bf6"} err="failed to get container status \"4e710d6c576f26b55d0c119c620535d2ca7499c3d492e6538f5e939ea5556bf6\": rpc error: code = NotFound desc = could not find container \"4e710d6c576f26b55d0c119c620535d2ca7499c3d492e6538f5e939ea5556bf6\": container with ID starting with 4e710d6c576f26b55d0c119c620535d2ca7499c3d492e6538f5e939ea5556bf6 not found: ID does not exist"
Jan 28 17:52:50 crc kubenswrapper[4811]: I0128 17:52:50.273975 4811 scope.go:117] "RemoveContainer" containerID="6625fe0bb74ac7f47cac81a9a893842e088fea120d53a0632e4939c1d6705362"
Jan 28 17:52:50 crc kubenswrapper[4811]: E0128 17:52:50.274611 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6625fe0bb74ac7f47cac81a9a893842e088fea120d53a0632e4939c1d6705362\": container with ID starting with 6625fe0bb74ac7f47cac81a9a893842e088fea120d53a0632e4939c1d6705362 not found: ID does not exist" containerID="6625fe0bb74ac7f47cac81a9a893842e088fea120d53a0632e4939c1d6705362"
Jan 28 17:52:50 crc kubenswrapper[4811]: I0128 17:52:50.274666 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6625fe0bb74ac7f47cac81a9a893842e088fea120d53a0632e4939c1d6705362"} err="failed to get container status \"6625fe0bb74ac7f47cac81a9a893842e088fea120d53a0632e4939c1d6705362\": rpc error: code = NotFound desc = could not find container \"6625fe0bb74ac7f47cac81a9a893842e088fea120d53a0632e4939c1d6705362\": container with ID starting with 6625fe0bb74ac7f47cac81a9a893842e088fea120d53a0632e4939c1d6705362 not found: ID does not exist"
Jan 28 17:52:50 crc kubenswrapper[4811]: I0128 17:52:50.274697 4811 scope.go:117] "RemoveContainer" containerID="143fff6e446d47f8ef043f1eeba458c8c8c30b6aebd82820fe2eccdbd44478a8"
Jan 28 17:52:50 crc kubenswrapper[4811]: E0128 17:52:50.275048 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"143fff6e446d47f8ef043f1eeba458c8c8c30b6aebd82820fe2eccdbd44478a8\": container with ID starting with 143fff6e446d47f8ef043f1eeba458c8c8c30b6aebd82820fe2eccdbd44478a8 not found: ID does not exist" containerID="143fff6e446d47f8ef043f1eeba458c8c8c30b6aebd82820fe2eccdbd44478a8"
Jan 28 17:52:50 crc kubenswrapper[4811]: I0128 17:52:50.275080 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"143fff6e446d47f8ef043f1eeba458c8c8c30b6aebd82820fe2eccdbd44478a8"} err="failed to get container status \"143fff6e446d47f8ef043f1eeba458c8c8c30b6aebd82820fe2eccdbd44478a8\": rpc error: code = NotFound desc = could not find container \"143fff6e446d47f8ef043f1eeba458c8c8c30b6aebd82820fe2eccdbd44478a8\": container with ID starting with 143fff6e446d47f8ef043f1eeba458c8c8c30b6aebd82820fe2eccdbd44478a8 not found: ID does not exist"
Jan 28 17:52:50 crc kubenswrapper[4811]: I0128 17:52:50.352361 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67fdb606-03f0-47b1-9bba-73a00c14248d" path="/var/lib/kubelet/pods/67fdb606-03f0-47b1-9bba-73a00c14248d/volumes"
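[Editor's note] The RemoveContainer / "ContainerStatus from runtime service failed" (NotFound) / "DeleteContainer returned error" triplets above are benign: each container was already gone when the second cleanup pass ran, and a NotFound answer from the CRI runtime just means there is nothing left to delete. A hedged Go sketch of that make-it-idempotent pattern, using gRPC status codes the way the "rpc error: code = NotFound" text suggests; the fakeRuntime type is a stand-in, not a real CRI client, and this is not the kubelet's actual code:

    // removecontainer.go - sketch of tolerating NotFound when deleting a
    // container that is already gone, the benign pattern in the log above.
    package main

    import (
        "fmt"

        "google.golang.org/grpc/codes"
        "google.golang.org/grpc/status"
    )

    // fakeRuntime reports NotFound for every ID, mimicking the
    // already-deleted containers in the entries above.
    type fakeRuntime struct{}

    func (fakeRuntime) RemoveContainer(id string) error {
        return status.Errorf(codes.NotFound, "could not find container %q", id)
    }

    // removeIfPresent treats NotFound as success, so repeated cleanup
    // passes over the same container are harmless.
    func removeIfPresent(rs interface{ RemoveContainer(string) error }, id string) error {
        err := rs.RemoveContainer(id)
        if status.Code(err) == codes.NotFound {
            fmt.Printf("container %s already gone, ignoring\n", id)
            return nil
        }
        return err // nil on success, real errors propagate
    }

    func main() {
        _ = removeIfPresent(fakeRuntime{}, "4e710d6c576f")
    }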
Jan 28 17:53:03 crc kubenswrapper[4811]: I0128 17:53:03.086946 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 17:53:03 crc kubenswrapper[4811]: I0128 17:53:03.087403 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 17:53:03 crc kubenswrapper[4811]: I0128 17:53:03.087458 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6"
Jan 28 17:53:03 crc kubenswrapper[4811]: I0128 17:53:03.088004 4811 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"90375971f2c5a7938073dc9ee633c0e5f78e2d3a0ea787115d60e65aaaecf886"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 28 17:53:03 crc kubenswrapper[4811]: I0128 17:53:03.088055 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://90375971f2c5a7938073dc9ee633c0e5f78e2d3a0ea787115d60e65aaaecf886" gracePeriod=600
Jan 28 17:53:03 crc kubenswrapper[4811]: E0128 17:53:03.239005 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:53:03 crc kubenswrapper[4811]: I0128 17:53:03.318162 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="90375971f2c5a7938073dc9ee633c0e5f78e2d3a0ea787115d60e65aaaecf886" exitCode=0
Jan 28 17:53:03 crc kubenswrapper[4811]: I0128 17:53:03.318205 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"90375971f2c5a7938073dc9ee633c0e5f78e2d3a0ea787115d60e65aaaecf886"}
Jan 28 17:53:03 crc kubenswrapper[4811]: I0128 17:53:03.318551 4811 scope.go:117] "RemoveContainer" containerID="b2b40db7ba92973c78c86379b0152ed793614e93e3ce55a3e2a3a4ff04fcc7c9"
Jan 28 17:53:03 crc kubenswrapper[4811]: I0128 17:53:03.319313 4811 scope.go:117] "RemoveContainer" containerID="90375971f2c5a7938073dc9ee633c0e5f78e2d3a0ea787115d60e65aaaecf886"
Jan 28 17:53:03 crc kubenswrapper[4811]: E0128 17:53:03.319650 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
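[Editor's note] The "back-off 5m0s restarting failed container" text above comes from the kubelet's restart backoff for crash-looping containers: upstream, the delay starts at 10s and doubles on each failed restart until it saturates at 5 minutes, which is why the message quotes 5m0s once the cap is reached. The 10s base, doubling factor, and 5m cap are upstream kubelet defaults stated here as an assumption, not values read from this log. A toy Go model of that schedule:

    // backoff.go - toy model of the restart backoff that produces the
    // "back-off 5m0s" ceiling quoted above (assumed upstream defaults).
    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        delay, maxDelay := 10*time.Second, 5*time.Minute
        for i := 1; i <= 7; i++ {
            fmt.Printf("restart %d: wait %v\n", i, delay)
            if delay *= 2; delay > maxDelay {
                delay = maxDelay
            }
        }
        // Restarts 1..7 wait 10s, 20s, 40s, 1m20s, 2m40s, 5m0s, 5m0s:
        // after the cap, every retry reports the same back-off 5m0s.
    }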
Jan 28 17:53:15 crc kubenswrapper[4811]: I0128 17:53:15.340964 4811 scope.go:117] "RemoveContainer" containerID="90375971f2c5a7938073dc9ee633c0e5f78e2d3a0ea787115d60e65aaaecf886"
Jan 28 17:53:15 crc kubenswrapper[4811]: E0128 17:53:15.341741 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:53:23 crc kubenswrapper[4811]: I0128 17:53:23.799628 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/ceilometer-0" podUID="840f9dd7-e272-4eca-8c71-8b8b27a34ef3" containerName="ceilometer-central-agent" probeResult="failure" output="command timed out"
Jan 28 17:53:24 crc kubenswrapper[4811]: I0128 17:53:24.544619 4811 generic.go:334] "Generic (PLEG): container finished" podID="04a4df18-f769-4741-91e3-734245e3f14f" containerID="108d08f8073375d1dab65a7e4ff77cff3464ffe87b71bbda4152950b8a099c11" exitCode=0
Jan 28 17:53:24 crc kubenswrapper[4811]: I0128 17:53:24.544665 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-jwm6n" event={"ID":"04a4df18-f769-4741-91e3-734245e3f14f","Type":"ContainerDied","Data":"108d08f8073375d1dab65a7e4ff77cff3464ffe87b71bbda4152950b8a099c11"}
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.049600 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-jwm6n"
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.117617 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/04a4df18-f769-4741-91e3-734245e3f14f-ssh-key-openstack-cell1\") pod \"04a4df18-f769-4741-91e3-734245e3f14f\" (UID: \"04a4df18-f769-4741-91e3-734245e3f14f\") "
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.117745 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/04a4df18-f769-4741-91e3-734245e3f14f-inventory\") pod \"04a4df18-f769-4741-91e3-734245e3f14f\" (UID: \"04a4df18-f769-4741-91e3-734245e3f14f\") "
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.117780 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6kvlh\" (UniqueName: \"kubernetes.io/projected/04a4df18-f769-4741-91e3-734245e3f14f-kube-api-access-6kvlh\") pod \"04a4df18-f769-4741-91e3-734245e3f14f\" (UID: \"04a4df18-f769-4741-91e3-734245e3f14f\") "
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.117932 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/04a4df18-f769-4741-91e3-734245e3f14f-ceph\") pod \"04a4df18-f769-4741-91e3-734245e3f14f\" (UID: \"04a4df18-f769-4741-91e3-734245e3f14f\") "
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.124300 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04a4df18-f769-4741-91e3-734245e3f14f-ceph" (OuterVolumeSpecName: "ceph") pod "04a4df18-f769-4741-91e3-734245e3f14f" (UID: "04a4df18-f769-4741-91e3-734245e3f14f"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.124532 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04a4df18-f769-4741-91e3-734245e3f14f-kube-api-access-6kvlh" (OuterVolumeSpecName: "kube-api-access-6kvlh") pod "04a4df18-f769-4741-91e3-734245e3f14f" (UID: "04a4df18-f769-4741-91e3-734245e3f14f"). InnerVolumeSpecName "kube-api-access-6kvlh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.153564 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04a4df18-f769-4741-91e3-734245e3f14f-inventory" (OuterVolumeSpecName: "inventory") pod "04a4df18-f769-4741-91e3-734245e3f14f" (UID: "04a4df18-f769-4741-91e3-734245e3f14f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.153943 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04a4df18-f769-4741-91e3-734245e3f14f-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "04a4df18-f769-4741-91e3-734245e3f14f" (UID: "04a4df18-f769-4741-91e3-734245e3f14f"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.220490 4811 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/04a4df18-f769-4741-91e3-734245e3f14f-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\""
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.220528 4811 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/04a4df18-f769-4741-91e3-734245e3f14f-inventory\") on node \"crc\" DevicePath \"\""
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.220539 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6kvlh\" (UniqueName: \"kubernetes.io/projected/04a4df18-f769-4741-91e3-734245e3f14f-kube-api-access-6kvlh\") on node \"crc\" DevicePath \"\""
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.220548 4811 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/04a4df18-f769-4741-91e3-734245e3f14f-ceph\") on node \"crc\" DevicePath \"\""
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.562395 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-jwm6n" event={"ID":"04a4df18-f769-4741-91e3-734245e3f14f","Type":"ContainerDied","Data":"5b0bb8f0e5a0f6237776aed52c5101198daa7baaa95811f58de110bdcf90e7d0"}
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.562481 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5b0bb8f0e5a0f6237776aed52c5101198daa7baaa95811f58de110bdcf90e7d0"
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.562424 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-jwm6n"
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.708078 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-openstack-nxrrf"]
Jan 28 17:53:26 crc kubenswrapper[4811]: E0128 17:53:26.708611 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04a4df18-f769-4741-91e3-734245e3f14f" containerName="configure-os-openstack-openstack-cell1"
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.708631 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="04a4df18-f769-4741-91e3-734245e3f14f" containerName="configure-os-openstack-openstack-cell1"
Jan 28 17:53:26 crc kubenswrapper[4811]: E0128 17:53:26.708665 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67fdb606-03f0-47b1-9bba-73a00c14248d" containerName="extract-content"
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.708672 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="67fdb606-03f0-47b1-9bba-73a00c14248d" containerName="extract-content"
Jan 28 17:53:26 crc kubenswrapper[4811]: E0128 17:53:26.708688 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67fdb606-03f0-47b1-9bba-73a00c14248d" containerName="registry-server"
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.708697 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="67fdb606-03f0-47b1-9bba-73a00c14248d" containerName="registry-server"
Jan 28 17:53:26 crc kubenswrapper[4811]: E0128 17:53:26.708710 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67fdb606-03f0-47b1-9bba-73a00c14248d" containerName="extract-utilities"
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.708717 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="67fdb606-03f0-47b1-9bba-73a00c14248d" containerName="extract-utilities"
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.708963 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="67fdb606-03f0-47b1-9bba-73a00c14248d" containerName="registry-server"
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.708987 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="04a4df18-f769-4741-91e3-734245e3f14f" containerName="configure-os-openstack-openstack-cell1"
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.709890 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-openstack-nxrrf"
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.716340 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.716658 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-856xk"
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.716789 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.716803 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.739107 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-openstack-nxrrf"]
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.831586 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzddz\" (UniqueName: \"kubernetes.io/projected/124756d7-a566-4c29-a30a-cb96ddf1dd6b-kube-api-access-xzddz\") pod \"ssh-known-hosts-openstack-nxrrf\" (UID: \"124756d7-a566-4c29-a30a-cb96ddf1dd6b\") " pod="openstack/ssh-known-hosts-openstack-nxrrf"
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.832037 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/124756d7-a566-4c29-a30a-cb96ddf1dd6b-inventory-0\") pod \"ssh-known-hosts-openstack-nxrrf\" (UID: \"124756d7-a566-4c29-a30a-cb96ddf1dd6b\") " pod="openstack/ssh-known-hosts-openstack-nxrrf"
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.832167 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/124756d7-a566-4c29-a30a-cb96ddf1dd6b-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-nxrrf\" (UID: \"124756d7-a566-4c29-a30a-cb96ddf1dd6b\") " pod="openstack/ssh-known-hosts-openstack-nxrrf"
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.832199 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/124756d7-a566-4c29-a30a-cb96ddf1dd6b-ceph\") pod \"ssh-known-hosts-openstack-nxrrf\" (UID: \"124756d7-a566-4c29-a30a-cb96ddf1dd6b\") " pod="openstack/ssh-known-hosts-openstack-nxrrf"
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.933862 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzddz\" (UniqueName: \"kubernetes.io/projected/124756d7-a566-4c29-a30a-cb96ddf1dd6b-kube-api-access-xzddz\") pod \"ssh-known-hosts-openstack-nxrrf\" (UID: \"124756d7-a566-4c29-a30a-cb96ddf1dd6b\") " pod="openstack/ssh-known-hosts-openstack-nxrrf"
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.933950 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/124756d7-a566-4c29-a30a-cb96ddf1dd6b-inventory-0\") pod \"ssh-known-hosts-openstack-nxrrf\" (UID: \"124756d7-a566-4c29-a30a-cb96ddf1dd6b\") " pod="openstack/ssh-known-hosts-openstack-nxrrf"
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.934021 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/124756d7-a566-4c29-a30a-cb96ddf1dd6b-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-nxrrf\" (UID: \"124756d7-a566-4c29-a30a-cb96ddf1dd6b\") " pod="openstack/ssh-known-hosts-openstack-nxrrf"
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.934054 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/124756d7-a566-4c29-a30a-cb96ddf1dd6b-ceph\") pod \"ssh-known-hosts-openstack-nxrrf\" (UID: \"124756d7-a566-4c29-a30a-cb96ddf1dd6b\") " pod="openstack/ssh-known-hosts-openstack-nxrrf"
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.938707 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/124756d7-a566-4c29-a30a-cb96ddf1dd6b-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-nxrrf\" (UID: \"124756d7-a566-4c29-a30a-cb96ddf1dd6b\") " pod="openstack/ssh-known-hosts-openstack-nxrrf"
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.938721 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/124756d7-a566-4c29-a30a-cb96ddf1dd6b-ceph\") pod \"ssh-known-hosts-openstack-nxrrf\" (UID: \"124756d7-a566-4c29-a30a-cb96ddf1dd6b\") " pod="openstack/ssh-known-hosts-openstack-nxrrf"
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.956200 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/124756d7-a566-4c29-a30a-cb96ddf1dd6b-inventory-0\") pod \"ssh-known-hosts-openstack-nxrrf\" (UID: \"124756d7-a566-4c29-a30a-cb96ddf1dd6b\") " pod="openstack/ssh-known-hosts-openstack-nxrrf"
Jan 28 17:53:26 crc kubenswrapper[4811]: I0128 17:53:26.959005 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzddz\" (UniqueName: \"kubernetes.io/projected/124756d7-a566-4c29-a30a-cb96ddf1dd6b-kube-api-access-xzddz\") pod \"ssh-known-hosts-openstack-nxrrf\" (UID: \"124756d7-a566-4c29-a30a-cb96ddf1dd6b\") " pod="openstack/ssh-known-hosts-openstack-nxrrf"
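[Editor's note] The VerifyControllerAttachedVolume / "MountVolume started" / "MountVolume.SetUp succeeded" progression above is the kubelet volume manager reconciling its desired state (what the pod spec wants mounted) against its actual state (what is currently mounted); the UnmountVolume entries earlier in the log are the same loop running in the opposite direction for a deleted pod. A toy Go sketch of that reconcile shape, with the volume names taken from the ssh-known-hosts pod above and every type a deliberate stand-in for the real structures in reconciler_common.go:

    // reconcile.go - toy desired-vs-actual reconcile loop in the shape of
    // the volume-manager entries above; illustrative only.
    package main

    import "fmt"

    func main() {
        desired := map[string]bool{ // volumes the pod spec wants mounted
            "ssh-key-openstack-cell1": true,
            "inventory-0":             true,
            "ceph":                    true,
            "kube-api-access-xzddz":   true,
        }
        actual := map[string]bool{} // volumes mounted right now

        // Mount everything desired but not yet mounted.
        for name := range desired {
            if !actual[name] {
                fmt.Printf("MountVolume started for volume %q\n", name)
                actual[name] = true // MountVolume.SetUp succeeded
            }
        }
        // Unmount everything mounted but no longer desired
        // (the direction the loop takes once the pod is deleted).
        for name := range actual {
            if !desired[name] {
                fmt.Printf("UnmountVolume started for volume %q\n", name)
                delete(actual, name)
            }
        }
    }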
Jan 28 17:53:27 crc kubenswrapper[4811]: I0128 17:53:27.032668 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-openstack-nxrrf"
Jan 28 17:53:27 crc kubenswrapper[4811]: I0128 17:53:27.625821 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-openstack-nxrrf"]
Jan 28 17:53:28 crc kubenswrapper[4811]: I0128 17:53:28.347036 4811 scope.go:117] "RemoveContainer" containerID="90375971f2c5a7938073dc9ee633c0e5f78e2d3a0ea787115d60e65aaaecf886"
Jan 28 17:53:28 crc kubenswrapper[4811]: E0128 17:53:28.347604 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:53:28 crc kubenswrapper[4811]: I0128 17:53:28.584712 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-nxrrf" event={"ID":"124756d7-a566-4c29-a30a-cb96ddf1dd6b","Type":"ContainerStarted","Data":"040be24c55889c2b4a3cd7e052c7efaaf4e6625c74ce23eafc57f88ef67cbd5f"}
Jan 28 17:53:28 crc kubenswrapper[4811]: I0128 17:53:28.584756 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-nxrrf" event={"ID":"124756d7-a566-4c29-a30a-cb96ddf1dd6b","Type":"ContainerStarted","Data":"9f0a4b5c7f82cd86a084c106b2719325b2e6017deac4991f218fb36c2029840f"}
Jan 28 17:53:28 crc kubenswrapper[4811]: I0128 17:53:28.613565 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-openstack-nxrrf" podStartSLOduration=2.10453982 podStartE2EDuration="2.613536389s" podCreationTimestamp="2026-01-28 17:53:26 +0000 UTC" firstStartedPulling="2026-01-28 17:53:27.636346323 +0000 UTC m=+7700.390709906" lastFinishedPulling="2026-01-28 17:53:28.145342892 +0000 UTC m=+7700.899706475" observedRunningTime="2026-01-28 17:53:28.606799776 +0000 UTC m=+7701.361163359" watchObservedRunningTime="2026-01-28 17:53:28.613536389 +0000 UTC m=+7701.367899982"
Jan 28 17:53:37 crc kubenswrapper[4811]: I0128 17:53:37.656678 4811 generic.go:334] "Generic (PLEG): container finished" podID="124756d7-a566-4c29-a30a-cb96ddf1dd6b" containerID="040be24c55889c2b4a3cd7e052c7efaaf4e6625c74ce23eafc57f88ef67cbd5f" exitCode=0
Jan 28 17:53:37 crc kubenswrapper[4811]: I0128 17:53:37.656764 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-nxrrf" event={"ID":"124756d7-a566-4c29-a30a-cb96ddf1dd6b","Type":"ContainerDied","Data":"040be24c55889c2b4a3cd7e052c7efaaf4e6625c74ce23eafc57f88ef67cbd5f"}
Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.337515 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-openstack-nxrrf"
Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.432463 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xzddz\" (UniqueName: \"kubernetes.io/projected/124756d7-a566-4c29-a30a-cb96ddf1dd6b-kube-api-access-xzddz\") pod \"124756d7-a566-4c29-a30a-cb96ddf1dd6b\" (UID: \"124756d7-a566-4c29-a30a-cb96ddf1dd6b\") "
Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.432582 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/124756d7-a566-4c29-a30a-cb96ddf1dd6b-inventory-0\") pod \"124756d7-a566-4c29-a30a-cb96ddf1dd6b\" (UID: \"124756d7-a566-4c29-a30a-cb96ddf1dd6b\") "
Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.432742 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/124756d7-a566-4c29-a30a-cb96ddf1dd6b-ceph\") pod \"124756d7-a566-4c29-a30a-cb96ddf1dd6b\" (UID: \"124756d7-a566-4c29-a30a-cb96ddf1dd6b\") "
Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.432816 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/124756d7-a566-4c29-a30a-cb96ddf1dd6b-ssh-key-openstack-cell1\") pod \"124756d7-a566-4c29-a30a-cb96ddf1dd6b\" (UID: \"124756d7-a566-4c29-a30a-cb96ddf1dd6b\") "
Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.438845 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/124756d7-a566-4c29-a30a-cb96ddf1dd6b-kube-api-access-xzddz" (OuterVolumeSpecName: "kube-api-access-xzddz") pod "124756d7-a566-4c29-a30a-cb96ddf1dd6b" (UID: "124756d7-a566-4c29-a30a-cb96ddf1dd6b"). InnerVolumeSpecName "kube-api-access-xzddz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.451695 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/124756d7-a566-4c29-a30a-cb96ddf1dd6b-ceph" (OuterVolumeSpecName: "ceph") pod "124756d7-a566-4c29-a30a-cb96ddf1dd6b" (UID: "124756d7-a566-4c29-a30a-cb96ddf1dd6b"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.467586 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/124756d7-a566-4c29-a30a-cb96ddf1dd6b-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "124756d7-a566-4c29-a30a-cb96ddf1dd6b" (UID: "124756d7-a566-4c29-a30a-cb96ddf1dd6b"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.496811 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/124756d7-a566-4c29-a30a-cb96ddf1dd6b-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "124756d7-a566-4c29-a30a-cb96ddf1dd6b" (UID: "124756d7-a566-4c29-a30a-cb96ddf1dd6b"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.536012 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xzddz\" (UniqueName: \"kubernetes.io/projected/124756d7-a566-4c29-a30a-cb96ddf1dd6b-kube-api-access-xzddz\") on node \"crc\" DevicePath \"\""
Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.536040 4811 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/124756d7-a566-4c29-a30a-cb96ddf1dd6b-inventory-0\") on node \"crc\" DevicePath \"\""
Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.536051 4811 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/124756d7-a566-4c29-a30a-cb96ddf1dd6b-ceph\") on node \"crc\" DevicePath \"\""
Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.536383 4811 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/124756d7-a566-4c29-a30a-cb96ddf1dd6b-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\""
Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.681950 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-nxrrf" event={"ID":"124756d7-a566-4c29-a30a-cb96ddf1dd6b","Type":"ContainerDied","Data":"9f0a4b5c7f82cd86a084c106b2719325b2e6017deac4991f218fb36c2029840f"}
Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.682007 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9f0a4b5c7f82cd86a084c106b2719325b2e6017deac4991f218fb36c2029840f"
Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.682074 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-openstack-nxrrf"
Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.762621 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-openstack-openstack-cell1-wsjt7"]
Jan 28 17:53:39 crc kubenswrapper[4811]: E0128 17:53:39.763069 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="124756d7-a566-4c29-a30a-cb96ddf1dd6b" containerName="ssh-known-hosts-openstack"
Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.763085 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="124756d7-a566-4c29-a30a-cb96ddf1dd6b" containerName="ssh-known-hosts-openstack"
Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.763279 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="124756d7-a566-4c29-a30a-cb96ddf1dd6b" containerName="ssh-known-hosts-openstack"
Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.764153 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-wsjt7"
Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.772493 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.772751 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-856xk"
Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.772790 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.775992 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.778664 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-openstack-openstack-cell1-wsjt7"]
Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.845773 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cdc346e5-3330-426d-83e8-1ba1c3ff709b-ceph\") pod \"run-os-openstack-openstack-cell1-wsjt7\" (UID: \"cdc346e5-3330-426d-83e8-1ba1c3ff709b\") " pod="openstack/run-os-openstack-openstack-cell1-wsjt7"
Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.846147 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cdc346e5-3330-426d-83e8-1ba1c3ff709b-inventory\") pod \"run-os-openstack-openstack-cell1-wsjt7\" (UID: \"cdc346e5-3330-426d-83e8-1ba1c3ff709b\") " pod="openstack/run-os-openstack-openstack-cell1-wsjt7"
Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.846279 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/cdc346e5-3330-426d-83e8-1ba1c3ff709b-ssh-key-openstack-cell1\") pod \"run-os-openstack-openstack-cell1-wsjt7\" (UID: \"cdc346e5-3330-426d-83e8-1ba1c3ff709b\") " pod="openstack/run-os-openstack-openstack-cell1-wsjt7"
Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.846401 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5n7f\" (UniqueName: \"kubernetes.io/projected/cdc346e5-3330-426d-83e8-1ba1c3ff709b-kube-api-access-l5n7f\") pod \"run-os-openstack-openstack-cell1-wsjt7\" (UID: \"cdc346e5-3330-426d-83e8-1ba1c3ff709b\") " pod="openstack/run-os-openstack-openstack-cell1-wsjt7"
Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.948620 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cdc346e5-3330-426d-83e8-1ba1c3ff709b-inventory\") pod \"run-os-openstack-openstack-cell1-wsjt7\" (UID: \"cdc346e5-3330-426d-83e8-1ba1c3ff709b\") " pod="openstack/run-os-openstack-openstack-cell1-wsjt7"
Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.948719 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/cdc346e5-3330-426d-83e8-1ba1c3ff709b-ssh-key-openstack-cell1\") pod \"run-os-openstack-openstack-cell1-wsjt7\" (UID: \"cdc346e5-3330-426d-83e8-1ba1c3ff709b\") " pod="openstack/run-os-openstack-openstack-cell1-wsjt7"
Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.948811 4811
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5n7f\" (UniqueName: \"kubernetes.io/projected/cdc346e5-3330-426d-83e8-1ba1c3ff709b-kube-api-access-l5n7f\") pod \"run-os-openstack-openstack-cell1-wsjt7\" (UID: \"cdc346e5-3330-426d-83e8-1ba1c3ff709b\") " pod="openstack/run-os-openstack-openstack-cell1-wsjt7" Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.949070 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cdc346e5-3330-426d-83e8-1ba1c3ff709b-ceph\") pod \"run-os-openstack-openstack-cell1-wsjt7\" (UID: \"cdc346e5-3330-426d-83e8-1ba1c3ff709b\") " pod="openstack/run-os-openstack-openstack-cell1-wsjt7" Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.953759 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/cdc346e5-3330-426d-83e8-1ba1c3ff709b-ssh-key-openstack-cell1\") pod \"run-os-openstack-openstack-cell1-wsjt7\" (UID: \"cdc346e5-3330-426d-83e8-1ba1c3ff709b\") " pod="openstack/run-os-openstack-openstack-cell1-wsjt7" Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.956506 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cdc346e5-3330-426d-83e8-1ba1c3ff709b-inventory\") pod \"run-os-openstack-openstack-cell1-wsjt7\" (UID: \"cdc346e5-3330-426d-83e8-1ba1c3ff709b\") " pod="openstack/run-os-openstack-openstack-cell1-wsjt7" Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.968083 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cdc346e5-3330-426d-83e8-1ba1c3ff709b-ceph\") pod \"run-os-openstack-openstack-cell1-wsjt7\" (UID: \"cdc346e5-3330-426d-83e8-1ba1c3ff709b\") " pod="openstack/run-os-openstack-openstack-cell1-wsjt7" Jan 28 17:53:39 crc kubenswrapper[4811]: I0128 17:53:39.968669 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5n7f\" (UniqueName: \"kubernetes.io/projected/cdc346e5-3330-426d-83e8-1ba1c3ff709b-kube-api-access-l5n7f\") pod \"run-os-openstack-openstack-cell1-wsjt7\" (UID: \"cdc346e5-3330-426d-83e8-1ba1c3ff709b\") " pod="openstack/run-os-openstack-openstack-cell1-wsjt7" Jan 28 17:53:40 crc kubenswrapper[4811]: I0128 17:53:40.094084 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-wsjt7" Jan 28 17:53:40 crc kubenswrapper[4811]: I0128 17:53:40.657802 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-openstack-openstack-cell1-wsjt7"] Jan 28 17:53:40 crc kubenswrapper[4811]: I0128 17:53:40.677523 4811 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 28 17:53:40 crc kubenswrapper[4811]: I0128 17:53:40.699707 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-wsjt7" event={"ID":"cdc346e5-3330-426d-83e8-1ba1c3ff709b","Type":"ContainerStarted","Data":"078a4a8d1ace9c9ecbd9bb3fe7c82feca554b902a8247d2fb2d02231782b1a7c"} Jan 28 17:53:41 crc kubenswrapper[4811]: I0128 17:53:41.710128 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-wsjt7" event={"ID":"cdc346e5-3330-426d-83e8-1ba1c3ff709b","Type":"ContainerStarted","Data":"38d2fe3983b5b64681e4400480afa24dcbf5b6971b90a8525f3dd58b2e5a7217"} Jan 28 17:53:41 crc kubenswrapper[4811]: I0128 17:53:41.739248 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-openstack-openstack-cell1-wsjt7" podStartSLOduration=2.204054902 podStartE2EDuration="2.739226389s" podCreationTimestamp="2026-01-28 17:53:39 +0000 UTC" firstStartedPulling="2026-01-28 17:53:40.67723271 +0000 UTC m=+7713.431596293" lastFinishedPulling="2026-01-28 17:53:41.212404197 +0000 UTC m=+7713.966767780" observedRunningTime="2026-01-28 17:53:41.734703367 +0000 UTC m=+7714.489066950" watchObservedRunningTime="2026-01-28 17:53:41.739226389 +0000 UTC m=+7714.493589972" Jan 28 17:53:42 crc kubenswrapper[4811]: I0128 17:53:42.341507 4811 scope.go:117] "RemoveContainer" containerID="90375971f2c5a7938073dc9ee633c0e5f78e2d3a0ea787115d60e65aaaecf886" Jan 28 17:53:42 crc kubenswrapper[4811]: E0128 17:53:42.341875 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:53:50 crc kubenswrapper[4811]: I0128 17:53:50.800396 4811 generic.go:334] "Generic (PLEG): container finished" podID="cdc346e5-3330-426d-83e8-1ba1c3ff709b" containerID="38d2fe3983b5b64681e4400480afa24dcbf5b6971b90a8525f3dd58b2e5a7217" exitCode=0 Jan 28 17:53:50 crc kubenswrapper[4811]: I0128 17:53:50.800485 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-wsjt7" event={"ID":"cdc346e5-3330-426d-83e8-1ba1c3ff709b","Type":"ContainerDied","Data":"38d2fe3983b5b64681e4400480afa24dcbf5b6971b90a8525f3dd58b2e5a7217"} Jan 28 17:53:52 crc kubenswrapper[4811]: I0128 17:53:52.352314 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-wsjt7" Jan 28 17:53:52 crc kubenswrapper[4811]: I0128 17:53:52.441477 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l5n7f\" (UniqueName: \"kubernetes.io/projected/cdc346e5-3330-426d-83e8-1ba1c3ff709b-kube-api-access-l5n7f\") pod \"cdc346e5-3330-426d-83e8-1ba1c3ff709b\" (UID: \"cdc346e5-3330-426d-83e8-1ba1c3ff709b\") " Jan 28 17:53:52 crc kubenswrapper[4811]: I0128 17:53:52.441578 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cdc346e5-3330-426d-83e8-1ba1c3ff709b-inventory\") pod \"cdc346e5-3330-426d-83e8-1ba1c3ff709b\" (UID: \"cdc346e5-3330-426d-83e8-1ba1c3ff709b\") " Jan 28 17:53:52 crc kubenswrapper[4811]: I0128 17:53:52.441691 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cdc346e5-3330-426d-83e8-1ba1c3ff709b-ceph\") pod \"cdc346e5-3330-426d-83e8-1ba1c3ff709b\" (UID: \"cdc346e5-3330-426d-83e8-1ba1c3ff709b\") " Jan 28 17:53:52 crc kubenswrapper[4811]: I0128 17:53:52.441777 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/cdc346e5-3330-426d-83e8-1ba1c3ff709b-ssh-key-openstack-cell1\") pod \"cdc346e5-3330-426d-83e8-1ba1c3ff709b\" (UID: \"cdc346e5-3330-426d-83e8-1ba1c3ff709b\") " Jan 28 17:53:52 crc kubenswrapper[4811]: I0128 17:53:52.448948 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cdc346e5-3330-426d-83e8-1ba1c3ff709b-ceph" (OuterVolumeSpecName: "ceph") pod "cdc346e5-3330-426d-83e8-1ba1c3ff709b" (UID: "cdc346e5-3330-426d-83e8-1ba1c3ff709b"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:53:52 crc kubenswrapper[4811]: I0128 17:53:52.449170 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cdc346e5-3330-426d-83e8-1ba1c3ff709b-kube-api-access-l5n7f" (OuterVolumeSpecName: "kube-api-access-l5n7f") pod "cdc346e5-3330-426d-83e8-1ba1c3ff709b" (UID: "cdc346e5-3330-426d-83e8-1ba1c3ff709b"). InnerVolumeSpecName "kube-api-access-l5n7f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:53:52 crc kubenswrapper[4811]: I0128 17:53:52.477633 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cdc346e5-3330-426d-83e8-1ba1c3ff709b-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "cdc346e5-3330-426d-83e8-1ba1c3ff709b" (UID: "cdc346e5-3330-426d-83e8-1ba1c3ff709b"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:53:52 crc kubenswrapper[4811]: I0128 17:53:52.481849 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cdc346e5-3330-426d-83e8-1ba1c3ff709b-inventory" (OuterVolumeSpecName: "inventory") pod "cdc346e5-3330-426d-83e8-1ba1c3ff709b" (UID: "cdc346e5-3330-426d-83e8-1ba1c3ff709b"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:53:52 crc kubenswrapper[4811]: I0128 17:53:52.544722 4811 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cdc346e5-3330-426d-83e8-1ba1c3ff709b-ceph\") on node \"crc\" DevicePath \"\"" Jan 28 17:53:52 crc kubenswrapper[4811]: I0128 17:53:52.544770 4811 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/cdc346e5-3330-426d-83e8-1ba1c3ff709b-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 28 17:53:52 crc kubenswrapper[4811]: I0128 17:53:52.544783 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5n7f\" (UniqueName: \"kubernetes.io/projected/cdc346e5-3330-426d-83e8-1ba1c3ff709b-kube-api-access-l5n7f\") on node \"crc\" DevicePath \"\"" Jan 28 17:53:52 crc kubenswrapper[4811]: I0128 17:53:52.544792 4811 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cdc346e5-3330-426d-83e8-1ba1c3ff709b-inventory\") on node \"crc\" DevicePath \"\"" Jan 28 17:53:52 crc kubenswrapper[4811]: I0128 17:53:52.821372 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-wsjt7" event={"ID":"cdc346e5-3330-426d-83e8-1ba1c3ff709b","Type":"ContainerDied","Data":"078a4a8d1ace9c9ecbd9bb3fe7c82feca554b902a8247d2fb2d02231782b1a7c"} Jan 28 17:53:52 crc kubenswrapper[4811]: I0128 17:53:52.821420 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="078a4a8d1ace9c9ecbd9bb3fe7c82feca554b902a8247d2fb2d02231782b1a7c" Jan 28 17:53:52 crc kubenswrapper[4811]: I0128 17:53:52.821499 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-wsjt7" Jan 28 17:53:52 crc kubenswrapper[4811]: I0128 17:53:52.923488 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-qhlxf"] Jan 28 17:53:52 crc kubenswrapper[4811]: E0128 17:53:52.924017 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdc346e5-3330-426d-83e8-1ba1c3ff709b" containerName="run-os-openstack-openstack-cell1" Jan 28 17:53:52 crc kubenswrapper[4811]: I0128 17:53:52.924040 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdc346e5-3330-426d-83e8-1ba1c3ff709b" containerName="run-os-openstack-openstack-cell1" Jan 28 17:53:52 crc kubenswrapper[4811]: I0128 17:53:52.924286 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="cdc346e5-3330-426d-83e8-1ba1c3ff709b" containerName="run-os-openstack-openstack-cell1" Jan 28 17:53:52 crc kubenswrapper[4811]: I0128 17:53:52.925023 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-qhlxf" Jan 28 17:53:52 crc kubenswrapper[4811]: I0128 17:53:52.927734 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 28 17:53:52 crc kubenswrapper[4811]: I0128 17:53:52.930519 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 28 17:53:52 crc kubenswrapper[4811]: I0128 17:53:52.930526 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-856xk" Jan 28 17:53:52 crc kubenswrapper[4811]: I0128 17:53:52.931896 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 28 17:53:52 crc kubenswrapper[4811]: I0128 17:53:52.952897 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-qhlxf"] Jan 28 17:53:53 crc kubenswrapper[4811]: I0128 17:53:53.055034 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2ac47199-b6b7-4461-b5f9-d5901cbad638-ceph\") pod \"reboot-os-openstack-openstack-cell1-qhlxf\" (UID: \"2ac47199-b6b7-4461-b5f9-d5901cbad638\") " pod="openstack/reboot-os-openstack-openstack-cell1-qhlxf" Jan 28 17:53:53 crc kubenswrapper[4811]: I0128 17:53:53.055143 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2ac47199-b6b7-4461-b5f9-d5901cbad638-inventory\") pod \"reboot-os-openstack-openstack-cell1-qhlxf\" (UID: \"2ac47199-b6b7-4461-b5f9-d5901cbad638\") " pod="openstack/reboot-os-openstack-openstack-cell1-qhlxf" Jan 28 17:53:53 crc kubenswrapper[4811]: I0128 17:53:53.055921 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbz9q\" (UniqueName: \"kubernetes.io/projected/2ac47199-b6b7-4461-b5f9-d5901cbad638-kube-api-access-fbz9q\") pod \"reboot-os-openstack-openstack-cell1-qhlxf\" (UID: \"2ac47199-b6b7-4461-b5f9-d5901cbad638\") " pod="openstack/reboot-os-openstack-openstack-cell1-qhlxf" Jan 28 17:53:53 crc kubenswrapper[4811]: I0128 17:53:53.056036 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/2ac47199-b6b7-4461-b5f9-d5901cbad638-ssh-key-openstack-cell1\") pod \"reboot-os-openstack-openstack-cell1-qhlxf\" (UID: \"2ac47199-b6b7-4461-b5f9-d5901cbad638\") " pod="openstack/reboot-os-openstack-openstack-cell1-qhlxf" Jan 28 17:53:53 crc kubenswrapper[4811]: I0128 17:53:53.158491 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2ac47199-b6b7-4461-b5f9-d5901cbad638-ceph\") pod \"reboot-os-openstack-openstack-cell1-qhlxf\" (UID: \"2ac47199-b6b7-4461-b5f9-d5901cbad638\") " pod="openstack/reboot-os-openstack-openstack-cell1-qhlxf" Jan 28 17:53:53 crc kubenswrapper[4811]: I0128 17:53:53.158844 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2ac47199-b6b7-4461-b5f9-d5901cbad638-inventory\") pod \"reboot-os-openstack-openstack-cell1-qhlxf\" (UID: \"2ac47199-b6b7-4461-b5f9-d5901cbad638\") " pod="openstack/reboot-os-openstack-openstack-cell1-qhlxf" Jan 28 17:53:53 crc kubenswrapper[4811]: I0128 17:53:53.158984 4811 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbz9q\" (UniqueName: \"kubernetes.io/projected/2ac47199-b6b7-4461-b5f9-d5901cbad638-kube-api-access-fbz9q\") pod \"reboot-os-openstack-openstack-cell1-qhlxf\" (UID: \"2ac47199-b6b7-4461-b5f9-d5901cbad638\") " pod="openstack/reboot-os-openstack-openstack-cell1-qhlxf" Jan 28 17:53:53 crc kubenswrapper[4811]: I0128 17:53:53.159007 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/2ac47199-b6b7-4461-b5f9-d5901cbad638-ssh-key-openstack-cell1\") pod \"reboot-os-openstack-openstack-cell1-qhlxf\" (UID: \"2ac47199-b6b7-4461-b5f9-d5901cbad638\") " pod="openstack/reboot-os-openstack-openstack-cell1-qhlxf" Jan 28 17:53:53 crc kubenswrapper[4811]: I0128 17:53:53.162396 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2ac47199-b6b7-4461-b5f9-d5901cbad638-inventory\") pod \"reboot-os-openstack-openstack-cell1-qhlxf\" (UID: \"2ac47199-b6b7-4461-b5f9-d5901cbad638\") " pod="openstack/reboot-os-openstack-openstack-cell1-qhlxf" Jan 28 17:53:53 crc kubenswrapper[4811]: I0128 17:53:53.162396 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/2ac47199-b6b7-4461-b5f9-d5901cbad638-ssh-key-openstack-cell1\") pod \"reboot-os-openstack-openstack-cell1-qhlxf\" (UID: \"2ac47199-b6b7-4461-b5f9-d5901cbad638\") " pod="openstack/reboot-os-openstack-openstack-cell1-qhlxf" Jan 28 17:53:53 crc kubenswrapper[4811]: I0128 17:53:53.162907 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2ac47199-b6b7-4461-b5f9-d5901cbad638-ceph\") pod \"reboot-os-openstack-openstack-cell1-qhlxf\" (UID: \"2ac47199-b6b7-4461-b5f9-d5901cbad638\") " pod="openstack/reboot-os-openstack-openstack-cell1-qhlxf" Jan 28 17:53:53 crc kubenswrapper[4811]: I0128 17:53:53.180685 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbz9q\" (UniqueName: \"kubernetes.io/projected/2ac47199-b6b7-4461-b5f9-d5901cbad638-kube-api-access-fbz9q\") pod \"reboot-os-openstack-openstack-cell1-qhlxf\" (UID: \"2ac47199-b6b7-4461-b5f9-d5901cbad638\") " pod="openstack/reboot-os-openstack-openstack-cell1-qhlxf" Jan 28 17:53:53 crc kubenswrapper[4811]: I0128 17:53:53.243998 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-qhlxf" Jan 28 17:53:53 crc kubenswrapper[4811]: I0128 17:53:53.791981 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-qhlxf"] Jan 28 17:53:53 crc kubenswrapper[4811]: I0128 17:53:53.830890 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-qhlxf" event={"ID":"2ac47199-b6b7-4461-b5f9-d5901cbad638","Type":"ContainerStarted","Data":"2db9618cd840adf6cc98b87f6ba06f6c74954905609eeae8e059f9e8d7e05e64"} Jan 28 17:53:54 crc kubenswrapper[4811]: I0128 17:53:54.844456 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-qhlxf" event={"ID":"2ac47199-b6b7-4461-b5f9-d5901cbad638","Type":"ContainerStarted","Data":"4ae0c48f03f9499a3f2e8d44697770ac99127f208874d422b443c3860140fb25"} Jan 28 17:53:54 crc kubenswrapper[4811]: I0128 17:53:54.878730 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-openstack-openstack-cell1-qhlxf" podStartSLOduration=2.383044884 podStartE2EDuration="2.878700983s" podCreationTimestamp="2026-01-28 17:53:52 +0000 UTC" firstStartedPulling="2026-01-28 17:53:53.800759622 +0000 UTC m=+7726.555123205" lastFinishedPulling="2026-01-28 17:53:54.296415721 +0000 UTC m=+7727.050779304" observedRunningTime="2026-01-28 17:53:54.865484326 +0000 UTC m=+7727.619847959" watchObservedRunningTime="2026-01-28 17:53:54.878700983 +0000 UTC m=+7727.633064586" Jan 28 17:53:55 crc kubenswrapper[4811]: I0128 17:53:55.339604 4811 scope.go:117] "RemoveContainer" containerID="90375971f2c5a7938073dc9ee633c0e5f78e2d3a0ea787115d60e65aaaecf886" Jan 28 17:53:55 crc kubenswrapper[4811]: E0128 17:53:55.340257 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:54:08 crc kubenswrapper[4811]: I0128 17:54:08.349826 4811 scope.go:117] "RemoveContainer" containerID="90375971f2c5a7938073dc9ee633c0e5f78e2d3a0ea787115d60e65aaaecf886" Jan 28 17:54:08 crc kubenswrapper[4811]: E0128 17:54:08.353229 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:54:10 crc kubenswrapper[4811]: I0128 17:54:10.985416 4811 generic.go:334] "Generic (PLEG): container finished" podID="2ac47199-b6b7-4461-b5f9-d5901cbad638" containerID="4ae0c48f03f9499a3f2e8d44697770ac99127f208874d422b443c3860140fb25" exitCode=0 Jan 28 17:54:10 crc kubenswrapper[4811]: I0128 17:54:10.985483 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-qhlxf" event={"ID":"2ac47199-b6b7-4461-b5f9-d5901cbad638","Type":"ContainerDied","Data":"4ae0c48f03f9499a3f2e8d44697770ac99127f208874d422b443c3860140fb25"} Jan 28 17:54:12 crc kubenswrapper[4811]: I0128 17:54:12.543662 4811 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-qhlxf" Jan 28 17:54:12 crc kubenswrapper[4811]: I0128 17:54:12.701890 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/2ac47199-b6b7-4461-b5f9-d5901cbad638-ssh-key-openstack-cell1\") pod \"2ac47199-b6b7-4461-b5f9-d5901cbad638\" (UID: \"2ac47199-b6b7-4461-b5f9-d5901cbad638\") " Jan 28 17:54:12 crc kubenswrapper[4811]: I0128 17:54:12.703020 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fbz9q\" (UniqueName: \"kubernetes.io/projected/2ac47199-b6b7-4461-b5f9-d5901cbad638-kube-api-access-fbz9q\") pod \"2ac47199-b6b7-4461-b5f9-d5901cbad638\" (UID: \"2ac47199-b6b7-4461-b5f9-d5901cbad638\") " Jan 28 17:54:12 crc kubenswrapper[4811]: I0128 17:54:12.703110 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2ac47199-b6b7-4461-b5f9-d5901cbad638-inventory\") pod \"2ac47199-b6b7-4461-b5f9-d5901cbad638\" (UID: \"2ac47199-b6b7-4461-b5f9-d5901cbad638\") " Jan 28 17:54:12 crc kubenswrapper[4811]: I0128 17:54:12.703150 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2ac47199-b6b7-4461-b5f9-d5901cbad638-ceph\") pod \"2ac47199-b6b7-4461-b5f9-d5901cbad638\" (UID: \"2ac47199-b6b7-4461-b5f9-d5901cbad638\") " Jan 28 17:54:12 crc kubenswrapper[4811]: I0128 17:54:12.712690 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ac47199-b6b7-4461-b5f9-d5901cbad638-ceph" (OuterVolumeSpecName: "ceph") pod "2ac47199-b6b7-4461-b5f9-d5901cbad638" (UID: "2ac47199-b6b7-4461-b5f9-d5901cbad638"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:54:12 crc kubenswrapper[4811]: I0128 17:54:12.712798 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ac47199-b6b7-4461-b5f9-d5901cbad638-kube-api-access-fbz9q" (OuterVolumeSpecName: "kube-api-access-fbz9q") pod "2ac47199-b6b7-4461-b5f9-d5901cbad638" (UID: "2ac47199-b6b7-4461-b5f9-d5901cbad638"). InnerVolumeSpecName "kube-api-access-fbz9q". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:54:12 crc kubenswrapper[4811]: I0128 17:54:12.737819 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ac47199-b6b7-4461-b5f9-d5901cbad638-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "2ac47199-b6b7-4461-b5f9-d5901cbad638" (UID: "2ac47199-b6b7-4461-b5f9-d5901cbad638"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:54:12 crc kubenswrapper[4811]: I0128 17:54:12.741708 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ac47199-b6b7-4461-b5f9-d5901cbad638-inventory" (OuterVolumeSpecName: "inventory") pod "2ac47199-b6b7-4461-b5f9-d5901cbad638" (UID: "2ac47199-b6b7-4461-b5f9-d5901cbad638"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:54:12 crc kubenswrapper[4811]: I0128 17:54:12.805671 4811 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2ac47199-b6b7-4461-b5f9-d5901cbad638-inventory\") on node \"crc\" DevicePath \"\"" Jan 28 17:54:12 crc kubenswrapper[4811]: I0128 17:54:12.805946 4811 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2ac47199-b6b7-4461-b5f9-d5901cbad638-ceph\") on node \"crc\" DevicePath \"\"" Jan 28 17:54:12 crc kubenswrapper[4811]: I0128 17:54:12.805958 4811 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/2ac47199-b6b7-4461-b5f9-d5901cbad638-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 28 17:54:12 crc kubenswrapper[4811]: I0128 17:54:12.805967 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fbz9q\" (UniqueName: \"kubernetes.io/projected/2ac47199-b6b7-4461-b5f9-d5901cbad638-kube-api-access-fbz9q\") on node \"crc\" DevicePath \"\"" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.006620 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-qhlxf" event={"ID":"2ac47199-b6b7-4461-b5f9-d5901cbad638","Type":"ContainerDied","Data":"2db9618cd840adf6cc98b87f6ba06f6c74954905609eeae8e059f9e8d7e05e64"} Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.006667 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2db9618cd840adf6cc98b87f6ba06f6c74954905609eeae8e059f9e8d7e05e64" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.006644 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-qhlxf" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.147012 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-rcl46"] Jan 28 17:54:13 crc kubenswrapper[4811]: E0128 17:54:13.147495 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ac47199-b6b7-4461-b5f9-d5901cbad638" containerName="reboot-os-openstack-openstack-cell1" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.147514 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ac47199-b6b7-4461-b5f9-d5901cbad638" containerName="reboot-os-openstack-openstack-cell1" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.147782 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ac47199-b6b7-4461-b5f9-d5901cbad638" containerName="reboot-os-openstack-openstack-cell1" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.148594 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.151052 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.151052 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.151194 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-856xk" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.160028 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.161993 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-rcl46"] Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.316391 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-ssh-key-openstack-cell1\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.316575 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.316629 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-inventory\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.316670 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.316724 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-ceph\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.316783 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-ovn-combined-ca-bundle\") pod 
\"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.316807 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpxc5\" (UniqueName: \"kubernetes.io/projected/fbb46d38-4318-4b82-a99a-a165a0b78e00-kube-api-access-fpxc5\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.316848 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.316879 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.316964 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.317037 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.317087 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.419188 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-ssh-key-openstack-cell1\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.419295 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.419345 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-inventory\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.419383 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.419444 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-ceph\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.419498 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.419522 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpxc5\" (UniqueName: \"kubernetes.io/projected/fbb46d38-4318-4b82-a99a-a165a0b78e00-kube-api-access-fpxc5\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.419563 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.419594 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.419634 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.419690 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.419734 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.424497 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.425568 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-inventory\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.425703 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-ceph\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.425742 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-ssh-key-openstack-cell1\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.426056 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.426556 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-bootstrap-combined-ca-bundle\") 
pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.426953 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.426975 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.428504 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.438986 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.440017 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.440285 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpxc5\" (UniqueName: \"kubernetes.io/projected/fbb46d38-4318-4b82-a99a-a165a0b78e00-kube-api-access-fpxc5\") pod \"install-certs-openstack-openstack-cell1-rcl46\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") " pod="openstack/install-certs-openstack-openstack-cell1-rcl46" Jan 28 17:54:13 crc kubenswrapper[4811]: I0128 17:54:13.466122 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-rcl46"
Jan 28 17:54:14 crc kubenswrapper[4811]: I0128 17:54:14.009152 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-rcl46"]
Jan 28 17:54:15 crc kubenswrapper[4811]: I0128 17:54:15.028808 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-rcl46" event={"ID":"fbb46d38-4318-4b82-a99a-a165a0b78e00","Type":"ContainerStarted","Data":"3724e18dc052b6fc60d792a5db306ebfabe8bae85be2c9e79e28d6b5614592bc"}
Jan 28 17:54:16 crc kubenswrapper[4811]: I0128 17:54:16.039048 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-rcl46" event={"ID":"fbb46d38-4318-4b82-a99a-a165a0b78e00","Type":"ContainerStarted","Data":"4804d20226ebc9f10c53b966d15b9c5e216ab17489f656ab293f692e67e1e617"}
Jan 28 17:54:16 crc kubenswrapper[4811]: I0128 17:54:16.059064 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-openstack-openstack-cell1-rcl46" podStartSLOduration=2.27216242 podStartE2EDuration="3.059041928s" podCreationTimestamp="2026-01-28 17:54:13 +0000 UTC" firstStartedPulling="2026-01-28 17:54:14.020459188 +0000 UTC m=+7746.774822771" lastFinishedPulling="2026-01-28 17:54:14.807338696 +0000 UTC m=+7747.561702279" observedRunningTime="2026-01-28 17:54:16.054609569 +0000 UTC m=+7748.808973152" watchObservedRunningTime="2026-01-28 17:54:16.059041928 +0000 UTC m=+7748.813405511"
Jan 28 17:54:20 crc kubenswrapper[4811]: I0128 17:54:20.340188 4811 scope.go:117] "RemoveContainer" containerID="90375971f2c5a7938073dc9ee633c0e5f78e2d3a0ea787115d60e65aaaecf886"
Jan 28 17:54:20 crc kubenswrapper[4811]: E0128 17:54:20.341073 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:54:33 crc kubenswrapper[4811]: I0128 17:54:33.340551 4811 scope.go:117] "RemoveContainer" containerID="90375971f2c5a7938073dc9ee633c0e5f78e2d3a0ea787115d60e65aaaecf886"
Jan 28 17:54:33 crc kubenswrapper[4811]: E0128 17:54:33.343579 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:54:34 crc kubenswrapper[4811]: I0128 17:54:34.207908 4811 generic.go:334] "Generic (PLEG): container finished" podID="fbb46d38-4318-4b82-a99a-a165a0b78e00" containerID="4804d20226ebc9f10c53b966d15b9c5e216ab17489f656ab293f692e67e1e617" exitCode=0
Jan 28 17:54:34 crc kubenswrapper[4811]: I0128 17:54:34.207994 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-rcl46" event={"ID":"fbb46d38-4318-4b82-a99a-a165a0b78e00","Type":"ContainerDied","Data":"4804d20226ebc9f10c53b966d15b9c5e216ab17489f656ab293f692e67e1e617"}
Jan 28 17:54:35 crc kubenswrapper[4811]: I0128 17:54:35.771536 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-rcl46"
Jan 28 17:54:35 crc kubenswrapper[4811]: I0128 17:54:35.960278 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fpxc5\" (UniqueName: \"kubernetes.io/projected/fbb46d38-4318-4b82-a99a-a165a0b78e00-kube-api-access-fpxc5\") pod \"fbb46d38-4318-4b82-a99a-a165a0b78e00\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") "
Jan 28 17:54:35 crc kubenswrapper[4811]: I0128 17:54:35.960381 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-telemetry-combined-ca-bundle\") pod \"fbb46d38-4318-4b82-a99a-a165a0b78e00\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") "
Jan 28 17:54:35 crc kubenswrapper[4811]: I0128 17:54:35.960548 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-bootstrap-combined-ca-bundle\") pod \"fbb46d38-4318-4b82-a99a-a165a0b78e00\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") "
Jan 28 17:54:35 crc kubenswrapper[4811]: I0128 17:54:35.960763 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-inventory\") pod \"fbb46d38-4318-4b82-a99a-a165a0b78e00\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") "
Jan 28 17:54:35 crc kubenswrapper[4811]: I0128 17:54:35.960818 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-libvirt-combined-ca-bundle\") pod \"fbb46d38-4318-4b82-a99a-a165a0b78e00\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") "
Jan 28 17:54:35 crc kubenswrapper[4811]: I0128 17:54:35.960881 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-nova-combined-ca-bundle\") pod \"fbb46d38-4318-4b82-a99a-a165a0b78e00\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") "
Jan 28 17:54:35 crc kubenswrapper[4811]: I0128 17:54:35.960938 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-ssh-key-openstack-cell1\") pod \"fbb46d38-4318-4b82-a99a-a165a0b78e00\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") "
Jan 28 17:54:35 crc kubenswrapper[4811]: I0128 17:54:35.961054 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-ceph\") pod \"fbb46d38-4318-4b82-a99a-a165a0b78e00\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") "
Jan 28 17:54:35 crc kubenswrapper[4811]: I0128 17:54:35.961793 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-neutron-sriov-combined-ca-bundle\") pod \"fbb46d38-4318-4b82-a99a-a165a0b78e00\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") "
Jan 28 17:54:35 crc kubenswrapper[4811]: I0128 17:54:35.961901 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-neutron-dhcp-combined-ca-bundle\") pod \"fbb46d38-4318-4b82-a99a-a165a0b78e00\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") "
Jan 28 17:54:35 crc kubenswrapper[4811]: I0128 17:54:35.961957 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-neutron-metadata-combined-ca-bundle\") pod \"fbb46d38-4318-4b82-a99a-a165a0b78e00\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") "
Jan 28 17:54:35 crc kubenswrapper[4811]: I0128 17:54:35.962100 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-ovn-combined-ca-bundle\") pod \"fbb46d38-4318-4b82-a99a-a165a0b78e00\" (UID: \"fbb46d38-4318-4b82-a99a-a165a0b78e00\") "
Jan 28 17:54:35 crc kubenswrapper[4811]: I0128 17:54:35.968057 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "fbb46d38-4318-4b82-a99a-a165a0b78e00" (UID: "fbb46d38-4318-4b82-a99a-a165a0b78e00"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:54:35 crc kubenswrapper[4811]: I0128 17:54:35.968109 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "fbb46d38-4318-4b82-a99a-a165a0b78e00" (UID: "fbb46d38-4318-4b82-a99a-a165a0b78e00"). InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:54:35 crc kubenswrapper[4811]: I0128 17:54:35.968651 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-ceph" (OuterVolumeSpecName: "ceph") pod "fbb46d38-4318-4b82-a99a-a165a0b78e00" (UID: "fbb46d38-4318-4b82-a99a-a165a0b78e00"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:54:35 crc kubenswrapper[4811]: I0128 17:54:35.968690 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "fbb46d38-4318-4b82-a99a-a165a0b78e00" (UID: "fbb46d38-4318-4b82-a99a-a165a0b78e00"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:54:35 crc kubenswrapper[4811]: I0128 17:54:35.968706 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "fbb46d38-4318-4b82-a99a-a165a0b78e00" (UID: "fbb46d38-4318-4b82-a99a-a165a0b78e00"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:54:35 crc kubenswrapper[4811]: I0128 17:54:35.968981 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "fbb46d38-4318-4b82-a99a-a165a0b78e00" (UID: "fbb46d38-4318-4b82-a99a-a165a0b78e00"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:54:35 crc kubenswrapper[4811]: I0128 17:54:35.969673 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "fbb46d38-4318-4b82-a99a-a165a0b78e00" (UID: "fbb46d38-4318-4b82-a99a-a165a0b78e00"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:54:35 crc kubenswrapper[4811]: I0128 17:54:35.970376 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbb46d38-4318-4b82-a99a-a165a0b78e00-kube-api-access-fpxc5" (OuterVolumeSpecName: "kube-api-access-fpxc5") pod "fbb46d38-4318-4b82-a99a-a165a0b78e00" (UID: "fbb46d38-4318-4b82-a99a-a165a0b78e00"). InnerVolumeSpecName "kube-api-access-fpxc5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 17:54:35 crc kubenswrapper[4811]: I0128 17:54:35.973753 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "fbb46d38-4318-4b82-a99a-a165a0b78e00" (UID: "fbb46d38-4318-4b82-a99a-a165a0b78e00"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:54:35 crc kubenswrapper[4811]: I0128 17:54:35.983686 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "fbb46d38-4318-4b82-a99a-a165a0b78e00" (UID: "fbb46d38-4318-4b82-a99a-a165a0b78e00"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:54:35 crc kubenswrapper[4811]: I0128 17:54:35.998836 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "fbb46d38-4318-4b82-a99a-a165a0b78e00" (UID: "fbb46d38-4318-4b82-a99a-a165a0b78e00"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:54:35 crc kubenswrapper[4811]: I0128 17:54:35.999290 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-inventory" (OuterVolumeSpecName: "inventory") pod "fbb46d38-4318-4b82-a99a-a165a0b78e00" (UID: "fbb46d38-4318-4b82-a99a-a165a0b78e00"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.065097 4811 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-ceph\") on node \"crc\" DevicePath \"\""
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.065137 4811 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.065148 4811 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.065157 4811 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.065169 4811 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.065179 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fpxc5\" (UniqueName: \"kubernetes.io/projected/fbb46d38-4318-4b82-a99a-a165a0b78e00-kube-api-access-fpxc5\") on node \"crc\" DevicePath \"\""
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.065187 4811 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.065195 4811 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.065205 4811 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-inventory\") on node \"crc\" DevicePath \"\""
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.065214 4811 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.065222 4811 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.065231 4811 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/fbb46d38-4318-4b82-a99a-a165a0b78e00-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\""
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.226589 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-rcl46" event={"ID":"fbb46d38-4318-4b82-a99a-a165a0b78e00","Type":"ContainerDied","Data":"3724e18dc052b6fc60d792a5db306ebfabe8bae85be2c9e79e28d6b5614592bc"}
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.226629 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3724e18dc052b6fc60d792a5db306ebfabe8bae85be2c9e79e28d6b5614592bc"
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.226675 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-rcl46"
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.396837 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-xfg22"]
Jan 28 17:54:36 crc kubenswrapper[4811]: E0128 17:54:36.397547 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbb46d38-4318-4b82-a99a-a165a0b78e00" containerName="install-certs-openstack-openstack-cell1"
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.397565 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbb46d38-4318-4b82-a99a-a165a0b78e00" containerName="install-certs-openstack-openstack-cell1"
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.397764 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbb46d38-4318-4b82-a99a-a165a0b78e00" containerName="install-certs-openstack-openstack-cell1"
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.398496 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-xfg22"
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.404147 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-856xk"
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.404583 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.409798 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.410012 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.439480 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-xfg22"]
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.580577 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/f31d8eab-f8a5-415c-a3c5-a092ab9d99e0-ssh-key-openstack-cell1\") pod \"ceph-client-openstack-openstack-cell1-xfg22\" (UID: \"f31d8eab-f8a5-415c-a3c5-a092ab9d99e0\") " pod="openstack/ceph-client-openstack-openstack-cell1-xfg22"
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.580632 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f31d8eab-f8a5-415c-a3c5-a092ab9d99e0-inventory\") pod \"ceph-client-openstack-openstack-cell1-xfg22\" (UID: \"f31d8eab-f8a5-415c-a3c5-a092ab9d99e0\") " pod="openstack/ceph-client-openstack-openstack-cell1-xfg22"
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.580874 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f31d8eab-f8a5-415c-a3c5-a092ab9d99e0-ceph\") pod \"ceph-client-openstack-openstack-cell1-xfg22\" (UID: \"f31d8eab-f8a5-415c-a3c5-a092ab9d99e0\") " pod="openstack/ceph-client-openstack-openstack-cell1-xfg22"
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.581165 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thlxn\" (UniqueName: \"kubernetes.io/projected/f31d8eab-f8a5-415c-a3c5-a092ab9d99e0-kube-api-access-thlxn\") pod \"ceph-client-openstack-openstack-cell1-xfg22\" (UID: \"f31d8eab-f8a5-415c-a3c5-a092ab9d99e0\") " pod="openstack/ceph-client-openstack-openstack-cell1-xfg22"
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.682933 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f31d8eab-f8a5-415c-a3c5-a092ab9d99e0-ceph\") pod \"ceph-client-openstack-openstack-cell1-xfg22\" (UID: \"f31d8eab-f8a5-415c-a3c5-a092ab9d99e0\") " pod="openstack/ceph-client-openstack-openstack-cell1-xfg22"
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.683064 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thlxn\" (UniqueName: \"kubernetes.io/projected/f31d8eab-f8a5-415c-a3c5-a092ab9d99e0-kube-api-access-thlxn\") pod \"ceph-client-openstack-openstack-cell1-xfg22\" (UID: \"f31d8eab-f8a5-415c-a3c5-a092ab9d99e0\") " pod="openstack/ceph-client-openstack-openstack-cell1-xfg22"
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.683138 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/f31d8eab-f8a5-415c-a3c5-a092ab9d99e0-ssh-key-openstack-cell1\") pod \"ceph-client-openstack-openstack-cell1-xfg22\" (UID: \"f31d8eab-f8a5-415c-a3c5-a092ab9d99e0\") " pod="openstack/ceph-client-openstack-openstack-cell1-xfg22"
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.683157 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f31d8eab-f8a5-415c-a3c5-a092ab9d99e0-inventory\") pod \"ceph-client-openstack-openstack-cell1-xfg22\" (UID: \"f31d8eab-f8a5-415c-a3c5-a092ab9d99e0\") " pod="openstack/ceph-client-openstack-openstack-cell1-xfg22"
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.687550 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f31d8eab-f8a5-415c-a3c5-a092ab9d99e0-inventory\") pod \"ceph-client-openstack-openstack-cell1-xfg22\" (UID: \"f31d8eab-f8a5-415c-a3c5-a092ab9d99e0\") " pod="openstack/ceph-client-openstack-openstack-cell1-xfg22"
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.687820 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/f31d8eab-f8a5-415c-a3c5-a092ab9d99e0-ssh-key-openstack-cell1\") pod \"ceph-client-openstack-openstack-cell1-xfg22\" (UID: \"f31d8eab-f8a5-415c-a3c5-a092ab9d99e0\") " pod="openstack/ceph-client-openstack-openstack-cell1-xfg22"
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.688708 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f31d8eab-f8a5-415c-a3c5-a092ab9d99e0-ceph\") pod \"ceph-client-openstack-openstack-cell1-xfg22\" (UID: \"f31d8eab-f8a5-415c-a3c5-a092ab9d99e0\") " pod="openstack/ceph-client-openstack-openstack-cell1-xfg22"
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.710406 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thlxn\" (UniqueName: \"kubernetes.io/projected/f31d8eab-f8a5-415c-a3c5-a092ab9d99e0-kube-api-access-thlxn\") pod \"ceph-client-openstack-openstack-cell1-xfg22\" (UID: \"f31d8eab-f8a5-415c-a3c5-a092ab9d99e0\") " pod="openstack/ceph-client-openstack-openstack-cell1-xfg22"
Jan 28 17:54:36 crc kubenswrapper[4811]: I0128 17:54:36.734033 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-xfg22"
Jan 28 17:54:37 crc kubenswrapper[4811]: I0128 17:54:37.327249 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-xfg22"]
Jan 28 17:54:38 crc kubenswrapper[4811]: I0128 17:54:38.249557 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-xfg22" event={"ID":"f31d8eab-f8a5-415c-a3c5-a092ab9d99e0","Type":"ContainerStarted","Data":"32db5082b18ca1ef8dc7f212997bf611949740cbb39cf280dc2a98be6aa9bb78"}
Jan 28 17:54:38 crc kubenswrapper[4811]: I0128 17:54:38.250235 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-xfg22" event={"ID":"f31d8eab-f8a5-415c-a3c5-a092ab9d99e0","Type":"ContainerStarted","Data":"8cfbbb756394485d7f3ed78c54b0fa860f04e224343ec4fd5fd6f5d3e9176112"}
Jan 28 17:54:38 crc kubenswrapper[4811]: I0128 17:54:38.269568 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-client-openstack-openstack-cell1-xfg22" podStartSLOduration=1.781266933 podStartE2EDuration="2.269554513s" podCreationTimestamp="2026-01-28 17:54:36 +0000 UTC" firstStartedPulling="2026-01-28 17:54:37.352000719 +0000 UTC m=+7770.106364302" lastFinishedPulling="2026-01-28 17:54:37.840288299 +0000 UTC m=+7770.594651882" observedRunningTime="2026-01-28 17:54:38.268911885 +0000 UTC m=+7771.023275478" watchObservedRunningTime="2026-01-28 17:54:38.269554513 +0000 UTC m=+7771.023918096"
Jan 28 17:54:43 crc kubenswrapper[4811]: I0128 17:54:43.298153 4811 generic.go:334] "Generic (PLEG): container finished" podID="f31d8eab-f8a5-415c-a3c5-a092ab9d99e0" containerID="32db5082b18ca1ef8dc7f212997bf611949740cbb39cf280dc2a98be6aa9bb78" exitCode=0
Jan 28 17:54:43 crc kubenswrapper[4811]: I0128 17:54:43.298233 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-xfg22" event={"ID":"f31d8eab-f8a5-415c-a3c5-a092ab9d99e0","Type":"ContainerDied","Data":"32db5082b18ca1ef8dc7f212997bf611949740cbb39cf280dc2a98be6aa9bb78"}
Jan 28 17:54:44 crc kubenswrapper[4811]: I0128 17:54:44.339575 4811 scope.go:117] "RemoveContainer" containerID="90375971f2c5a7938073dc9ee633c0e5f78e2d3a0ea787115d60e65aaaecf886"
Jan 28 17:54:44 crc kubenswrapper[4811]: E0128 17:54:44.340174 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:54:44 crc kubenswrapper[4811]: I0128 17:54:44.833348 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-xfg22"
Jan 28 17:54:44 crc kubenswrapper[4811]: I0128 17:54:44.975807 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thlxn\" (UniqueName: \"kubernetes.io/projected/f31d8eab-f8a5-415c-a3c5-a092ab9d99e0-kube-api-access-thlxn\") pod \"f31d8eab-f8a5-415c-a3c5-a092ab9d99e0\" (UID: \"f31d8eab-f8a5-415c-a3c5-a092ab9d99e0\") "
Jan 28 17:54:44 crc kubenswrapper[4811]: I0128 17:54:44.975842 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/f31d8eab-f8a5-415c-a3c5-a092ab9d99e0-ssh-key-openstack-cell1\") pod \"f31d8eab-f8a5-415c-a3c5-a092ab9d99e0\" (UID: \"f31d8eab-f8a5-415c-a3c5-a092ab9d99e0\") "
Jan 28 17:54:44 crc kubenswrapper[4811]: I0128 17:54:44.976092 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f31d8eab-f8a5-415c-a3c5-a092ab9d99e0-ceph\") pod \"f31d8eab-f8a5-415c-a3c5-a092ab9d99e0\" (UID: \"f31d8eab-f8a5-415c-a3c5-a092ab9d99e0\") "
Jan 28 17:54:44 crc kubenswrapper[4811]: I0128 17:54:44.976133 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f31d8eab-f8a5-415c-a3c5-a092ab9d99e0-inventory\") pod \"f31d8eab-f8a5-415c-a3c5-a092ab9d99e0\" (UID: \"f31d8eab-f8a5-415c-a3c5-a092ab9d99e0\") "
Jan 28 17:54:44 crc kubenswrapper[4811]: I0128 17:54:44.982003 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f31d8eab-f8a5-415c-a3c5-a092ab9d99e0-kube-api-access-thlxn" (OuterVolumeSpecName: "kube-api-access-thlxn") pod "f31d8eab-f8a5-415c-a3c5-a092ab9d99e0" (UID: "f31d8eab-f8a5-415c-a3c5-a092ab9d99e0"). InnerVolumeSpecName "kube-api-access-thlxn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 17:54:44 crc kubenswrapper[4811]: I0128 17:54:44.986535 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f31d8eab-f8a5-415c-a3c5-a092ab9d99e0-ceph" (OuterVolumeSpecName: "ceph") pod "f31d8eab-f8a5-415c-a3c5-a092ab9d99e0" (UID: "f31d8eab-f8a5-415c-a3c5-a092ab9d99e0"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.003687 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f31d8eab-f8a5-415c-a3c5-a092ab9d99e0-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "f31d8eab-f8a5-415c-a3c5-a092ab9d99e0" (UID: "f31d8eab-f8a5-415c-a3c5-a092ab9d99e0"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.006548 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f31d8eab-f8a5-415c-a3c5-a092ab9d99e0-inventory" (OuterVolumeSpecName: "inventory") pod "f31d8eab-f8a5-415c-a3c5-a092ab9d99e0" (UID: "f31d8eab-f8a5-415c-a3c5-a092ab9d99e0"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.078468 4811 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/f31d8eab-f8a5-415c-a3c5-a092ab9d99e0-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\""
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.078501 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-thlxn\" (UniqueName: \"kubernetes.io/projected/f31d8eab-f8a5-415c-a3c5-a092ab9d99e0-kube-api-access-thlxn\") on node \"crc\" DevicePath \"\""
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.078509 4811 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f31d8eab-f8a5-415c-a3c5-a092ab9d99e0-ceph\") on node \"crc\" DevicePath \"\""
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.078521 4811 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f31d8eab-f8a5-415c-a3c5-a092ab9d99e0-inventory\") on node \"crc\" DevicePath \"\""
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.318078 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-xfg22" event={"ID":"f31d8eab-f8a5-415c-a3c5-a092ab9d99e0","Type":"ContainerDied","Data":"8cfbbb756394485d7f3ed78c54b0fa860f04e224343ec4fd5fd6f5d3e9176112"}
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.318376 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8cfbbb756394485d7f3ed78c54b0fa860f04e224343ec4fd5fd6f5d3e9176112"
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.318129 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-xfg22"
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.407276 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-openstack-openstack-cell1-vhk7d"]
Jan 28 17:54:45 crc kubenswrapper[4811]: E0128 17:54:45.407761 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f31d8eab-f8a5-415c-a3c5-a092ab9d99e0" containerName="ceph-client-openstack-openstack-cell1"
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.407776 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f31d8eab-f8a5-415c-a3c5-a092ab9d99e0" containerName="ceph-client-openstack-openstack-cell1"
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.408041 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="f31d8eab-f8a5-415c-a3c5-a092ab9d99e0" containerName="ceph-client-openstack-openstack-cell1"
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.408835 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-vhk7d"
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.414306 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-856xk"
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.414735 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.415025 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config"
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.415675 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.417119 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.430744 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-openstack-openstack-cell1-vhk7d"]
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.590645 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/ce47cd85-cbaa-476c-befa-5cbcc696525d-ssh-key-openstack-cell1\") pod \"ovn-openstack-openstack-cell1-vhk7d\" (UID: \"ce47cd85-cbaa-476c-befa-5cbcc696525d\") " pod="openstack/ovn-openstack-openstack-cell1-vhk7d"
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.590712 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ce47cd85-cbaa-476c-befa-5cbcc696525d-ceph\") pod \"ovn-openstack-openstack-cell1-vhk7d\" (UID: \"ce47cd85-cbaa-476c-befa-5cbcc696525d\") " pod="openstack/ovn-openstack-openstack-cell1-vhk7d"
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.590753 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/ce47cd85-cbaa-476c-befa-5cbcc696525d-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-vhk7d\" (UID: \"ce47cd85-cbaa-476c-befa-5cbcc696525d\") " pod="openstack/ovn-openstack-openstack-cell1-vhk7d"
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.590828 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce47cd85-cbaa-476c-befa-5cbcc696525d-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-vhk7d\" (UID: \"ce47cd85-cbaa-476c-befa-5cbcc696525d\") " pod="openstack/ovn-openstack-openstack-cell1-vhk7d"
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.590970 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwlnf\" (UniqueName: \"kubernetes.io/projected/ce47cd85-cbaa-476c-befa-5cbcc696525d-kube-api-access-rwlnf\") pod \"ovn-openstack-openstack-cell1-vhk7d\" (UID: \"ce47cd85-cbaa-476c-befa-5cbcc696525d\") " pod="openstack/ovn-openstack-openstack-cell1-vhk7d"
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.591125 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ce47cd85-cbaa-476c-befa-5cbcc696525d-inventory\") pod \"ovn-openstack-openstack-cell1-vhk7d\" (UID: \"ce47cd85-cbaa-476c-befa-5cbcc696525d\") " pod="openstack/ovn-openstack-openstack-cell1-vhk7d"
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.693066 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce47cd85-cbaa-476c-befa-5cbcc696525d-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-vhk7d\" (UID: \"ce47cd85-cbaa-476c-befa-5cbcc696525d\") " pod="openstack/ovn-openstack-openstack-cell1-vhk7d"
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.693152 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwlnf\" (UniqueName: \"kubernetes.io/projected/ce47cd85-cbaa-476c-befa-5cbcc696525d-kube-api-access-rwlnf\") pod \"ovn-openstack-openstack-cell1-vhk7d\" (UID: \"ce47cd85-cbaa-476c-befa-5cbcc696525d\") " pod="openstack/ovn-openstack-openstack-cell1-vhk7d"
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.693265 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ce47cd85-cbaa-476c-befa-5cbcc696525d-inventory\") pod \"ovn-openstack-openstack-cell1-vhk7d\" (UID: \"ce47cd85-cbaa-476c-befa-5cbcc696525d\") " pod="openstack/ovn-openstack-openstack-cell1-vhk7d"
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.693374 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/ce47cd85-cbaa-476c-befa-5cbcc696525d-ssh-key-openstack-cell1\") pod \"ovn-openstack-openstack-cell1-vhk7d\" (UID: \"ce47cd85-cbaa-476c-befa-5cbcc696525d\") " pod="openstack/ovn-openstack-openstack-cell1-vhk7d"
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.693395 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ce47cd85-cbaa-476c-befa-5cbcc696525d-ceph\") pod \"ovn-openstack-openstack-cell1-vhk7d\" (UID: \"ce47cd85-cbaa-476c-befa-5cbcc696525d\") " pod="openstack/ovn-openstack-openstack-cell1-vhk7d"
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.693419 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/ce47cd85-cbaa-476c-befa-5cbcc696525d-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-vhk7d\" (UID: \"ce47cd85-cbaa-476c-befa-5cbcc696525d\") " pod="openstack/ovn-openstack-openstack-cell1-vhk7d"
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.694400 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/ce47cd85-cbaa-476c-befa-5cbcc696525d-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-vhk7d\" (UID: \"ce47cd85-cbaa-476c-befa-5cbcc696525d\") " pod="openstack/ovn-openstack-openstack-cell1-vhk7d"
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.699119 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/ce47cd85-cbaa-476c-befa-5cbcc696525d-ssh-key-openstack-cell1\") pod \"ovn-openstack-openstack-cell1-vhk7d\" (UID: \"ce47cd85-cbaa-476c-befa-5cbcc696525d\") " pod="openstack/ovn-openstack-openstack-cell1-vhk7d"
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.700500 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce47cd85-cbaa-476c-befa-5cbcc696525d-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-vhk7d\" (UID: \"ce47cd85-cbaa-476c-befa-5cbcc696525d\") " pod="openstack/ovn-openstack-openstack-cell1-vhk7d"
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.704317 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ce47cd85-cbaa-476c-befa-5cbcc696525d-ceph\") pod \"ovn-openstack-openstack-cell1-vhk7d\" (UID: \"ce47cd85-cbaa-476c-befa-5cbcc696525d\") " pod="openstack/ovn-openstack-openstack-cell1-vhk7d"
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.708519 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ce47cd85-cbaa-476c-befa-5cbcc696525d-inventory\") pod \"ovn-openstack-openstack-cell1-vhk7d\" (UID: \"ce47cd85-cbaa-476c-befa-5cbcc696525d\") " pod="openstack/ovn-openstack-openstack-cell1-vhk7d"
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.710373 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwlnf\" (UniqueName: \"kubernetes.io/projected/ce47cd85-cbaa-476c-befa-5cbcc696525d-kube-api-access-rwlnf\") pod \"ovn-openstack-openstack-cell1-vhk7d\" (UID: \"ce47cd85-cbaa-476c-befa-5cbcc696525d\") " pod="openstack/ovn-openstack-openstack-cell1-vhk7d"
Jan 28 17:54:45 crc kubenswrapper[4811]: I0128 17:54:45.731048 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-vhk7d"
Jan 28 17:54:46 crc kubenswrapper[4811]: I0128 17:54:46.293392 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-openstack-openstack-cell1-vhk7d"]
Jan 28 17:54:46 crc kubenswrapper[4811]: I0128 17:54:46.326754 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-vhk7d" event={"ID":"ce47cd85-cbaa-476c-befa-5cbcc696525d","Type":"ContainerStarted","Data":"084c9bd191e3f7c75272e0355d89508d1dcb14745a03067f99b5de72e2985183"}
Jan 28 17:54:47 crc kubenswrapper[4811]: I0128 17:54:47.341578 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-vhk7d" event={"ID":"ce47cd85-cbaa-476c-befa-5cbcc696525d","Type":"ContainerStarted","Data":"4d4ed2f7eb59f2739da330715073e98e4e3c4fefdeac334883d38f63a146a715"}
Jan 28 17:54:47 crc kubenswrapper[4811]: I0128 17:54:47.373273 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-openstack-openstack-cell1-vhk7d" podStartSLOduration=1.9884278850000001 podStartE2EDuration="2.373255555s" podCreationTimestamp="2026-01-28 17:54:45 +0000 UTC" firstStartedPulling="2026-01-28 17:54:46.296983939 +0000 UTC m=+7779.051347522" lastFinishedPulling="2026-01-28 17:54:46.681811569 +0000 UTC m=+7779.436175192" observedRunningTime="2026-01-28 17:54:47.36899753 +0000 UTC m=+7780.123361113" watchObservedRunningTime="2026-01-28 17:54:47.373255555 +0000 UTC m=+7780.127619158"
Jan 28 17:54:57 crc kubenswrapper[4811]: I0128 17:54:57.339803 4811 scope.go:117] "RemoveContainer" containerID="90375971f2c5a7938073dc9ee633c0e5f78e2d3a0ea787115d60e65aaaecf886"
Jan 28 17:54:57 crc kubenswrapper[4811]: E0128 17:54:57.340832 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:55:10 crc kubenswrapper[4811]: I0128 17:55:10.341190 4811 scope.go:117] "RemoveContainer" containerID="90375971f2c5a7938073dc9ee633c0e5f78e2d3a0ea787115d60e65aaaecf886"
Jan 28 17:55:10 crc kubenswrapper[4811]: E0128 17:55:10.342027 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:55:25 crc kubenswrapper[4811]: I0128 17:55:25.340222 4811 scope.go:117] "RemoveContainer" containerID="90375971f2c5a7938073dc9ee633c0e5f78e2d3a0ea787115d60e65aaaecf886"
Jan 28 17:55:25 crc kubenswrapper[4811]: E0128 17:55:25.340991 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:55:38 crc kubenswrapper[4811]: I0128 17:55:38.346475 4811 scope.go:117] "RemoveContainer" containerID="90375971f2c5a7938073dc9ee633c0e5f78e2d3a0ea787115d60e65aaaecf886"
Jan 28 17:55:38 crc kubenswrapper[4811]: E0128 17:55:38.347256 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:55:48 crc kubenswrapper[4811]: I0128 17:55:48.994985 4811 generic.go:334] "Generic (PLEG): container finished" podID="ce47cd85-cbaa-476c-befa-5cbcc696525d" containerID="4d4ed2f7eb59f2739da330715073e98e4e3c4fefdeac334883d38f63a146a715" exitCode=0
Jan 28 17:55:48 crc kubenswrapper[4811]: I0128 17:55:48.995062 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-vhk7d" event={"ID":"ce47cd85-cbaa-476c-befa-5cbcc696525d","Type":"ContainerDied","Data":"4d4ed2f7eb59f2739da330715073e98e4e3c4fefdeac334883d38f63a146a715"}
Jan 28 17:55:50 crc kubenswrapper[4811]: I0128 17:55:50.339198 4811 scope.go:117] "RemoveContainer" containerID="90375971f2c5a7938073dc9ee633c0e5f78e2d3a0ea787115d60e65aaaecf886"
Jan 28 17:55:50 crc kubenswrapper[4811]: E0128 17:55:50.339797 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 17:55:50 crc kubenswrapper[4811]: I0128 17:55:50.450297 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-vhk7d"
Jan 28 17:55:50 crc kubenswrapper[4811]: I0128 17:55:50.631626 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce47cd85-cbaa-476c-befa-5cbcc696525d-ovn-combined-ca-bundle\") pod \"ce47cd85-cbaa-476c-befa-5cbcc696525d\" (UID: \"ce47cd85-cbaa-476c-befa-5cbcc696525d\") "
Jan 28 17:55:50 crc kubenswrapper[4811]: I0128 17:55:50.632874 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ce47cd85-cbaa-476c-befa-5cbcc696525d-ceph\") pod \"ce47cd85-cbaa-476c-befa-5cbcc696525d\" (UID: \"ce47cd85-cbaa-476c-befa-5cbcc696525d\") "
Jan 28 17:55:50 crc kubenswrapper[4811]: I0128 17:55:50.633117 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/ce47cd85-cbaa-476c-befa-5cbcc696525d-ovncontroller-config-0\") pod \"ce47cd85-cbaa-476c-befa-5cbcc696525d\" (UID: \"ce47cd85-cbaa-476c-befa-5cbcc696525d\") "
Jan 28 17:55:50 crc kubenswrapper[4811]: I0128 17:55:50.633185 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ce47cd85-cbaa-476c-befa-5cbcc696525d-inventory\") pod \"ce47cd85-cbaa-476c-befa-5cbcc696525d\" (UID: \"ce47cd85-cbaa-476c-befa-5cbcc696525d\") "
Jan 28 17:55:50 crc kubenswrapper[4811]: I0128 17:55:50.633256 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/ce47cd85-cbaa-476c-befa-5cbcc696525d-ssh-key-openstack-cell1\") pod \"ce47cd85-cbaa-476c-befa-5cbcc696525d\" (UID: \"ce47cd85-cbaa-476c-befa-5cbcc696525d\") "
Jan 28 17:55:50 crc kubenswrapper[4811]: I0128 17:55:50.633355 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rwlnf\" (UniqueName: \"kubernetes.io/projected/ce47cd85-cbaa-476c-befa-5cbcc696525d-kube-api-access-rwlnf\") pod \"ce47cd85-cbaa-476c-befa-5cbcc696525d\" (UID: \"ce47cd85-cbaa-476c-befa-5cbcc696525d\") "
Jan 28 17:55:50 crc kubenswrapper[4811]: I0128 17:55:50.639302 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce47cd85-cbaa-476c-befa-5cbcc696525d-kube-api-access-rwlnf" (OuterVolumeSpecName: "kube-api-access-rwlnf") pod "ce47cd85-cbaa-476c-befa-5cbcc696525d" (UID: "ce47cd85-cbaa-476c-befa-5cbcc696525d"). InnerVolumeSpecName "kube-api-access-rwlnf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 17:55:50 crc kubenswrapper[4811]: I0128 17:55:50.639839 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce47cd85-cbaa-476c-befa-5cbcc696525d-ceph" (OuterVolumeSpecName: "ceph") pod "ce47cd85-cbaa-476c-befa-5cbcc696525d" (UID: "ce47cd85-cbaa-476c-befa-5cbcc696525d"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:55:50 crc kubenswrapper[4811]: I0128 17:55:50.641829 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce47cd85-cbaa-476c-befa-5cbcc696525d-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "ce47cd85-cbaa-476c-befa-5cbcc696525d" (UID: "ce47cd85-cbaa-476c-befa-5cbcc696525d"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:55:50 crc kubenswrapper[4811]: I0128 17:55:50.664407 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce47cd85-cbaa-476c-befa-5cbcc696525d-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "ce47cd85-cbaa-476c-befa-5cbcc696525d" (UID: "ce47cd85-cbaa-476c-befa-5cbcc696525d"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:55:50 crc kubenswrapper[4811]: I0128 17:55:50.668266 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ce47cd85-cbaa-476c-befa-5cbcc696525d-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "ce47cd85-cbaa-476c-befa-5cbcc696525d" (UID: "ce47cd85-cbaa-476c-befa-5cbcc696525d"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 17:55:50 crc kubenswrapper[4811]: I0128 17:55:50.670949 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce47cd85-cbaa-476c-befa-5cbcc696525d-inventory" (OuterVolumeSpecName: "inventory") pod "ce47cd85-cbaa-476c-befa-5cbcc696525d" (UID: "ce47cd85-cbaa-476c-befa-5cbcc696525d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 17:55:50 crc kubenswrapper[4811]: I0128 17:55:50.736866 4811 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/ce47cd85-cbaa-476c-befa-5cbcc696525d-ovncontroller-config-0\") on node \"crc\" DevicePath \"\""
Jan 28 17:55:50 crc kubenswrapper[4811]: I0128 17:55:50.736911 4811 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ce47cd85-cbaa-476c-befa-5cbcc696525d-inventory\") on node \"crc\" DevicePath \"\""
Jan 28 17:55:50 crc kubenswrapper[4811]: I0128 17:55:50.736925 4811 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/ce47cd85-cbaa-476c-befa-5cbcc696525d-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\""
Jan 28 17:55:50 crc kubenswrapper[4811]: I0128 17:55:50.736938 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rwlnf\" (UniqueName: \"kubernetes.io/projected/ce47cd85-cbaa-476c-befa-5cbcc696525d-kube-api-access-rwlnf\") on node \"crc\" DevicePath \"\""
Jan 28 17:55:50 crc kubenswrapper[4811]: I0128 17:55:50.737024 4811 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce47cd85-cbaa-476c-befa-5cbcc696525d-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 17:55:50 crc kubenswrapper[4811]: I0128 17:55:50.737038 4811 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ce47cd85-cbaa-476c-befa-5cbcc696525d-ceph\") on node \"crc\" DevicePath \"\""
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.043424 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-vhk7d"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.043414 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-vhk7d" event={"ID":"ce47cd85-cbaa-476c-befa-5cbcc696525d","Type":"ContainerDied","Data":"084c9bd191e3f7c75272e0355d89508d1dcb14745a03067f99b5de72e2985183"}
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.043626 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="084c9bd191e3f7c75272e0355d89508d1dcb14745a03067f99b5de72e2985183"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.127121 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-mcwdt"]
Jan 28 17:55:51 crc kubenswrapper[4811]: E0128 17:55:51.127628 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce47cd85-cbaa-476c-befa-5cbcc696525d" containerName="ovn-openstack-openstack-cell1"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.127645 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce47cd85-cbaa-476c-befa-5cbcc696525d" containerName="ovn-openstack-openstack-cell1"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.127880 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce47cd85-cbaa-476c-befa-5cbcc696525d" containerName="ovn-openstack-openstack-cell1"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.128622 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-mcwdt"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.138380 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.138626 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.138755 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.139184 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.139293 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-856xk"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.139493 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.140686 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-mcwdt"]
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.246686 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-mcwdt\" (UID: \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mcwdt"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.246750 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-ssh-key-openstack-cell1\") pod \"neutron-metadata-openstack-openstack-cell1-mcwdt\" (UID: \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mcwdt"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.246820 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-mcwdt\" (UID: \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mcwdt"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.246890 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zbg8r\" (UniqueName: \"kubernetes.io/projected/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-kube-api-access-zbg8r\") pod \"neutron-metadata-openstack-openstack-cell1-mcwdt\" (UID: \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mcwdt"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.246950 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-mcwdt\" (UID: \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mcwdt"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.247007 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-mcwdt\" (UID: \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mcwdt"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.247060 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-mcwdt\" (UID: \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mcwdt"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.348580 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-mcwdt\" (UID: \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mcwdt"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.348700 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-mcwdt\" (UID: \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mcwdt"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.348728 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-ssh-key-openstack-cell1\") pod \"neutron-metadata-openstack-openstack-cell1-mcwdt\" (UID: \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mcwdt"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.348773 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-mcwdt\" (UID: \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mcwdt"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.348811 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zbg8r\" (UniqueName: \"kubernetes.io/projected/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-kube-api-access-zbg8r\") pod \"neutron-metadata-openstack-openstack-cell1-mcwdt\" (UID: \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mcwdt"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.348848 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-mcwdt\" (UID: \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mcwdt"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.348879 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-mcwdt\" (UID: \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mcwdt"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.352326 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-mcwdt\" (UID: \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mcwdt"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.352692 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-mcwdt\" (UID: \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mcwdt"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.352830 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-mcwdt\" (UID: \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mcwdt"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.356939 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-mcwdt\" (UID: \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mcwdt"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.356979 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-ssh-key-openstack-cell1\") pod \"neutron-metadata-openstack-openstack-cell1-mcwdt\" (UID: \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mcwdt"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.357314 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-mcwdt\" (UID: \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mcwdt"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.369035 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zbg8r\" (UniqueName: \"kubernetes.io/projected/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-kube-api-access-zbg8r\") pod \"neutron-metadata-openstack-openstack-cell1-mcwdt\" (UID: \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mcwdt"
Jan 28 17:55:51 crc kubenswrapper[4811]: I0128 17:55:51.445796 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-mcwdt"
Jan 28 17:55:52 crc kubenswrapper[4811]: I0128 17:55:52.001867 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-mcwdt"]
Jan 28 17:55:52 crc kubenswrapper[4811]: I0128 17:55:52.056650 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-mcwdt" event={"ID":"9217c9b6-59ad-41ca-afa6-b11cb6a1611a","Type":"ContainerStarted","Data":"1e59a3d7d72309a52ff394f65721cf73ce976f4586d242f0393adce17dcc06b9"}
Jan 28 17:55:53 crc kubenswrapper[4811]: I0128 17:55:53.068736 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-mcwdt" event={"ID":"9217c9b6-59ad-41ca-afa6-b11cb6a1611a","Type":"ContainerStarted","Data":"f4cd7699743b53b8619e7dbf5560db2911b4643f87c189597b140519b4712740"}
Jan 28 17:55:53 crc kubenswrapper[4811]: I0128 17:55:53.085345 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-openstack-openstack-cell1-mcwdt" podStartSLOduration=1.6416099769999999 podStartE2EDuration="2.085327542s" podCreationTimestamp="2026-01-28 17:55:51 +0000 UTC" firstStartedPulling="2026-01-28 17:55:52.004929713 +0000 UTC m=+7844.759293296" lastFinishedPulling="2026-01-28 17:55:52.448647278 +0000 UTC m=+7845.203010861" observedRunningTime="2026-01-28 17:55:53.083947305 +0000 UTC m=+7845.838310908" watchObservedRunningTime="2026-01-28 17:55:53.085327542 +0000 UTC m=+7845.839691125"
Jan 28 17:56:01 crc kubenswrapper[4811]: I0128 17:56:01.339831 4811 scope.go:117] "RemoveContainer" containerID="90375971f2c5a7938073dc9ee633c0e5f78e2d3a0ea787115d60e65aaaecf886"
Jan
28 17:56:01 crc kubenswrapper[4811]: E0128 17:56:01.340608 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:56:14 crc kubenswrapper[4811]: I0128 17:56:14.339625 4811 scope.go:117] "RemoveContainer" containerID="90375971f2c5a7938073dc9ee633c0e5f78e2d3a0ea787115d60e65aaaecf886" Jan 28 17:56:14 crc kubenswrapper[4811]: E0128 17:56:14.340448 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:56:28 crc kubenswrapper[4811]: I0128 17:56:28.348057 4811 scope.go:117] "RemoveContainer" containerID="90375971f2c5a7938073dc9ee633c0e5f78e2d3a0ea787115d60e65aaaecf886" Jan 28 17:56:28 crc kubenswrapper[4811]: E0128 17:56:28.349037 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:56:40 crc kubenswrapper[4811]: I0128 17:56:40.339837 4811 scope.go:117] "RemoveContainer" containerID="90375971f2c5a7938073dc9ee633c0e5f78e2d3a0ea787115d60e65aaaecf886" Jan 28 17:56:40 crc kubenswrapper[4811]: E0128 17:56:40.340662 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:56:41 crc kubenswrapper[4811]: I0128 17:56:41.503826 4811 generic.go:334] "Generic (PLEG): container finished" podID="9217c9b6-59ad-41ca-afa6-b11cb6a1611a" containerID="f4cd7699743b53b8619e7dbf5560db2911b4643f87c189597b140519b4712740" exitCode=0 Jan 28 17:56:41 crc kubenswrapper[4811]: I0128 17:56:41.503929 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-mcwdt" event={"ID":"9217c9b6-59ad-41ca-afa6-b11cb6a1611a","Type":"ContainerDied","Data":"f4cd7699743b53b8619e7dbf5560db2911b4643f87c189597b140519b4712740"} Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.049714 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-mcwdt" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.172009 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zbg8r\" (UniqueName: \"kubernetes.io/projected/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-kube-api-access-zbg8r\") pod \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\" (UID: \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\") " Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.172142 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-neutron-ovn-metadata-agent-neutron-config-0\") pod \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\" (UID: \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\") " Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.172169 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-inventory\") pod \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\" (UID: \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\") " Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.172221 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-neutron-metadata-combined-ca-bundle\") pod \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\" (UID: \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\") " Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.172272 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-nova-metadata-neutron-config-0\") pod \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\" (UID: \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\") " Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.172324 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-ceph\") pod \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\" (UID: \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\") " Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.172379 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-ssh-key-openstack-cell1\") pod \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\" (UID: \"9217c9b6-59ad-41ca-afa6-b11cb6a1611a\") " Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.179100 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "9217c9b6-59ad-41ca-afa6-b11cb6a1611a" (UID: "9217c9b6-59ad-41ca-afa6-b11cb6a1611a"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.179716 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-ceph" (OuterVolumeSpecName: "ceph") pod "9217c9b6-59ad-41ca-afa6-b11cb6a1611a" (UID: "9217c9b6-59ad-41ca-afa6-b11cb6a1611a"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.179098 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-kube-api-access-zbg8r" (OuterVolumeSpecName: "kube-api-access-zbg8r") pod "9217c9b6-59ad-41ca-afa6-b11cb6a1611a" (UID: "9217c9b6-59ad-41ca-afa6-b11cb6a1611a"). InnerVolumeSpecName "kube-api-access-zbg8r". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.203813 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "9217c9b6-59ad-41ca-afa6-b11cb6a1611a" (UID: "9217c9b6-59ad-41ca-afa6-b11cb6a1611a"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.204996 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "9217c9b6-59ad-41ca-afa6-b11cb6a1611a" (UID: "9217c9b6-59ad-41ca-afa6-b11cb6a1611a"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.205519 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-inventory" (OuterVolumeSpecName: "inventory") pod "9217c9b6-59ad-41ca-afa6-b11cb6a1611a" (UID: "9217c9b6-59ad-41ca-afa6-b11cb6a1611a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.206075 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "9217c9b6-59ad-41ca-afa6-b11cb6a1611a" (UID: "9217c9b6-59ad-41ca-afa6-b11cb6a1611a"). InnerVolumeSpecName "ssh-key-openstack-cell1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.276836 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zbg8r\" (UniqueName: \"kubernetes.io/projected/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-kube-api-access-zbg8r\") on node \"crc\" DevicePath \"\"" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.276881 4811 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.276898 4811 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-inventory\") on node \"crc\" DevicePath \"\"" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.276913 4811 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.276928 4811 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.276940 4811 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-ceph\") on node \"crc\" DevicePath \"\"" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.276952 4811 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/9217c9b6-59ad-41ca-afa6-b11cb6a1611a-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.526308 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-mcwdt" event={"ID":"9217c9b6-59ad-41ca-afa6-b11cb6a1611a","Type":"ContainerDied","Data":"1e59a3d7d72309a52ff394f65721cf73ce976f4586d242f0393adce17dcc06b9"} Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.526635 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1e59a3d7d72309a52ff394f65721cf73ce976f4586d242f0393adce17dcc06b9" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.526394 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-mcwdt" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.637042 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-rszk6"] Jan 28 17:56:43 crc kubenswrapper[4811]: E0128 17:56:43.637704 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9217c9b6-59ad-41ca-afa6-b11cb6a1611a" containerName="neutron-metadata-openstack-openstack-cell1" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.637728 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="9217c9b6-59ad-41ca-afa6-b11cb6a1611a" containerName="neutron-metadata-openstack-openstack-cell1" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.637994 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="9217c9b6-59ad-41ca-afa6-b11cb6a1611a" containerName="neutron-metadata-openstack-openstack-cell1" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.639005 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-rszk6" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.641104 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.641152 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.643579 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.643690 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.645529 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-856xk" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.650629 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-rszk6"] Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.786050 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b6ea447f-728c-4c66-b2d3-04009c00e284-ceph\") pod \"libvirt-openstack-openstack-cell1-rszk6\" (UID: \"b6ea447f-728c-4c66-b2d3-04009c00e284\") " pod="openstack/libvirt-openstack-openstack-cell1-rszk6" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.786357 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6ea447f-728c-4c66-b2d3-04009c00e284-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-rszk6\" (UID: \"b6ea447f-728c-4c66-b2d3-04009c00e284\") " pod="openstack/libvirt-openstack-openstack-cell1-rszk6" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.786523 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/b6ea447f-728c-4c66-b2d3-04009c00e284-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-rszk6\" (UID: \"b6ea447f-728c-4c66-b2d3-04009c00e284\") " pod="openstack/libvirt-openstack-openstack-cell1-rszk6" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.786678 4811 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2vp6\" (UniqueName: \"kubernetes.io/projected/b6ea447f-728c-4c66-b2d3-04009c00e284-kube-api-access-s2vp6\") pod \"libvirt-openstack-openstack-cell1-rszk6\" (UID: \"b6ea447f-728c-4c66-b2d3-04009c00e284\") " pod="openstack/libvirt-openstack-openstack-cell1-rszk6" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.786864 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b6ea447f-728c-4c66-b2d3-04009c00e284-inventory\") pod \"libvirt-openstack-openstack-cell1-rszk6\" (UID: \"b6ea447f-728c-4c66-b2d3-04009c00e284\") " pod="openstack/libvirt-openstack-openstack-cell1-rszk6" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.786983 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/b6ea447f-728c-4c66-b2d3-04009c00e284-ssh-key-openstack-cell1\") pod \"libvirt-openstack-openstack-cell1-rszk6\" (UID: \"b6ea447f-728c-4c66-b2d3-04009c00e284\") " pod="openstack/libvirt-openstack-openstack-cell1-rszk6" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.888876 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b6ea447f-728c-4c66-b2d3-04009c00e284-ceph\") pod \"libvirt-openstack-openstack-cell1-rszk6\" (UID: \"b6ea447f-728c-4c66-b2d3-04009c00e284\") " pod="openstack/libvirt-openstack-openstack-cell1-rszk6" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.889232 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6ea447f-728c-4c66-b2d3-04009c00e284-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-rszk6\" (UID: \"b6ea447f-728c-4c66-b2d3-04009c00e284\") " pod="openstack/libvirt-openstack-openstack-cell1-rszk6" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.889808 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/b6ea447f-728c-4c66-b2d3-04009c00e284-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-rszk6\" (UID: \"b6ea447f-728c-4c66-b2d3-04009c00e284\") " pod="openstack/libvirt-openstack-openstack-cell1-rszk6" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.889886 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2vp6\" (UniqueName: \"kubernetes.io/projected/b6ea447f-728c-4c66-b2d3-04009c00e284-kube-api-access-s2vp6\") pod \"libvirt-openstack-openstack-cell1-rszk6\" (UID: \"b6ea447f-728c-4c66-b2d3-04009c00e284\") " pod="openstack/libvirt-openstack-openstack-cell1-rszk6" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.890030 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b6ea447f-728c-4c66-b2d3-04009c00e284-inventory\") pod \"libvirt-openstack-openstack-cell1-rszk6\" (UID: \"b6ea447f-728c-4c66-b2d3-04009c00e284\") " pod="openstack/libvirt-openstack-openstack-cell1-rszk6" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.890054 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/b6ea447f-728c-4c66-b2d3-04009c00e284-ssh-key-openstack-cell1\") pod 
\"libvirt-openstack-openstack-cell1-rszk6\" (UID: \"b6ea447f-728c-4c66-b2d3-04009c00e284\") " pod="openstack/libvirt-openstack-openstack-cell1-rszk6" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.895238 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/b6ea447f-728c-4c66-b2d3-04009c00e284-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-rszk6\" (UID: \"b6ea447f-728c-4c66-b2d3-04009c00e284\") " pod="openstack/libvirt-openstack-openstack-cell1-rszk6" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.895794 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b6ea447f-728c-4c66-b2d3-04009c00e284-inventory\") pod \"libvirt-openstack-openstack-cell1-rszk6\" (UID: \"b6ea447f-728c-4c66-b2d3-04009c00e284\") " pod="openstack/libvirt-openstack-openstack-cell1-rszk6" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.896269 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6ea447f-728c-4c66-b2d3-04009c00e284-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-rszk6\" (UID: \"b6ea447f-728c-4c66-b2d3-04009c00e284\") " pod="openstack/libvirt-openstack-openstack-cell1-rszk6" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.898251 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/b6ea447f-728c-4c66-b2d3-04009c00e284-ssh-key-openstack-cell1\") pod \"libvirt-openstack-openstack-cell1-rszk6\" (UID: \"b6ea447f-728c-4c66-b2d3-04009c00e284\") " pod="openstack/libvirt-openstack-openstack-cell1-rszk6" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.904838 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b6ea447f-728c-4c66-b2d3-04009c00e284-ceph\") pod \"libvirt-openstack-openstack-cell1-rszk6\" (UID: \"b6ea447f-728c-4c66-b2d3-04009c00e284\") " pod="openstack/libvirt-openstack-openstack-cell1-rszk6" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.907595 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2vp6\" (UniqueName: \"kubernetes.io/projected/b6ea447f-728c-4c66-b2d3-04009c00e284-kube-api-access-s2vp6\") pod \"libvirt-openstack-openstack-cell1-rszk6\" (UID: \"b6ea447f-728c-4c66-b2d3-04009c00e284\") " pod="openstack/libvirt-openstack-openstack-cell1-rszk6" Jan 28 17:56:43 crc kubenswrapper[4811]: I0128 17:56:43.959482 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-rszk6" Jan 28 17:56:44 crc kubenswrapper[4811]: I0128 17:56:44.527862 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-rszk6"] Jan 28 17:56:44 crc kubenswrapper[4811]: W0128 17:56:44.530364 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb6ea447f_728c_4c66_b2d3_04009c00e284.slice/crio-cd59746767bbb2c130a65d395716c16a29ca2bc18f5e65868671f375ffd538d6 WatchSource:0}: Error finding container cd59746767bbb2c130a65d395716c16a29ca2bc18f5e65868671f375ffd538d6: Status 404 returned error can't find the container with id cd59746767bbb2c130a65d395716c16a29ca2bc18f5e65868671f375ffd538d6 Jan 28 17:56:45 crc kubenswrapper[4811]: I0128 17:56:45.551097 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-rszk6" event={"ID":"b6ea447f-728c-4c66-b2d3-04009c00e284","Type":"ContainerStarted","Data":"cd59746767bbb2c130a65d395716c16a29ca2bc18f5e65868671f375ffd538d6"} Jan 28 17:56:47 crc kubenswrapper[4811]: I0128 17:56:47.570791 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-rszk6" event={"ID":"b6ea447f-728c-4c66-b2d3-04009c00e284","Type":"ContainerStarted","Data":"8525619e0c264cd9183521e22405b76d77db0d6635a7ff9993aa518f5cc618a3"} Jan 28 17:56:47 crc kubenswrapper[4811]: I0128 17:56:47.594786 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-openstack-openstack-cell1-rszk6" podStartSLOduration=2.284572304 podStartE2EDuration="4.594765562s" podCreationTimestamp="2026-01-28 17:56:43 +0000 UTC" firstStartedPulling="2026-01-28 17:56:44.53584058 +0000 UTC m=+7897.290204163" lastFinishedPulling="2026-01-28 17:56:46.846033838 +0000 UTC m=+7899.600397421" observedRunningTime="2026-01-28 17:56:47.584598758 +0000 UTC m=+7900.338962351" watchObservedRunningTime="2026-01-28 17:56:47.594765562 +0000 UTC m=+7900.349129145" Jan 28 17:56:55 crc kubenswrapper[4811]: I0128 17:56:55.339788 4811 scope.go:117] "RemoveContainer" containerID="90375971f2c5a7938073dc9ee633c0e5f78e2d3a0ea787115d60e65aaaecf886" Jan 28 17:56:55 crc kubenswrapper[4811]: E0128 17:56:55.340666 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:57:08 crc kubenswrapper[4811]: I0128 17:57:08.348393 4811 scope.go:117] "RemoveContainer" containerID="90375971f2c5a7938073dc9ee633c0e5f78e2d3a0ea787115d60e65aaaecf886" Jan 28 17:57:08 crc kubenswrapper[4811]: E0128 17:57:08.349568 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:57:20 crc kubenswrapper[4811]: I0128 17:57:20.340144 4811 scope.go:117] "RemoveContainer" 
containerID="90375971f2c5a7938073dc9ee633c0e5f78e2d3a0ea787115d60e65aaaecf886" Jan 28 17:57:20 crc kubenswrapper[4811]: E0128 17:57:20.340982 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:57:34 crc kubenswrapper[4811]: I0128 17:57:34.340121 4811 scope.go:117] "RemoveContainer" containerID="90375971f2c5a7938073dc9ee633c0e5f78e2d3a0ea787115d60e65aaaecf886" Jan 28 17:57:34 crc kubenswrapper[4811]: E0128 17:57:34.341276 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:57:45 crc kubenswrapper[4811]: I0128 17:57:45.340966 4811 scope.go:117] "RemoveContainer" containerID="90375971f2c5a7938073dc9ee633c0e5f78e2d3a0ea787115d60e65aaaecf886" Jan 28 17:57:45 crc kubenswrapper[4811]: E0128 17:57:45.341984 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:57:56 crc kubenswrapper[4811]: I0128 17:57:56.340145 4811 scope.go:117] "RemoveContainer" containerID="90375971f2c5a7938073dc9ee633c0e5f78e2d3a0ea787115d60e65aaaecf886" Jan 28 17:57:56 crc kubenswrapper[4811]: E0128 17:57:56.341287 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 17:58:08 crc kubenswrapper[4811]: I0128 17:58:08.349530 4811 scope.go:117] "RemoveContainer" containerID="90375971f2c5a7938073dc9ee633c0e5f78e2d3a0ea787115d60e65aaaecf886" Jan 28 17:58:09 crc kubenswrapper[4811]: I0128 17:58:09.457345 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"3c50e1a118002ad5673703280109cdd35f9243f49b098e3b9e9d8b0f7d945e39"} Jan 28 18:00:00 crc kubenswrapper[4811]: I0128 18:00:00.162577 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493720-6nwxl"] Jan 28 18:00:00 crc kubenswrapper[4811]: I0128 18:00:00.164978 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493720-6nwxl" Jan 28 18:00:00 crc kubenswrapper[4811]: I0128 18:00:00.167503 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 28 18:00:00 crc kubenswrapper[4811]: I0128 18:00:00.170490 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 28 18:00:00 crc kubenswrapper[4811]: I0128 18:00:00.174415 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493720-6nwxl"] Jan 28 18:00:00 crc kubenswrapper[4811]: I0128 18:00:00.342731 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/31f35550-f5f7-44cb-99fc-a2371e1ee3e4-config-volume\") pod \"collect-profiles-29493720-6nwxl\" (UID: \"31f35550-f5f7-44cb-99fc-a2371e1ee3e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493720-6nwxl" Jan 28 18:00:00 crc kubenswrapper[4811]: I0128 18:00:00.342857 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56nr7\" (UniqueName: \"kubernetes.io/projected/31f35550-f5f7-44cb-99fc-a2371e1ee3e4-kube-api-access-56nr7\") pod \"collect-profiles-29493720-6nwxl\" (UID: \"31f35550-f5f7-44cb-99fc-a2371e1ee3e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493720-6nwxl" Jan 28 18:00:00 crc kubenswrapper[4811]: I0128 18:00:00.342882 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/31f35550-f5f7-44cb-99fc-a2371e1ee3e4-secret-volume\") pod \"collect-profiles-29493720-6nwxl\" (UID: \"31f35550-f5f7-44cb-99fc-a2371e1ee3e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493720-6nwxl" Jan 28 18:00:00 crc kubenswrapper[4811]: I0128 18:00:00.444639 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/31f35550-f5f7-44cb-99fc-a2371e1ee3e4-config-volume\") pod \"collect-profiles-29493720-6nwxl\" (UID: \"31f35550-f5f7-44cb-99fc-a2371e1ee3e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493720-6nwxl" Jan 28 18:00:00 crc kubenswrapper[4811]: I0128 18:00:00.444736 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56nr7\" (UniqueName: \"kubernetes.io/projected/31f35550-f5f7-44cb-99fc-a2371e1ee3e4-kube-api-access-56nr7\") pod \"collect-profiles-29493720-6nwxl\" (UID: \"31f35550-f5f7-44cb-99fc-a2371e1ee3e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493720-6nwxl" Jan 28 18:00:00 crc kubenswrapper[4811]: I0128 18:00:00.444757 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/31f35550-f5f7-44cb-99fc-a2371e1ee3e4-secret-volume\") pod \"collect-profiles-29493720-6nwxl\" (UID: \"31f35550-f5f7-44cb-99fc-a2371e1ee3e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493720-6nwxl" Jan 28 18:00:00 crc kubenswrapper[4811]: I0128 18:00:00.445900 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/31f35550-f5f7-44cb-99fc-a2371e1ee3e4-config-volume\") pod 
\"collect-profiles-29493720-6nwxl\" (UID: \"31f35550-f5f7-44cb-99fc-a2371e1ee3e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493720-6nwxl" Jan 28 18:00:00 crc kubenswrapper[4811]: I0128 18:00:00.453205 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/31f35550-f5f7-44cb-99fc-a2371e1ee3e4-secret-volume\") pod \"collect-profiles-29493720-6nwxl\" (UID: \"31f35550-f5f7-44cb-99fc-a2371e1ee3e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493720-6nwxl" Jan 28 18:00:00 crc kubenswrapper[4811]: I0128 18:00:00.462473 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56nr7\" (UniqueName: \"kubernetes.io/projected/31f35550-f5f7-44cb-99fc-a2371e1ee3e4-kube-api-access-56nr7\") pod \"collect-profiles-29493720-6nwxl\" (UID: \"31f35550-f5f7-44cb-99fc-a2371e1ee3e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493720-6nwxl" Jan 28 18:00:00 crc kubenswrapper[4811]: I0128 18:00:00.486476 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493720-6nwxl" Jan 28 18:00:00 crc kubenswrapper[4811]: I0128 18:00:00.987914 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493720-6nwxl"] Jan 28 18:00:01 crc kubenswrapper[4811]: I0128 18:00:01.577046 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493720-6nwxl" event={"ID":"31f35550-f5f7-44cb-99fc-a2371e1ee3e4","Type":"ContainerStarted","Data":"5018d6a037b2b347e53bda518ce00bb73162394febea864ffdd6a2878ecc4f16"} Jan 28 18:00:02 crc kubenswrapper[4811]: I0128 18:00:02.587229 4811 generic.go:334] "Generic (PLEG): container finished" podID="31f35550-f5f7-44cb-99fc-a2371e1ee3e4" containerID="43d8a2d59e2d539721ecede20b7258da11132293029db55d2bd756fe9c3d3b4c" exitCode=0 Jan 28 18:00:02 crc kubenswrapper[4811]: I0128 18:00:02.587339 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493720-6nwxl" event={"ID":"31f35550-f5f7-44cb-99fc-a2371e1ee3e4","Type":"ContainerDied","Data":"43d8a2d59e2d539721ecede20b7258da11132293029db55d2bd756fe9c3d3b4c"} Jan 28 18:00:03 crc kubenswrapper[4811]: I0128 18:00:03.959775 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493720-6nwxl" Jan 28 18:00:04 crc kubenswrapper[4811]: I0128 18:00:04.122255 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-56nr7\" (UniqueName: \"kubernetes.io/projected/31f35550-f5f7-44cb-99fc-a2371e1ee3e4-kube-api-access-56nr7\") pod \"31f35550-f5f7-44cb-99fc-a2371e1ee3e4\" (UID: \"31f35550-f5f7-44cb-99fc-a2371e1ee3e4\") " Jan 28 18:00:04 crc kubenswrapper[4811]: I0128 18:00:04.122361 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/31f35550-f5f7-44cb-99fc-a2371e1ee3e4-secret-volume\") pod \"31f35550-f5f7-44cb-99fc-a2371e1ee3e4\" (UID: \"31f35550-f5f7-44cb-99fc-a2371e1ee3e4\") " Jan 28 18:00:04 crc kubenswrapper[4811]: I0128 18:00:04.122518 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/31f35550-f5f7-44cb-99fc-a2371e1ee3e4-config-volume\") pod \"31f35550-f5f7-44cb-99fc-a2371e1ee3e4\" (UID: \"31f35550-f5f7-44cb-99fc-a2371e1ee3e4\") " Jan 28 18:00:04 crc kubenswrapper[4811]: I0128 18:00:04.123150 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31f35550-f5f7-44cb-99fc-a2371e1ee3e4-config-volume" (OuterVolumeSpecName: "config-volume") pod "31f35550-f5f7-44cb-99fc-a2371e1ee3e4" (UID: "31f35550-f5f7-44cb-99fc-a2371e1ee3e4"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 18:00:04 crc kubenswrapper[4811]: I0128 18:00:04.128588 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31f35550-f5f7-44cb-99fc-a2371e1ee3e4-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "31f35550-f5f7-44cb-99fc-a2371e1ee3e4" (UID: "31f35550-f5f7-44cb-99fc-a2371e1ee3e4"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:00:04 crc kubenswrapper[4811]: I0128 18:00:04.130747 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31f35550-f5f7-44cb-99fc-a2371e1ee3e4-kube-api-access-56nr7" (OuterVolumeSpecName: "kube-api-access-56nr7") pod "31f35550-f5f7-44cb-99fc-a2371e1ee3e4" (UID: "31f35550-f5f7-44cb-99fc-a2371e1ee3e4"). InnerVolumeSpecName "kube-api-access-56nr7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 18:00:04 crc kubenswrapper[4811]: I0128 18:00:04.225121 4811 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/31f35550-f5f7-44cb-99fc-a2371e1ee3e4-config-volume\") on node \"crc\" DevicePath \"\"" Jan 28 18:00:04 crc kubenswrapper[4811]: I0128 18:00:04.225164 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-56nr7\" (UniqueName: \"kubernetes.io/projected/31f35550-f5f7-44cb-99fc-a2371e1ee3e4-kube-api-access-56nr7\") on node \"crc\" DevicePath \"\"" Jan 28 18:00:04 crc kubenswrapper[4811]: I0128 18:00:04.225174 4811 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/31f35550-f5f7-44cb-99fc-a2371e1ee3e4-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 28 18:00:04 crc kubenswrapper[4811]: I0128 18:00:04.608307 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493720-6nwxl" event={"ID":"31f35550-f5f7-44cb-99fc-a2371e1ee3e4","Type":"ContainerDied","Data":"5018d6a037b2b347e53bda518ce00bb73162394febea864ffdd6a2878ecc4f16"} Jan 28 18:00:04 crc kubenswrapper[4811]: I0128 18:00:04.608351 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5018d6a037b2b347e53bda518ce00bb73162394febea864ffdd6a2878ecc4f16" Jan 28 18:00:04 crc kubenswrapper[4811]: I0128 18:00:04.608405 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493720-6nwxl" Jan 28 18:00:05 crc kubenswrapper[4811]: I0128 18:00:05.036397 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493675-dv2kn"] Jan 28 18:00:05 crc kubenswrapper[4811]: I0128 18:00:05.061240 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493675-dv2kn"] Jan 28 18:00:06 crc kubenswrapper[4811]: I0128 18:00:06.354499 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1357af3-97dc-4364-8c0f-88e491dd605d" path="/var/lib/kubelet/pods/c1357af3-97dc-4364-8c0f-88e491dd605d/volumes" Jan 28 18:00:33 crc kubenswrapper[4811]: I0128 18:00:33.087612 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 18:00:33 crc kubenswrapper[4811]: I0128 18:00:33.088128 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 18:00:37 crc kubenswrapper[4811]: I0128 18:00:37.467931 4811 scope.go:117] "RemoveContainer" containerID="24f4d1bbb82b607e8f4c22f7a51f59946c92afba512f147a7c68d6cc3793d82f" Jan 28 18:01:00 crc kubenswrapper[4811]: I0128 18:01:00.161209 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29493721-rc9sk"] Jan 28 18:01:00 crc kubenswrapper[4811]: E0128 18:01:00.163727 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31f35550-f5f7-44cb-99fc-a2371e1ee3e4" 
containerName="collect-profiles" Jan 28 18:01:00 crc kubenswrapper[4811]: I0128 18:01:00.163765 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="31f35550-f5f7-44cb-99fc-a2371e1ee3e4" containerName="collect-profiles" Jan 28 18:01:00 crc kubenswrapper[4811]: I0128 18:01:00.164120 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="31f35550-f5f7-44cb-99fc-a2371e1ee3e4" containerName="collect-profiles" Jan 28 18:01:00 crc kubenswrapper[4811]: I0128 18:01:00.167824 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29493721-rc9sk" Jan 28 18:01:00 crc kubenswrapper[4811]: I0128 18:01:00.177877 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29493721-rc9sk"] Jan 28 18:01:00 crc kubenswrapper[4811]: I0128 18:01:00.311492 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbd629a4-a54d-4748-99f1-db9f8ed0a197-config-data\") pod \"keystone-cron-29493721-rc9sk\" (UID: \"fbd629a4-a54d-4748-99f1-db9f8ed0a197\") " pod="openstack/keystone-cron-29493721-rc9sk" Jan 28 18:01:00 crc kubenswrapper[4811]: I0128 18:01:00.311559 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbd629a4-a54d-4748-99f1-db9f8ed0a197-combined-ca-bundle\") pod \"keystone-cron-29493721-rc9sk\" (UID: \"fbd629a4-a54d-4748-99f1-db9f8ed0a197\") " pod="openstack/keystone-cron-29493721-rc9sk" Jan 28 18:01:00 crc kubenswrapper[4811]: I0128 18:01:00.311623 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fbd629a4-a54d-4748-99f1-db9f8ed0a197-fernet-keys\") pod \"keystone-cron-29493721-rc9sk\" (UID: \"fbd629a4-a54d-4748-99f1-db9f8ed0a197\") " pod="openstack/keystone-cron-29493721-rc9sk" Jan 28 18:01:00 crc kubenswrapper[4811]: I0128 18:01:00.311726 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92zfj\" (UniqueName: \"kubernetes.io/projected/fbd629a4-a54d-4748-99f1-db9f8ed0a197-kube-api-access-92zfj\") pod \"keystone-cron-29493721-rc9sk\" (UID: \"fbd629a4-a54d-4748-99f1-db9f8ed0a197\") " pod="openstack/keystone-cron-29493721-rc9sk" Jan 28 18:01:00 crc kubenswrapper[4811]: I0128 18:01:00.413380 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fbd629a4-a54d-4748-99f1-db9f8ed0a197-fernet-keys\") pod \"keystone-cron-29493721-rc9sk\" (UID: \"fbd629a4-a54d-4748-99f1-db9f8ed0a197\") " pod="openstack/keystone-cron-29493721-rc9sk" Jan 28 18:01:00 crc kubenswrapper[4811]: I0128 18:01:00.413598 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92zfj\" (UniqueName: \"kubernetes.io/projected/fbd629a4-a54d-4748-99f1-db9f8ed0a197-kube-api-access-92zfj\") pod \"keystone-cron-29493721-rc9sk\" (UID: \"fbd629a4-a54d-4748-99f1-db9f8ed0a197\") " pod="openstack/keystone-cron-29493721-rc9sk" Jan 28 18:01:00 crc kubenswrapper[4811]: I0128 18:01:00.413722 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbd629a4-a54d-4748-99f1-db9f8ed0a197-config-data\") pod \"keystone-cron-29493721-rc9sk\" (UID: \"fbd629a4-a54d-4748-99f1-db9f8ed0a197\") " 
pod="openstack/keystone-cron-29493721-rc9sk" Jan 28 18:01:00 crc kubenswrapper[4811]: I0128 18:01:00.413770 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbd629a4-a54d-4748-99f1-db9f8ed0a197-combined-ca-bundle\") pod \"keystone-cron-29493721-rc9sk\" (UID: \"fbd629a4-a54d-4748-99f1-db9f8ed0a197\") " pod="openstack/keystone-cron-29493721-rc9sk" Jan 28 18:01:00 crc kubenswrapper[4811]: I0128 18:01:00.419544 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbd629a4-a54d-4748-99f1-db9f8ed0a197-combined-ca-bundle\") pod \"keystone-cron-29493721-rc9sk\" (UID: \"fbd629a4-a54d-4748-99f1-db9f8ed0a197\") " pod="openstack/keystone-cron-29493721-rc9sk" Jan 28 18:01:00 crc kubenswrapper[4811]: I0128 18:01:00.419750 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbd629a4-a54d-4748-99f1-db9f8ed0a197-config-data\") pod \"keystone-cron-29493721-rc9sk\" (UID: \"fbd629a4-a54d-4748-99f1-db9f8ed0a197\") " pod="openstack/keystone-cron-29493721-rc9sk" Jan 28 18:01:00 crc kubenswrapper[4811]: I0128 18:01:00.423216 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fbd629a4-a54d-4748-99f1-db9f8ed0a197-fernet-keys\") pod \"keystone-cron-29493721-rc9sk\" (UID: \"fbd629a4-a54d-4748-99f1-db9f8ed0a197\") " pod="openstack/keystone-cron-29493721-rc9sk" Jan 28 18:01:00 crc kubenswrapper[4811]: I0128 18:01:00.431924 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92zfj\" (UniqueName: \"kubernetes.io/projected/fbd629a4-a54d-4748-99f1-db9f8ed0a197-kube-api-access-92zfj\") pod \"keystone-cron-29493721-rc9sk\" (UID: \"fbd629a4-a54d-4748-99f1-db9f8ed0a197\") " pod="openstack/keystone-cron-29493721-rc9sk" Jan 28 18:01:00 crc kubenswrapper[4811]: I0128 18:01:00.505976 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29493721-rc9sk" Jan 28 18:01:00 crc kubenswrapper[4811]: I0128 18:01:00.994477 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29493721-rc9sk"] Jan 28 18:01:01 crc kubenswrapper[4811]: I0128 18:01:01.181902 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29493721-rc9sk" event={"ID":"fbd629a4-a54d-4748-99f1-db9f8ed0a197","Type":"ContainerStarted","Data":"382d3df5fd761d02d3721f56d1feea879c52abb518503f0b45112cbbca43f63f"} Jan 28 18:01:02 crc kubenswrapper[4811]: I0128 18:01:02.193987 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29493721-rc9sk" event={"ID":"fbd629a4-a54d-4748-99f1-db9f8ed0a197","Type":"ContainerStarted","Data":"d9d71d16e061c5bb8e5a3cb3a9fa52e8101d0b549baf1d6085c040ec7477082c"} Jan 28 18:01:02 crc kubenswrapper[4811]: I0128 18:01:02.229951 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29493721-rc9sk" podStartSLOduration=2.22992342 podStartE2EDuration="2.22992342s" podCreationTimestamp="2026-01-28 18:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 18:01:02.217523724 +0000 UTC m=+8154.971887307" watchObservedRunningTime="2026-01-28 18:01:02.22992342 +0000 UTC m=+8154.984287003" Jan 28 18:01:03 crc kubenswrapper[4811]: I0128 18:01:03.087485 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 18:01:03 crc kubenswrapper[4811]: I0128 18:01:03.088166 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 18:01:07 crc kubenswrapper[4811]: I0128 18:01:07.262380 4811 generic.go:334] "Generic (PLEG): container finished" podID="fbd629a4-a54d-4748-99f1-db9f8ed0a197" containerID="d9d71d16e061c5bb8e5a3cb3a9fa52e8101d0b549baf1d6085c040ec7477082c" exitCode=0 Jan 28 18:01:07 crc kubenswrapper[4811]: I0128 18:01:07.262691 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29493721-rc9sk" event={"ID":"fbd629a4-a54d-4748-99f1-db9f8ed0a197","Type":"ContainerDied","Data":"d9d71d16e061c5bb8e5a3cb3a9fa52e8101d0b549baf1d6085c040ec7477082c"} Jan 28 18:01:08 crc kubenswrapper[4811]: I0128 18:01:08.669323 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29493721-rc9sk" Jan 28 18:01:08 crc kubenswrapper[4811]: I0128 18:01:08.821915 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbd629a4-a54d-4748-99f1-db9f8ed0a197-config-data\") pod \"fbd629a4-a54d-4748-99f1-db9f8ed0a197\" (UID: \"fbd629a4-a54d-4748-99f1-db9f8ed0a197\") " Jan 28 18:01:08 crc kubenswrapper[4811]: I0128 18:01:08.822045 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-92zfj\" (UniqueName: \"kubernetes.io/projected/fbd629a4-a54d-4748-99f1-db9f8ed0a197-kube-api-access-92zfj\") pod \"fbd629a4-a54d-4748-99f1-db9f8ed0a197\" (UID: \"fbd629a4-a54d-4748-99f1-db9f8ed0a197\") " Jan 28 18:01:08 crc kubenswrapper[4811]: I0128 18:01:08.822113 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbd629a4-a54d-4748-99f1-db9f8ed0a197-combined-ca-bundle\") pod \"fbd629a4-a54d-4748-99f1-db9f8ed0a197\" (UID: \"fbd629a4-a54d-4748-99f1-db9f8ed0a197\") " Jan 28 18:01:08 crc kubenswrapper[4811]: I0128 18:01:08.822301 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fbd629a4-a54d-4748-99f1-db9f8ed0a197-fernet-keys\") pod \"fbd629a4-a54d-4748-99f1-db9f8ed0a197\" (UID: \"fbd629a4-a54d-4748-99f1-db9f8ed0a197\") " Jan 28 18:01:08 crc kubenswrapper[4811]: I0128 18:01:08.828223 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbd629a4-a54d-4748-99f1-db9f8ed0a197-kube-api-access-92zfj" (OuterVolumeSpecName: "kube-api-access-92zfj") pod "fbd629a4-a54d-4748-99f1-db9f8ed0a197" (UID: "fbd629a4-a54d-4748-99f1-db9f8ed0a197"). InnerVolumeSpecName "kube-api-access-92zfj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 18:01:08 crc kubenswrapper[4811]: I0128 18:01:08.831486 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbd629a4-a54d-4748-99f1-db9f8ed0a197-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "fbd629a4-a54d-4748-99f1-db9f8ed0a197" (UID: "fbd629a4-a54d-4748-99f1-db9f8ed0a197"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:01:08 crc kubenswrapper[4811]: I0128 18:01:08.863082 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbd629a4-a54d-4748-99f1-db9f8ed0a197-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fbd629a4-a54d-4748-99f1-db9f8ed0a197" (UID: "fbd629a4-a54d-4748-99f1-db9f8ed0a197"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:01:08 crc kubenswrapper[4811]: I0128 18:01:08.877079 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbd629a4-a54d-4748-99f1-db9f8ed0a197-config-data" (OuterVolumeSpecName: "config-data") pod "fbd629a4-a54d-4748-99f1-db9f8ed0a197" (UID: "fbd629a4-a54d-4748-99f1-db9f8ed0a197"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:01:08 crc kubenswrapper[4811]: I0128 18:01:08.924928 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-92zfj\" (UniqueName: \"kubernetes.io/projected/fbd629a4-a54d-4748-99f1-db9f8ed0a197-kube-api-access-92zfj\") on node \"crc\" DevicePath \"\"" Jan 28 18:01:08 crc kubenswrapper[4811]: I0128 18:01:08.924963 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbd629a4-a54d-4748-99f1-db9f8ed0a197-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 18:01:08 crc kubenswrapper[4811]: I0128 18:01:08.924974 4811 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fbd629a4-a54d-4748-99f1-db9f8ed0a197-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 28 18:01:08 crc kubenswrapper[4811]: I0128 18:01:08.924985 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbd629a4-a54d-4748-99f1-db9f8ed0a197-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 18:01:09 crc kubenswrapper[4811]: I0128 18:01:09.288383 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29493721-rc9sk" event={"ID":"fbd629a4-a54d-4748-99f1-db9f8ed0a197","Type":"ContainerDied","Data":"382d3df5fd761d02d3721f56d1feea879c52abb518503f0b45112cbbca43f63f"} Jan 28 18:01:09 crc kubenswrapper[4811]: I0128 18:01:09.288446 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="382d3df5fd761d02d3721f56d1feea879c52abb518503f0b45112cbbca43f63f" Jan 28 18:01:09 crc kubenswrapper[4811]: I0128 18:01:09.288464 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29493721-rc9sk" Jan 28 18:01:11 crc kubenswrapper[4811]: I0128 18:01:11.315409 4811 generic.go:334] "Generic (PLEG): container finished" podID="b6ea447f-728c-4c66-b2d3-04009c00e284" containerID="8525619e0c264cd9183521e22405b76d77db0d6635a7ff9993aa518f5cc618a3" exitCode=0 Jan 28 18:01:11 crc kubenswrapper[4811]: I0128 18:01:11.315497 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-rszk6" event={"ID":"b6ea447f-728c-4c66-b2d3-04009c00e284","Type":"ContainerDied","Data":"8525619e0c264cd9183521e22405b76d77db0d6635a7ff9993aa518f5cc618a3"} Jan 28 18:01:12 crc kubenswrapper[4811]: I0128 18:01:12.841587 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-rszk6" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.016741 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6ea447f-728c-4c66-b2d3-04009c00e284-libvirt-combined-ca-bundle\") pod \"b6ea447f-728c-4c66-b2d3-04009c00e284\" (UID: \"b6ea447f-728c-4c66-b2d3-04009c00e284\") " Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.017509 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/b6ea447f-728c-4c66-b2d3-04009c00e284-libvirt-secret-0\") pod \"b6ea447f-728c-4c66-b2d3-04009c00e284\" (UID: \"b6ea447f-728c-4c66-b2d3-04009c00e284\") " Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.017595 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b6ea447f-728c-4c66-b2d3-04009c00e284-inventory\") pod \"b6ea447f-728c-4c66-b2d3-04009c00e284\" (UID: \"b6ea447f-728c-4c66-b2d3-04009c00e284\") " Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.017675 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/b6ea447f-728c-4c66-b2d3-04009c00e284-ssh-key-openstack-cell1\") pod \"b6ea447f-728c-4c66-b2d3-04009c00e284\" (UID: \"b6ea447f-728c-4c66-b2d3-04009c00e284\") " Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.017890 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s2vp6\" (UniqueName: \"kubernetes.io/projected/b6ea447f-728c-4c66-b2d3-04009c00e284-kube-api-access-s2vp6\") pod \"b6ea447f-728c-4c66-b2d3-04009c00e284\" (UID: \"b6ea447f-728c-4c66-b2d3-04009c00e284\") " Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.017988 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b6ea447f-728c-4c66-b2d3-04009c00e284-ceph\") pod \"b6ea447f-728c-4c66-b2d3-04009c00e284\" (UID: \"b6ea447f-728c-4c66-b2d3-04009c00e284\") " Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.024097 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6ea447f-728c-4c66-b2d3-04009c00e284-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "b6ea447f-728c-4c66-b2d3-04009c00e284" (UID: "b6ea447f-728c-4c66-b2d3-04009c00e284"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.025318 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6ea447f-728c-4c66-b2d3-04009c00e284-ceph" (OuterVolumeSpecName: "ceph") pod "b6ea447f-728c-4c66-b2d3-04009c00e284" (UID: "b6ea447f-728c-4c66-b2d3-04009c00e284"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.025335 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6ea447f-728c-4c66-b2d3-04009c00e284-kube-api-access-s2vp6" (OuterVolumeSpecName: "kube-api-access-s2vp6") pod "b6ea447f-728c-4c66-b2d3-04009c00e284" (UID: "b6ea447f-728c-4c66-b2d3-04009c00e284"). InnerVolumeSpecName "kube-api-access-s2vp6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.054556 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6ea447f-728c-4c66-b2d3-04009c00e284-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "b6ea447f-728c-4c66-b2d3-04009c00e284" (UID: "b6ea447f-728c-4c66-b2d3-04009c00e284"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.057207 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6ea447f-728c-4c66-b2d3-04009c00e284-inventory" (OuterVolumeSpecName: "inventory") pod "b6ea447f-728c-4c66-b2d3-04009c00e284" (UID: "b6ea447f-728c-4c66-b2d3-04009c00e284"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.068594 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6ea447f-728c-4c66-b2d3-04009c00e284-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "b6ea447f-728c-4c66-b2d3-04009c00e284" (UID: "b6ea447f-728c-4c66-b2d3-04009c00e284"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.120399 4811 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/b6ea447f-728c-4c66-b2d3-04009c00e284-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.120462 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s2vp6\" (UniqueName: \"kubernetes.io/projected/b6ea447f-728c-4c66-b2d3-04009c00e284-kube-api-access-s2vp6\") on node \"crc\" DevicePath \"\"" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.120481 4811 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b6ea447f-728c-4c66-b2d3-04009c00e284-ceph\") on node \"crc\" DevicePath \"\"" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.120492 4811 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6ea447f-728c-4c66-b2d3-04009c00e284-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.120505 4811 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/b6ea447f-728c-4c66-b2d3-04009c00e284-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.120515 4811 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b6ea447f-728c-4c66-b2d3-04009c00e284-inventory\") on node \"crc\" DevicePath \"\"" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.336142 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-rszk6" event={"ID":"b6ea447f-728c-4c66-b2d3-04009c00e284","Type":"ContainerDied","Data":"cd59746767bbb2c130a65d395716c16a29ca2bc18f5e65868671f375ffd538d6"} Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.336189 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cd59746767bbb2c130a65d395716c16a29ca2bc18f5e65868671f375ffd538d6" Jan 28 
18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.336201 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-rszk6" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.426948 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-k5j98"] Jan 28 18:01:13 crc kubenswrapper[4811]: E0128 18:01:13.427492 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6ea447f-728c-4c66-b2d3-04009c00e284" containerName="libvirt-openstack-openstack-cell1" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.427519 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6ea447f-728c-4c66-b2d3-04009c00e284" containerName="libvirt-openstack-openstack-cell1" Jan 28 18:01:13 crc kubenswrapper[4811]: E0128 18:01:13.427565 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbd629a4-a54d-4748-99f1-db9f8ed0a197" containerName="keystone-cron" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.427573 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbd629a4-a54d-4748-99f1-db9f8ed0a197" containerName="keystone-cron" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.427801 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbd629a4-a54d-4748-99f1-db9f8ed0a197" containerName="keystone-cron" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.427828 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6ea447f-728c-4c66-b2d3-04009c00e284" containerName="libvirt-openstack-openstack-cell1" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.428815 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.431773 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.432376 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.432531 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.434680 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.434793 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-cells-global-config" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.434919 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.435353 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-ceph\") pod \"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.435423 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cl6rz\" (UniqueName: \"kubernetes.io/projected/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-kube-api-access-cl6rz\") pod 
\"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.435458 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-856xk" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.435524 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.435664 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-ssh-key-openstack-cell1\") pod \"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.435698 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.435741 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-inventory\") pod \"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.435775 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.435799 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.435826 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.435969 
4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.436335 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.449405 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-k5j98"] Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.539692 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.539786 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-ceph\") pod \"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.539841 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cl6rz\" (UniqueName: \"kubernetes.io/projected/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-kube-api-access-cl6rz\") pod \"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.539909 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.540014 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-ssh-key-openstack-cell1\") pod \"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.540039 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: 
\"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.540063 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-inventory\") pod \"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.540096 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.540126 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.540155 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.540193 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.541139 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.541194 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.543696 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " 
pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.544422 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.545154 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.545205 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.549942 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-ceph\") pod \"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.555242 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-inventory\") pod \"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.555242 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.555613 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-ssh-key-openstack-cell1\") pod \"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.559676 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cl6rz\" (UniqueName: \"kubernetes.io/projected/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-kube-api-access-cl6rz\") pod \"nova-cell1-openstack-openstack-cell1-k5j98\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:13 crc kubenswrapper[4811]: I0128 18:01:13.758756 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:01:14 crc kubenswrapper[4811]: I0128 18:01:14.347504 4811 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 28 18:01:14 crc kubenswrapper[4811]: I0128 18:01:14.358663 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-k5j98"] Jan 28 18:01:15 crc kubenswrapper[4811]: I0128 18:01:15.358996 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" event={"ID":"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170","Type":"ContainerStarted","Data":"89497894222c71cf7c216c1834f84efc04195652ceadb9f8cd3e419239c06d11"} Jan 28 18:01:17 crc kubenswrapper[4811]: I0128 18:01:17.392120 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" event={"ID":"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170","Type":"ContainerStarted","Data":"80abe6b7770312be77eb1d025162fa43f59559b31e83136b8ab5e194c4da2aed"} Jan 28 18:01:17 crc kubenswrapper[4811]: I0128 18:01:17.412940 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" podStartSLOduration=2.47832819 podStartE2EDuration="4.412922017s" podCreationTimestamp="2026-01-28 18:01:13 +0000 UTC" firstStartedPulling="2026-01-28 18:01:14.347115207 +0000 UTC m=+8167.101478800" lastFinishedPulling="2026-01-28 18:01:16.281709044 +0000 UTC m=+8169.036072627" observedRunningTime="2026-01-28 18:01:17.409758341 +0000 UTC m=+8170.164121924" watchObservedRunningTime="2026-01-28 18:01:17.412922017 +0000 UTC m=+8170.167285590" Jan 28 18:01:33 crc kubenswrapper[4811]: I0128 18:01:33.087177 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 18:01:33 crc kubenswrapper[4811]: I0128 18:01:33.087739 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 18:01:33 crc kubenswrapper[4811]: I0128 18:01:33.087792 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" Jan 28 18:01:33 crc kubenswrapper[4811]: I0128 18:01:33.088617 4811 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3c50e1a118002ad5673703280109cdd35f9243f49b098e3b9e9d8b0f7d945e39"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 28 18:01:33 crc kubenswrapper[4811]: I0128 18:01:33.088665 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://3c50e1a118002ad5673703280109cdd35f9243f49b098e3b9e9d8b0f7d945e39" gracePeriod=600 Jan 28 18:01:33 crc kubenswrapper[4811]: I0128 
Jan 28 18:01:33 crc kubenswrapper[4811]: I0128 18:01:33.565754 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"3c50e1a118002ad5673703280109cdd35f9243f49b098e3b9e9d8b0f7d945e39"}
Jan 28 18:01:33 crc kubenswrapper[4811]: I0128 18:01:33.566201 4811 scope.go:117] "RemoveContainer" containerID="90375971f2c5a7938073dc9ee633c0e5f78e2d3a0ea787115d60e65aaaecf886"
Jan 28 18:01:34 crc kubenswrapper[4811]: I0128 18:01:34.577542 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"52e2af28e60ab3db25494ca65635994bbcd159056152d6f183dc3f10f6758b00"}
Jan 28 18:01:57 crc kubenswrapper[4811]: I0128 18:01:57.861506 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-f9h85"]
Jan 28 18:01:57 crc kubenswrapper[4811]: I0128 18:01:57.864292 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f9h85"
Jan 28 18:01:57 crc kubenswrapper[4811]: I0128 18:01:57.873309 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-f9h85"]
Jan 28 18:01:57 crc kubenswrapper[4811]: I0128 18:01:57.913919 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88d44ca2-6c87-45ee-b9be-5b7d7f8e6801-catalog-content\") pod \"community-operators-f9h85\" (UID: \"88d44ca2-6c87-45ee-b9be-5b7d7f8e6801\") " pod="openshift-marketplace/community-operators-f9h85"
Jan 28 18:01:57 crc kubenswrapper[4811]: I0128 18:01:57.914385 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88d44ca2-6c87-45ee-b9be-5b7d7f8e6801-utilities\") pod \"community-operators-f9h85\" (UID: \"88d44ca2-6c87-45ee-b9be-5b7d7f8e6801\") " pod="openshift-marketplace/community-operators-f9h85"
Jan 28 18:01:57 crc kubenswrapper[4811]: I0128 18:01:57.914555 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9qfq\" (UniqueName: \"kubernetes.io/projected/88d44ca2-6c87-45ee-b9be-5b7d7f8e6801-kube-api-access-t9qfq\") pod \"community-operators-f9h85\" (UID: \"88d44ca2-6c87-45ee-b9be-5b7d7f8e6801\") " pod="openshift-marketplace/community-operators-f9h85"
Jan 28 18:01:58 crc kubenswrapper[4811]: I0128 18:01:58.015911 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88d44ca2-6c87-45ee-b9be-5b7d7f8e6801-utilities\") pod \"community-operators-f9h85\" (UID: \"88d44ca2-6c87-45ee-b9be-5b7d7f8e6801\") " pod="openshift-marketplace/community-operators-f9h85"
Jan 28 18:01:58 crc kubenswrapper[4811]: I0128 18:01:58.015977 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9qfq\" (UniqueName: \"kubernetes.io/projected/88d44ca2-6c87-45ee-b9be-5b7d7f8e6801-kube-api-access-t9qfq\") pod \"community-operators-f9h85\" (UID: \"88d44ca2-6c87-45ee-b9be-5b7d7f8e6801\") " pod="openshift-marketplace/community-operators-f9h85"
Jan 28 18:01:58 crc kubenswrapper[4811]: I0128 18:01:58.016052 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88d44ca2-6c87-45ee-b9be-5b7d7f8e6801-catalog-content\") pod \"community-operators-f9h85\" (UID: \"88d44ca2-6c87-45ee-b9be-5b7d7f8e6801\") " pod="openshift-marketplace/community-operators-f9h85"
Jan 28 18:01:58 crc kubenswrapper[4811]: I0128 18:01:58.016542 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88d44ca2-6c87-45ee-b9be-5b7d7f8e6801-utilities\") pod \"community-operators-f9h85\" (UID: \"88d44ca2-6c87-45ee-b9be-5b7d7f8e6801\") " pod="openshift-marketplace/community-operators-f9h85"
Jan 28 18:01:58 crc kubenswrapper[4811]: I0128 18:01:58.016650 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88d44ca2-6c87-45ee-b9be-5b7d7f8e6801-catalog-content\") pod \"community-operators-f9h85\" (UID: \"88d44ca2-6c87-45ee-b9be-5b7d7f8e6801\") " pod="openshift-marketplace/community-operators-f9h85"
Jan 28 18:01:58 crc kubenswrapper[4811]: I0128 18:01:58.040681 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9qfq\" (UniqueName: \"kubernetes.io/projected/88d44ca2-6c87-45ee-b9be-5b7d7f8e6801-kube-api-access-t9qfq\") pod \"community-operators-f9h85\" (UID: \"88d44ca2-6c87-45ee-b9be-5b7d7f8e6801\") " pod="openshift-marketplace/community-operators-f9h85"
Jan 28 18:01:58 crc kubenswrapper[4811]: I0128 18:01:58.184239 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f9h85"
Jan 28 18:01:58 crc kubenswrapper[4811]: I0128 18:01:58.930823 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-f9h85"]
Jan 28 18:01:59 crc kubenswrapper[4811]: I0128 18:01:59.819859 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f9h85" event={"ID":"88d44ca2-6c87-45ee-b9be-5b7d7f8e6801","Type":"ContainerStarted","Data":"fe64bf3c17d1af90dd6a0f38711a4c3d111f56b76f402a811d515a5f29d08303"}
Jan 28 18:02:00 crc kubenswrapper[4811]: I0128 18:02:00.829390 4811 generic.go:334] "Generic (PLEG): container finished" podID="88d44ca2-6c87-45ee-b9be-5b7d7f8e6801" containerID="6e31f21e22ce32d5bf33b2aac8ee150d6b043ec74487e6e3f44dd10bdccd7c52" exitCode=0
Jan 28 18:02:00 crc kubenswrapper[4811]: I0128 18:02:00.829475 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f9h85" event={"ID":"88d44ca2-6c87-45ee-b9be-5b7d7f8e6801","Type":"ContainerDied","Data":"6e31f21e22ce32d5bf33b2aac8ee150d6b043ec74487e6e3f44dd10bdccd7c52"}
Jan 28 18:02:03 crc kubenswrapper[4811]: I0128 18:02:03.865618 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f9h85" event={"ID":"88d44ca2-6c87-45ee-b9be-5b7d7f8e6801","Type":"ContainerStarted","Data":"48a3e38080feac74e9e0d4294d536ddaf334c2603e1d79f703a9d4ddca0eace0"}
Jan 28 18:02:04 crc kubenswrapper[4811]: E0128 18:02:04.342037 4811 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod88d44ca2_6c87_45ee_b9be_5b7d7f8e6801.slice/crio-48a3e38080feac74e9e0d4294d536ddaf334c2603e1d79f703a9d4ddca0eace0.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod88d44ca2_6c87_45ee_b9be_5b7d7f8e6801.slice/crio-conmon-48a3e38080feac74e9e0d4294d536ddaf334c2603e1d79f703a9d4ddca0eace0.scope\": RecentStats: unable to find data in memory cache]"
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod88d44ca2_6c87_45ee_b9be_5b7d7f8e6801.slice/crio-48a3e38080feac74e9e0d4294d536ddaf334c2603e1d79f703a9d4ddca0eace0.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod88d44ca2_6c87_45ee_b9be_5b7d7f8e6801.slice/crio-conmon-48a3e38080feac74e9e0d4294d536ddaf334c2603e1d79f703a9d4ddca0eace0.scope\": RecentStats: unable to find data in memory cache]" Jan 28 18:02:04 crc kubenswrapper[4811]: I0128 18:02:04.876862 4811 generic.go:334] "Generic (PLEG): container finished" podID="88d44ca2-6c87-45ee-b9be-5b7d7f8e6801" containerID="48a3e38080feac74e9e0d4294d536ddaf334c2603e1d79f703a9d4ddca0eace0" exitCode=0 Jan 28 18:02:04 crc kubenswrapper[4811]: I0128 18:02:04.876933 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f9h85" event={"ID":"88d44ca2-6c87-45ee-b9be-5b7d7f8e6801","Type":"ContainerDied","Data":"48a3e38080feac74e9e0d4294d536ddaf334c2603e1d79f703a9d4ddca0eace0"} Jan 28 18:02:07 crc kubenswrapper[4811]: I0128 18:02:07.908256 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f9h85" event={"ID":"88d44ca2-6c87-45ee-b9be-5b7d7f8e6801","Type":"ContainerStarted","Data":"4568f4798a7db9e33ab027f34adde147370c047c15180108912b2b03f7863af2"} Jan 28 18:02:07 crc kubenswrapper[4811]: I0128 18:02:07.934338 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-f9h85" podStartSLOduration=4.852181989 podStartE2EDuration="10.93431816s" podCreationTimestamp="2026-01-28 18:01:57 +0000 UTC" firstStartedPulling="2026-01-28 18:02:00.832513623 +0000 UTC m=+8213.586877206" lastFinishedPulling="2026-01-28 18:02:06.914649794 +0000 UTC m=+8219.669013377" observedRunningTime="2026-01-28 18:02:07.932361626 +0000 UTC m=+8220.686725219" watchObservedRunningTime="2026-01-28 18:02:07.93431816 +0000 UTC m=+8220.688681733" Jan 28 18:02:08 crc kubenswrapper[4811]: I0128 18:02:08.185197 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-f9h85" Jan 28 18:02:08 crc kubenswrapper[4811]: I0128 18:02:08.185245 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-f9h85" Jan 28 18:02:09 crc kubenswrapper[4811]: I0128 18:02:09.231025 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-f9h85" podUID="88d44ca2-6c87-45ee-b9be-5b7d7f8e6801" containerName="registry-server" probeResult="failure" output=< Jan 28 18:02:09 crc kubenswrapper[4811]: timeout: failed to connect service ":50051" within 1s Jan 28 18:02:09 crc kubenswrapper[4811]: > Jan 28 18:02:18 crc kubenswrapper[4811]: I0128 18:02:18.253940 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-f9h85" Jan 28 18:02:18 crc kubenswrapper[4811]: I0128 18:02:18.321853 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-f9h85" Jan 28 18:02:18 crc kubenswrapper[4811]: I0128 18:02:18.504109 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-f9h85"] Jan 28 18:02:20 crc kubenswrapper[4811]: I0128 18:02:20.033099 4811 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/community-operators-f9h85" podUID="88d44ca2-6c87-45ee-b9be-5b7d7f8e6801" containerName="registry-server" containerID="cri-o://4568f4798a7db9e33ab027f34adde147370c047c15180108912b2b03f7863af2" gracePeriod=2 Jan 28 18:02:21 crc kubenswrapper[4811]: I0128 18:02:21.053009 4811 generic.go:334] "Generic (PLEG): container finished" podID="88d44ca2-6c87-45ee-b9be-5b7d7f8e6801" containerID="4568f4798a7db9e33ab027f34adde147370c047c15180108912b2b03f7863af2" exitCode=0 Jan 28 18:02:21 crc kubenswrapper[4811]: I0128 18:02:21.053063 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f9h85" event={"ID":"88d44ca2-6c87-45ee-b9be-5b7d7f8e6801","Type":"ContainerDied","Data":"4568f4798a7db9e33ab027f34adde147370c047c15180108912b2b03f7863af2"} Jan 28 18:02:21 crc kubenswrapper[4811]: I0128 18:02:21.258576 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f9h85" Jan 28 18:02:21 crc kubenswrapper[4811]: I0128 18:02:21.461598 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88d44ca2-6c87-45ee-b9be-5b7d7f8e6801-utilities\") pod \"88d44ca2-6c87-45ee-b9be-5b7d7f8e6801\" (UID: \"88d44ca2-6c87-45ee-b9be-5b7d7f8e6801\") " Jan 28 18:02:21 crc kubenswrapper[4811]: I0128 18:02:21.461970 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88d44ca2-6c87-45ee-b9be-5b7d7f8e6801-catalog-content\") pod \"88d44ca2-6c87-45ee-b9be-5b7d7f8e6801\" (UID: \"88d44ca2-6c87-45ee-b9be-5b7d7f8e6801\") " Jan 28 18:02:21 crc kubenswrapper[4811]: I0128 18:02:21.462162 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t9qfq\" (UniqueName: \"kubernetes.io/projected/88d44ca2-6c87-45ee-b9be-5b7d7f8e6801-kube-api-access-t9qfq\") pod \"88d44ca2-6c87-45ee-b9be-5b7d7f8e6801\" (UID: \"88d44ca2-6c87-45ee-b9be-5b7d7f8e6801\") " Jan 28 18:02:21 crc kubenswrapper[4811]: I0128 18:02:21.462752 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88d44ca2-6c87-45ee-b9be-5b7d7f8e6801-utilities" (OuterVolumeSpecName: "utilities") pod "88d44ca2-6c87-45ee-b9be-5b7d7f8e6801" (UID: "88d44ca2-6c87-45ee-b9be-5b7d7f8e6801"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 18:02:21 crc kubenswrapper[4811]: I0128 18:02:21.464063 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88d44ca2-6c87-45ee-b9be-5b7d7f8e6801-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 18:02:21 crc kubenswrapper[4811]: I0128 18:02:21.469189 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88d44ca2-6c87-45ee-b9be-5b7d7f8e6801-kube-api-access-t9qfq" (OuterVolumeSpecName: "kube-api-access-t9qfq") pod "88d44ca2-6c87-45ee-b9be-5b7d7f8e6801" (UID: "88d44ca2-6c87-45ee-b9be-5b7d7f8e6801"). InnerVolumeSpecName "kube-api-access-t9qfq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 18:02:21 crc kubenswrapper[4811]: I0128 18:02:21.527280 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88d44ca2-6c87-45ee-b9be-5b7d7f8e6801-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "88d44ca2-6c87-45ee-b9be-5b7d7f8e6801" (UID: "88d44ca2-6c87-45ee-b9be-5b7d7f8e6801"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 18:02:21 crc kubenswrapper[4811]: I0128 18:02:21.566399 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88d44ca2-6c87-45ee-b9be-5b7d7f8e6801-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 18:02:21 crc kubenswrapper[4811]: I0128 18:02:21.566451 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t9qfq\" (UniqueName: \"kubernetes.io/projected/88d44ca2-6c87-45ee-b9be-5b7d7f8e6801-kube-api-access-t9qfq\") on node \"crc\" DevicePath \"\"" Jan 28 18:02:22 crc kubenswrapper[4811]: I0128 18:02:22.071643 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f9h85" event={"ID":"88d44ca2-6c87-45ee-b9be-5b7d7f8e6801","Type":"ContainerDied","Data":"fe64bf3c17d1af90dd6a0f38711a4c3d111f56b76f402a811d515a5f29d08303"} Jan 28 18:02:22 crc kubenswrapper[4811]: I0128 18:02:22.072831 4811 scope.go:117] "RemoveContainer" containerID="4568f4798a7db9e33ab027f34adde147370c047c15180108912b2b03f7863af2" Jan 28 18:02:22 crc kubenswrapper[4811]: I0128 18:02:22.071911 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f9h85" Jan 28 18:02:22 crc kubenswrapper[4811]: I0128 18:02:22.106574 4811 scope.go:117] "RemoveContainer" containerID="48a3e38080feac74e9e0d4294d536ddaf334c2603e1d79f703a9d4ddca0eace0" Jan 28 18:02:22 crc kubenswrapper[4811]: I0128 18:02:22.120506 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-f9h85"] Jan 28 18:02:22 crc kubenswrapper[4811]: I0128 18:02:22.135830 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-f9h85"] Jan 28 18:02:22 crc kubenswrapper[4811]: I0128 18:02:22.147329 4811 scope.go:117] "RemoveContainer" containerID="6e31f21e22ce32d5bf33b2aac8ee150d6b043ec74487e6e3f44dd10bdccd7c52" Jan 28 18:02:22 crc kubenswrapper[4811]: I0128 18:02:22.351240 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88d44ca2-6c87-45ee-b9be-5b7d7f8e6801" path="/var/lib/kubelet/pods/88d44ca2-6c87-45ee-b9be-5b7d7f8e6801/volumes" Jan 28 18:03:33 crc kubenswrapper[4811]: I0128 18:03:33.087748 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 18:03:33 crc kubenswrapper[4811]: I0128 18:03:33.088254 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 18:03:43 crc kubenswrapper[4811]: I0128 18:03:43.018696 4811 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openshift-marketplace/redhat-marketplace-kvt7d"] Jan 28 18:03:43 crc kubenswrapper[4811]: E0128 18:03:43.019599 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88d44ca2-6c87-45ee-b9be-5b7d7f8e6801" containerName="registry-server" Jan 28 18:03:43 crc kubenswrapper[4811]: I0128 18:03:43.019618 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="88d44ca2-6c87-45ee-b9be-5b7d7f8e6801" containerName="registry-server" Jan 28 18:03:43 crc kubenswrapper[4811]: E0128 18:03:43.019643 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88d44ca2-6c87-45ee-b9be-5b7d7f8e6801" containerName="extract-utilities" Jan 28 18:03:43 crc kubenswrapper[4811]: I0128 18:03:43.019653 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="88d44ca2-6c87-45ee-b9be-5b7d7f8e6801" containerName="extract-utilities" Jan 28 18:03:43 crc kubenswrapper[4811]: E0128 18:03:43.019667 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88d44ca2-6c87-45ee-b9be-5b7d7f8e6801" containerName="extract-content" Jan 28 18:03:43 crc kubenswrapper[4811]: I0128 18:03:43.019676 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="88d44ca2-6c87-45ee-b9be-5b7d7f8e6801" containerName="extract-content" Jan 28 18:03:43 crc kubenswrapper[4811]: I0128 18:03:43.019959 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="88d44ca2-6c87-45ee-b9be-5b7d7f8e6801" containerName="registry-server" Jan 28 18:03:43 crc kubenswrapper[4811]: I0128 18:03:43.021843 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kvt7d" Jan 28 18:03:43 crc kubenswrapper[4811]: I0128 18:03:43.029688 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kvt7d"] Jan 28 18:03:43 crc kubenswrapper[4811]: I0128 18:03:43.153098 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0af2cbdd-8055-452c-8dc9-4701e0fe9e56-utilities\") pod \"redhat-marketplace-kvt7d\" (UID: \"0af2cbdd-8055-452c-8dc9-4701e0fe9e56\") " pod="openshift-marketplace/redhat-marketplace-kvt7d" Jan 28 18:03:43 crc kubenswrapper[4811]: I0128 18:03:43.153540 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8ctg\" (UniqueName: \"kubernetes.io/projected/0af2cbdd-8055-452c-8dc9-4701e0fe9e56-kube-api-access-c8ctg\") pod \"redhat-marketplace-kvt7d\" (UID: \"0af2cbdd-8055-452c-8dc9-4701e0fe9e56\") " pod="openshift-marketplace/redhat-marketplace-kvt7d" Jan 28 18:03:43 crc kubenswrapper[4811]: I0128 18:03:43.153918 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0af2cbdd-8055-452c-8dc9-4701e0fe9e56-catalog-content\") pod \"redhat-marketplace-kvt7d\" (UID: \"0af2cbdd-8055-452c-8dc9-4701e0fe9e56\") " pod="openshift-marketplace/redhat-marketplace-kvt7d" Jan 28 18:03:43 crc kubenswrapper[4811]: I0128 18:03:43.256254 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0af2cbdd-8055-452c-8dc9-4701e0fe9e56-catalog-content\") pod \"redhat-marketplace-kvt7d\" (UID: \"0af2cbdd-8055-452c-8dc9-4701e0fe9e56\") " pod="openshift-marketplace/redhat-marketplace-kvt7d" Jan 28 18:03:43 crc kubenswrapper[4811]: I0128 18:03:43.256388 4811 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0af2cbdd-8055-452c-8dc9-4701e0fe9e56-utilities\") pod \"redhat-marketplace-kvt7d\" (UID: \"0af2cbdd-8055-452c-8dc9-4701e0fe9e56\") " pod="openshift-marketplace/redhat-marketplace-kvt7d" Jan 28 18:03:43 crc kubenswrapper[4811]: I0128 18:03:43.256816 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0af2cbdd-8055-452c-8dc9-4701e0fe9e56-catalog-content\") pod \"redhat-marketplace-kvt7d\" (UID: \"0af2cbdd-8055-452c-8dc9-4701e0fe9e56\") " pod="openshift-marketplace/redhat-marketplace-kvt7d" Jan 28 18:03:43 crc kubenswrapper[4811]: I0128 18:03:43.256827 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0af2cbdd-8055-452c-8dc9-4701e0fe9e56-utilities\") pod \"redhat-marketplace-kvt7d\" (UID: \"0af2cbdd-8055-452c-8dc9-4701e0fe9e56\") " pod="openshift-marketplace/redhat-marketplace-kvt7d" Jan 28 18:03:43 crc kubenswrapper[4811]: I0128 18:03:43.256945 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8ctg\" (UniqueName: \"kubernetes.io/projected/0af2cbdd-8055-452c-8dc9-4701e0fe9e56-kube-api-access-c8ctg\") pod \"redhat-marketplace-kvt7d\" (UID: \"0af2cbdd-8055-452c-8dc9-4701e0fe9e56\") " pod="openshift-marketplace/redhat-marketplace-kvt7d" Jan 28 18:03:43 crc kubenswrapper[4811]: I0128 18:03:43.278513 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8ctg\" (UniqueName: \"kubernetes.io/projected/0af2cbdd-8055-452c-8dc9-4701e0fe9e56-kube-api-access-c8ctg\") pod \"redhat-marketplace-kvt7d\" (UID: \"0af2cbdd-8055-452c-8dc9-4701e0fe9e56\") " pod="openshift-marketplace/redhat-marketplace-kvt7d" Jan 28 18:03:43 crc kubenswrapper[4811]: I0128 18:03:43.357019 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kvt7d" Jan 28 18:03:43 crc kubenswrapper[4811]: I0128 18:03:43.892205 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kvt7d"] Jan 28 18:03:43 crc kubenswrapper[4811]: I0128 18:03:43.988819 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kvt7d" event={"ID":"0af2cbdd-8055-452c-8dc9-4701e0fe9e56","Type":"ContainerStarted","Data":"530d34293ba6b9eb509506dd8806fa811f20c1a5305e7d835fe30bba0933d81b"} Jan 28 18:03:45 crc kubenswrapper[4811]: I0128 18:03:44.999548 4811 generic.go:334] "Generic (PLEG): container finished" podID="0af2cbdd-8055-452c-8dc9-4701e0fe9e56" containerID="ba7d8488eeac58bf63c8e375ef47690d4f14c7f15dafb5978bb6dcce2eb4a685" exitCode=0 Jan 28 18:03:45 crc kubenswrapper[4811]: I0128 18:03:44.999616 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kvt7d" event={"ID":"0af2cbdd-8055-452c-8dc9-4701e0fe9e56","Type":"ContainerDied","Data":"ba7d8488eeac58bf63c8e375ef47690d4f14c7f15dafb5978bb6dcce2eb4a685"} Jan 28 18:03:47 crc kubenswrapper[4811]: I0128 18:03:47.024709 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kvt7d" event={"ID":"0af2cbdd-8055-452c-8dc9-4701e0fe9e56","Type":"ContainerStarted","Data":"7bf36d6c97448e14a04ca9b16012e3ad5520b1339cd709a072475a8aa250afc1"} Jan 28 18:03:48 crc kubenswrapper[4811]: I0128 18:03:48.041330 4811 generic.go:334] "Generic (PLEG): container finished" podID="0af2cbdd-8055-452c-8dc9-4701e0fe9e56" containerID="7bf36d6c97448e14a04ca9b16012e3ad5520b1339cd709a072475a8aa250afc1" exitCode=0 Jan 28 18:03:48 crc kubenswrapper[4811]: I0128 18:03:48.041998 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kvt7d" event={"ID":"0af2cbdd-8055-452c-8dc9-4701e0fe9e56","Type":"ContainerDied","Data":"7bf36d6c97448e14a04ca9b16012e3ad5520b1339cd709a072475a8aa250afc1"} Jan 28 18:03:50 crc kubenswrapper[4811]: I0128 18:03:50.062852 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kvt7d" event={"ID":"0af2cbdd-8055-452c-8dc9-4701e0fe9e56","Type":"ContainerStarted","Data":"e4d973bb933d7fe7ed6872c839979257983e0d8923c38f643b91a69ba061494d"} Jan 28 18:03:50 crc kubenswrapper[4811]: I0128 18:03:50.092832 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kvt7d" podStartSLOduration=3.996866129 podStartE2EDuration="8.092810877s" podCreationTimestamp="2026-01-28 18:03:42 +0000 UTC" firstStartedPulling="2026-01-28 18:03:45.003353571 +0000 UTC m=+8317.757717154" lastFinishedPulling="2026-01-28 18:03:49.099298319 +0000 UTC m=+8321.853661902" observedRunningTime="2026-01-28 18:03:50.083542466 +0000 UTC m=+8322.837906059" watchObservedRunningTime="2026-01-28 18:03:50.092810877 +0000 UTC m=+8322.847174450" Jan 28 18:03:53 crc kubenswrapper[4811]: I0128 18:03:53.357626 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kvt7d" Jan 28 18:03:53 crc kubenswrapper[4811]: I0128 18:03:53.358606 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kvt7d" Jan 28 18:03:53 crc kubenswrapper[4811]: I0128 18:03:53.406270 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/redhat-marketplace-kvt7d" Jan 28 18:03:54 crc kubenswrapper[4811]: I0128 18:03:54.149307 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kvt7d" Jan 28 18:03:54 crc kubenswrapper[4811]: I0128 18:03:54.212967 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kvt7d"] Jan 28 18:03:56 crc kubenswrapper[4811]: I0128 18:03:56.118579 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-kvt7d" podUID="0af2cbdd-8055-452c-8dc9-4701e0fe9e56" containerName="registry-server" containerID="cri-o://e4d973bb933d7fe7ed6872c839979257983e0d8923c38f643b91a69ba061494d" gracePeriod=2 Jan 28 18:03:56 crc kubenswrapper[4811]: I0128 18:03:56.661788 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kvt7d" Jan 28 18:03:56 crc kubenswrapper[4811]: I0128 18:03:56.770011 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0af2cbdd-8055-452c-8dc9-4701e0fe9e56-utilities\") pod \"0af2cbdd-8055-452c-8dc9-4701e0fe9e56\" (UID: \"0af2cbdd-8055-452c-8dc9-4701e0fe9e56\") " Jan 28 18:03:56 crc kubenswrapper[4811]: I0128 18:03:56.770156 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0af2cbdd-8055-452c-8dc9-4701e0fe9e56-catalog-content\") pod \"0af2cbdd-8055-452c-8dc9-4701e0fe9e56\" (UID: \"0af2cbdd-8055-452c-8dc9-4701e0fe9e56\") " Jan 28 18:03:56 crc kubenswrapper[4811]: I0128 18:03:56.770306 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c8ctg\" (UniqueName: \"kubernetes.io/projected/0af2cbdd-8055-452c-8dc9-4701e0fe9e56-kube-api-access-c8ctg\") pod \"0af2cbdd-8055-452c-8dc9-4701e0fe9e56\" (UID: \"0af2cbdd-8055-452c-8dc9-4701e0fe9e56\") " Jan 28 18:03:56 crc kubenswrapper[4811]: I0128 18:03:56.772383 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0af2cbdd-8055-452c-8dc9-4701e0fe9e56-utilities" (OuterVolumeSpecName: "utilities") pod "0af2cbdd-8055-452c-8dc9-4701e0fe9e56" (UID: "0af2cbdd-8055-452c-8dc9-4701e0fe9e56"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 18:03:56 crc kubenswrapper[4811]: I0128 18:03:56.778191 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0af2cbdd-8055-452c-8dc9-4701e0fe9e56-kube-api-access-c8ctg" (OuterVolumeSpecName: "kube-api-access-c8ctg") pod "0af2cbdd-8055-452c-8dc9-4701e0fe9e56" (UID: "0af2cbdd-8055-452c-8dc9-4701e0fe9e56"). InnerVolumeSpecName "kube-api-access-c8ctg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 18:03:56 crc kubenswrapper[4811]: I0128 18:03:56.805608 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0af2cbdd-8055-452c-8dc9-4701e0fe9e56-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0af2cbdd-8055-452c-8dc9-4701e0fe9e56" (UID: "0af2cbdd-8055-452c-8dc9-4701e0fe9e56"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 18:03:56 crc kubenswrapper[4811]: I0128 18:03:56.873088 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0af2cbdd-8055-452c-8dc9-4701e0fe9e56-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 18:03:56 crc kubenswrapper[4811]: I0128 18:03:56.873376 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c8ctg\" (UniqueName: \"kubernetes.io/projected/0af2cbdd-8055-452c-8dc9-4701e0fe9e56-kube-api-access-c8ctg\") on node \"crc\" DevicePath \"\"" Jan 28 18:03:56 crc kubenswrapper[4811]: I0128 18:03:56.873493 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0af2cbdd-8055-452c-8dc9-4701e0fe9e56-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 18:03:57 crc kubenswrapper[4811]: I0128 18:03:57.131582 4811 generic.go:334] "Generic (PLEG): container finished" podID="0af2cbdd-8055-452c-8dc9-4701e0fe9e56" containerID="e4d973bb933d7fe7ed6872c839979257983e0d8923c38f643b91a69ba061494d" exitCode=0 Jan 28 18:03:57 crc kubenswrapper[4811]: I0128 18:03:57.131643 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kvt7d" event={"ID":"0af2cbdd-8055-452c-8dc9-4701e0fe9e56","Type":"ContainerDied","Data":"e4d973bb933d7fe7ed6872c839979257983e0d8923c38f643b91a69ba061494d"} Jan 28 18:03:57 crc kubenswrapper[4811]: I0128 18:03:57.131687 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kvt7d" event={"ID":"0af2cbdd-8055-452c-8dc9-4701e0fe9e56","Type":"ContainerDied","Data":"530d34293ba6b9eb509506dd8806fa811f20c1a5305e7d835fe30bba0933d81b"} Jan 28 18:03:57 crc kubenswrapper[4811]: I0128 18:03:57.131709 4811 scope.go:117] "RemoveContainer" containerID="e4d973bb933d7fe7ed6872c839979257983e0d8923c38f643b91a69ba061494d" Jan 28 18:03:57 crc kubenswrapper[4811]: I0128 18:03:57.131714 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kvt7d" Jan 28 18:03:57 crc kubenswrapper[4811]: I0128 18:03:57.156311 4811 scope.go:117] "RemoveContainer" containerID="7bf36d6c97448e14a04ca9b16012e3ad5520b1339cd709a072475a8aa250afc1" Jan 28 18:03:57 crc kubenswrapper[4811]: I0128 18:03:57.172457 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kvt7d"] Jan 28 18:03:57 crc kubenswrapper[4811]: I0128 18:03:57.184325 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-kvt7d"] Jan 28 18:03:57 crc kubenswrapper[4811]: I0128 18:03:57.210180 4811 scope.go:117] "RemoveContainer" containerID="ba7d8488eeac58bf63c8e375ef47690d4f14c7f15dafb5978bb6dcce2eb4a685" Jan 28 18:03:57 crc kubenswrapper[4811]: I0128 18:03:57.240865 4811 scope.go:117] "RemoveContainer" containerID="e4d973bb933d7fe7ed6872c839979257983e0d8923c38f643b91a69ba061494d" Jan 28 18:03:57 crc kubenswrapper[4811]: E0128 18:03:57.241722 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4d973bb933d7fe7ed6872c839979257983e0d8923c38f643b91a69ba061494d\": container with ID starting with e4d973bb933d7fe7ed6872c839979257983e0d8923c38f643b91a69ba061494d not found: ID does not exist" containerID="e4d973bb933d7fe7ed6872c839979257983e0d8923c38f643b91a69ba061494d" Jan 28 18:03:57 crc kubenswrapper[4811]: I0128 18:03:57.241782 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4d973bb933d7fe7ed6872c839979257983e0d8923c38f643b91a69ba061494d"} err="failed to get container status \"e4d973bb933d7fe7ed6872c839979257983e0d8923c38f643b91a69ba061494d\": rpc error: code = NotFound desc = could not find container \"e4d973bb933d7fe7ed6872c839979257983e0d8923c38f643b91a69ba061494d\": container with ID starting with e4d973bb933d7fe7ed6872c839979257983e0d8923c38f643b91a69ba061494d not found: ID does not exist" Jan 28 18:03:57 crc kubenswrapper[4811]: I0128 18:03:57.241810 4811 scope.go:117] "RemoveContainer" containerID="7bf36d6c97448e14a04ca9b16012e3ad5520b1339cd709a072475a8aa250afc1" Jan 28 18:03:57 crc kubenswrapper[4811]: E0128 18:03:57.242721 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7bf36d6c97448e14a04ca9b16012e3ad5520b1339cd709a072475a8aa250afc1\": container with ID starting with 7bf36d6c97448e14a04ca9b16012e3ad5520b1339cd709a072475a8aa250afc1 not found: ID does not exist" containerID="7bf36d6c97448e14a04ca9b16012e3ad5520b1339cd709a072475a8aa250afc1" Jan 28 18:03:57 crc kubenswrapper[4811]: I0128 18:03:57.242758 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7bf36d6c97448e14a04ca9b16012e3ad5520b1339cd709a072475a8aa250afc1"} err="failed to get container status \"7bf36d6c97448e14a04ca9b16012e3ad5520b1339cd709a072475a8aa250afc1\": rpc error: code = NotFound desc = could not find container \"7bf36d6c97448e14a04ca9b16012e3ad5520b1339cd709a072475a8aa250afc1\": container with ID starting with 7bf36d6c97448e14a04ca9b16012e3ad5520b1339cd709a072475a8aa250afc1 not found: ID does not exist" Jan 28 18:03:57 crc kubenswrapper[4811]: I0128 18:03:57.242780 4811 scope.go:117] "RemoveContainer" containerID="ba7d8488eeac58bf63c8e375ef47690d4f14c7f15dafb5978bb6dcce2eb4a685" Jan 28 18:03:57 crc kubenswrapper[4811]: E0128 18:03:57.243100 4811 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"ba7d8488eeac58bf63c8e375ef47690d4f14c7f15dafb5978bb6dcce2eb4a685\": container with ID starting with ba7d8488eeac58bf63c8e375ef47690d4f14c7f15dafb5978bb6dcce2eb4a685 not found: ID does not exist" containerID="ba7d8488eeac58bf63c8e375ef47690d4f14c7f15dafb5978bb6dcce2eb4a685" Jan 28 18:03:57 crc kubenswrapper[4811]: I0128 18:03:57.243137 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba7d8488eeac58bf63c8e375ef47690d4f14c7f15dafb5978bb6dcce2eb4a685"} err="failed to get container status \"ba7d8488eeac58bf63c8e375ef47690d4f14c7f15dafb5978bb6dcce2eb4a685\": rpc error: code = NotFound desc = could not find container \"ba7d8488eeac58bf63c8e375ef47690d4f14c7f15dafb5978bb6dcce2eb4a685\": container with ID starting with ba7d8488eeac58bf63c8e375ef47690d4f14c7f15dafb5978bb6dcce2eb4a685 not found: ID does not exist" Jan 28 18:03:58 crc kubenswrapper[4811]: I0128 18:03:58.354975 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0af2cbdd-8055-452c-8dc9-4701e0fe9e56" path="/var/lib/kubelet/pods/0af2cbdd-8055-452c-8dc9-4701e0fe9e56/volumes" Jan 28 18:04:00 crc kubenswrapper[4811]: I0128 18:04:00.165689 4811 generic.go:334] "Generic (PLEG): container finished" podID="f7f2d4fc-f0c1-43a5-adfe-2a91386f5170" containerID="80abe6b7770312be77eb1d025162fa43f59559b31e83136b8ab5e194c4da2aed" exitCode=0 Jan 28 18:04:00 crc kubenswrapper[4811]: I0128 18:04:00.165777 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" event={"ID":"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170","Type":"ContainerDied","Data":"80abe6b7770312be77eb1d025162fa43f59559b31e83136b8ab5e194c4da2aed"} Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.692975 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.784970 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-ssh-key-openstack-cell1\") pod \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.785097 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-cells-global-config-0\") pod \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.785152 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cl6rz\" (UniqueName: \"kubernetes.io/projected/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-kube-api-access-cl6rz\") pod \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.785218 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-cells-global-config-1\") pod \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.785247 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-migration-ssh-key-0\") pod \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.785298 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-migration-ssh-key-1\") pod \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.785333 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-ceph\") pod \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.785413 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-cell1-combined-ca-bundle\") pod \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.785487 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-cell1-compute-config-0\") pod \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.785635 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"inventory\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-inventory\") pod \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.785664 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-cell1-compute-config-1\") pod \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\" (UID: \"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170\") " Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.800259 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-cell1-combined-ca-bundle" (OuterVolumeSpecName: "nova-cell1-combined-ca-bundle") pod "f7f2d4fc-f0c1-43a5-adfe-2a91386f5170" (UID: "f7f2d4fc-f0c1-43a5-adfe-2a91386f5170"). InnerVolumeSpecName "nova-cell1-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.805271 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-ceph" (OuterVolumeSpecName: "ceph") pod "f7f2d4fc-f0c1-43a5-adfe-2a91386f5170" (UID: "f7f2d4fc-f0c1-43a5-adfe-2a91386f5170"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.806754 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-kube-api-access-cl6rz" (OuterVolumeSpecName: "kube-api-access-cl6rz") pod "f7f2d4fc-f0c1-43a5-adfe-2a91386f5170" (UID: "f7f2d4fc-f0c1-43a5-adfe-2a91386f5170"). InnerVolumeSpecName "kube-api-access-cl6rz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.832278 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-cells-global-config-1" (OuterVolumeSpecName: "nova-cells-global-config-1") pod "f7f2d4fc-f0c1-43a5-adfe-2a91386f5170" (UID: "f7f2d4fc-f0c1-43a5-adfe-2a91386f5170"). InnerVolumeSpecName "nova-cells-global-config-1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.833128 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-cells-global-config-0" (OuterVolumeSpecName: "nova-cells-global-config-0") pod "f7f2d4fc-f0c1-43a5-adfe-2a91386f5170" (UID: "f7f2d4fc-f0c1-43a5-adfe-2a91386f5170"). InnerVolumeSpecName "nova-cells-global-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.837210 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "f7f2d4fc-f0c1-43a5-adfe-2a91386f5170" (UID: "f7f2d4fc-f0c1-43a5-adfe-2a91386f5170"). InnerVolumeSpecName "nova-migration-ssh-key-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.837688 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "f7f2d4fc-f0c1-43a5-adfe-2a91386f5170" (UID: "f7f2d4fc-f0c1-43a5-adfe-2a91386f5170"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.840667 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "f7f2d4fc-f0c1-43a5-adfe-2a91386f5170" (UID: "f7f2d4fc-f0c1-43a5-adfe-2a91386f5170"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.852645 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "f7f2d4fc-f0c1-43a5-adfe-2a91386f5170" (UID: "f7f2d4fc-f0c1-43a5-adfe-2a91386f5170"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.869037 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-inventory" (OuterVolumeSpecName: "inventory") pod "f7f2d4fc-f0c1-43a5-adfe-2a91386f5170" (UID: "f7f2d4fc-f0c1-43a5-adfe-2a91386f5170"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.878012 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "f7f2d4fc-f0c1-43a5-adfe-2a91386f5170" (UID: "f7f2d4fc-f0c1-43a5-adfe-2a91386f5170"). InnerVolumeSpecName "nova-cell1-compute-config-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.892087 4811 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-inventory\") on node \"crc\" DevicePath \"\"" Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.892371 4811 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.892478 4811 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.892604 4811 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-cells-global-config-0\") on node \"crc\" DevicePath \"\"" Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.892713 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cl6rz\" (UniqueName: \"kubernetes.io/projected/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-kube-api-access-cl6rz\") on node \"crc\" DevicePath \"\"" Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.892910 4811 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-cells-global-config-1\") on node \"crc\" DevicePath \"\"" Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.892997 4811 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.893079 4811 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.893154 4811 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-ceph\") on node \"crc\" DevicePath \"\"" Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.893224 4811 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-cell1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 18:04:01 crc kubenswrapper[4811]: I0128 18:04:01.893305 4811 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f7f2d4fc-f0c1-43a5-adfe-2a91386f5170-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.188969 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98" event={"ID":"f7f2d4fc-f0c1-43a5-adfe-2a91386f5170","Type":"ContainerDied","Data":"89497894222c71cf7c216c1834f84efc04195652ceadb9f8cd3e419239c06d11"} Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.189321 4811 
Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.189198 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-k5j98"
Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.295038 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-pgzw9"]
Jan 28 18:04:02 crc kubenswrapper[4811]: E0128 18:04:02.295601 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0af2cbdd-8055-452c-8dc9-4701e0fe9e56" containerName="extract-utilities"
Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.295624 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="0af2cbdd-8055-452c-8dc9-4701e0fe9e56" containerName="extract-utilities"
Jan 28 18:04:02 crc kubenswrapper[4811]: E0128 18:04:02.295644 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0af2cbdd-8055-452c-8dc9-4701e0fe9e56" containerName="registry-server"
Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.295653 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="0af2cbdd-8055-452c-8dc9-4701e0fe9e56" containerName="registry-server"
Jan 28 18:04:02 crc kubenswrapper[4811]: E0128 18:04:02.295671 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7f2d4fc-f0c1-43a5-adfe-2a91386f5170" containerName="nova-cell1-openstack-openstack-cell1"
Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.295681 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7f2d4fc-f0c1-43a5-adfe-2a91386f5170" containerName="nova-cell1-openstack-openstack-cell1"
Jan 28 18:04:02 crc kubenswrapper[4811]: E0128 18:04:02.295721 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0af2cbdd-8055-452c-8dc9-4701e0fe9e56" containerName="extract-content"
Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.295729 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="0af2cbdd-8055-452c-8dc9-4701e0fe9e56" containerName="extract-content"
Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.296018 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7f2d4fc-f0c1-43a5-adfe-2a91386f5170" containerName="nova-cell1-openstack-openstack-cell1"
Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.296044 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="0af2cbdd-8055-452c-8dc9-4701e0fe9e56" containerName="registry-server"
Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.297851 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-pgzw9"
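The cpu_manager/memory_manager burst above fires when a new pod is admitted: before computing fresh assignments, the managers drop state recorded for containers of pods that no longer exist on the node (here, the just-deleted marketplace and nova-cell1 pods). A Go sketch of that bookkeeping (hypothetical types; the real managers keep much richer per-NUMA state):

package main

import "fmt"

// assignmentKey identifies per-container resource state, matching the
// (podUID, containerName) pairs in the "RemoveStaleState" messages above.
type assignmentKey struct{ podUID, containerName string }

type resourceManager struct {
	assignments map[assignmentKey][]int // e.g. pinned CPU IDs per container
}

// removeStaleState deletes assignments for pods the kubelet no longer tracks,
// so a newly admitted pod cannot inherit a dead pod's reservations.
func (m *resourceManager) removeStaleState(activePods map[string]bool) {
	for key := range m.assignments {
		if !activePods[key.podUID] {
			fmt.Printf("RemoveStaleState: removing container %q of pod %q\n",
				key.containerName, key.podUID)
			delete(m.assignments, key)
		}
	}
}

func main() {
	m := &resourceManager{assignments: map[assignmentKey][]int{
		{"0af2cbdd-8055-452c-8dc9-4701e0fe9e56", "registry-server"}: {2, 3},
	}}
	m.removeStaleState(map[string]bool{ /* the deleted pod is absent */ })
	fmt.Println("remaining assignments:", len(m.assignments)) // 0
}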
Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-pgzw9" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.301987 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-856xk" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.302341 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.302524 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.302903 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.303622 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.321880 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-pgzw9"] Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.397642 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-bz5np"] Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.400123 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bz5np" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.407146 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-ssh-key-openstack-cell1\") pod \"telemetry-openstack-openstack-cell1-pgzw9\" (UID: \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\") " pod="openstack/telemetry-openstack-openstack-cell1-pgzw9" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.407204 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-inventory\") pod \"telemetry-openstack-openstack-cell1-pgzw9\" (UID: \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\") " pod="openstack/telemetry-openstack-openstack-cell1-pgzw9" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.407234 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-pgzw9\" (UID: \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\") " pod="openstack/telemetry-openstack-openstack-cell1-pgzw9" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.407363 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-ceph\") pod \"telemetry-openstack-openstack-cell1-pgzw9\" (UID: \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\") " pod="openstack/telemetry-openstack-openstack-cell1-pgzw9" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.407451 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-ceilometer-compute-config-data-0\") pod 
\"telemetry-openstack-openstack-cell1-pgzw9\" (UID: \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\") " pod="openstack/telemetry-openstack-openstack-cell1-pgzw9" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.407484 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzcp9\" (UniqueName: \"kubernetes.io/projected/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-kube-api-access-dzcp9\") pod \"telemetry-openstack-openstack-cell1-pgzw9\" (UID: \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\") " pod="openstack/telemetry-openstack-openstack-cell1-pgzw9" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.407569 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-pgzw9\" (UID: \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\") " pod="openstack/telemetry-openstack-openstack-cell1-pgzw9" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.407634 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-pgzw9\" (UID: \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\") " pod="openstack/telemetry-openstack-openstack-cell1-pgzw9" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.412677 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bz5np"] Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.509784 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-pgzw9\" (UID: \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\") " pod="openstack/telemetry-openstack-openstack-cell1-pgzw9" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.509868 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-pgzw9\" (UID: \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\") " pod="openstack/telemetry-openstack-openstack-cell1-pgzw9" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.509898 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-ssh-key-openstack-cell1\") pod \"telemetry-openstack-openstack-cell1-pgzw9\" (UID: \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\") " pod="openstack/telemetry-openstack-openstack-cell1-pgzw9" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.509932 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-inventory\") pod \"telemetry-openstack-openstack-cell1-pgzw9\" (UID: \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\") " pod="openstack/telemetry-openstack-openstack-cell1-pgzw9" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.509958 4811 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c4f56df-0901-49f4-bd0f-970589e2f8cc-utilities\") pod \"redhat-operators-bz5np\" (UID: \"5c4f56df-0901-49f4-bd0f-970589e2f8cc\") " pod="openshift-marketplace/redhat-operators-bz5np" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.509983 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-pgzw9\" (UID: \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\") " pod="openstack/telemetry-openstack-openstack-cell1-pgzw9" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.510046 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-ceph\") pod \"telemetry-openstack-openstack-cell1-pgzw9\" (UID: \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\") " pod="openstack/telemetry-openstack-openstack-cell1-pgzw9" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.510080 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c4f56df-0901-49f4-bd0f-970589e2f8cc-catalog-content\") pod \"redhat-operators-bz5np\" (UID: \"5c4f56df-0901-49f4-bd0f-970589e2f8cc\") " pod="openshift-marketplace/redhat-operators-bz5np" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.510098 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-pgzw9\" (UID: \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\") " pod="openstack/telemetry-openstack-openstack-cell1-pgzw9" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.510117 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzcp9\" (UniqueName: \"kubernetes.io/projected/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-kube-api-access-dzcp9\") pod \"telemetry-openstack-openstack-cell1-pgzw9\" (UID: \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\") " pod="openstack/telemetry-openstack-openstack-cell1-pgzw9" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.510168 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9sqqc\" (UniqueName: \"kubernetes.io/projected/5c4f56df-0901-49f4-bd0f-970589e2f8cc-kube-api-access-9sqqc\") pod \"redhat-operators-bz5np\" (UID: \"5c4f56df-0901-49f4-bd0f-970589e2f8cc\") " pod="openshift-marketplace/redhat-operators-bz5np" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.517271 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-inventory\") pod \"telemetry-openstack-openstack-cell1-pgzw9\" (UID: \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\") " pod="openstack/telemetry-openstack-openstack-cell1-pgzw9" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.519599 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-ceph\") pod \"telemetry-openstack-openstack-cell1-pgzw9\" (UID: 
\"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\") " pod="openstack/telemetry-openstack-openstack-cell1-pgzw9" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.519672 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-pgzw9\" (UID: \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\") " pod="openstack/telemetry-openstack-openstack-cell1-pgzw9" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.520329 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-ssh-key-openstack-cell1\") pod \"telemetry-openstack-openstack-cell1-pgzw9\" (UID: \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\") " pod="openstack/telemetry-openstack-openstack-cell1-pgzw9" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.520917 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-pgzw9\" (UID: \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\") " pod="openstack/telemetry-openstack-openstack-cell1-pgzw9" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.523028 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-pgzw9\" (UID: \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\") " pod="openstack/telemetry-openstack-openstack-cell1-pgzw9" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.523653 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-pgzw9\" (UID: \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\") " pod="openstack/telemetry-openstack-openstack-cell1-pgzw9" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.528351 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzcp9\" (UniqueName: \"kubernetes.io/projected/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-kube-api-access-dzcp9\") pod \"telemetry-openstack-openstack-cell1-pgzw9\" (UID: \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\") " pod="openstack/telemetry-openstack-openstack-cell1-pgzw9" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.612104 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c4f56df-0901-49f4-bd0f-970589e2f8cc-utilities\") pod \"redhat-operators-bz5np\" (UID: \"5c4f56df-0901-49f4-bd0f-970589e2f8cc\") " pod="openshift-marketplace/redhat-operators-bz5np" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.612252 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c4f56df-0901-49f4-bd0f-970589e2f8cc-catalog-content\") pod \"redhat-operators-bz5np\" (UID: \"5c4f56df-0901-49f4-bd0f-970589e2f8cc\") " pod="openshift-marketplace/redhat-operators-bz5np" Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.612332 4811 
Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.613216 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c4f56df-0901-49f4-bd0f-970589e2f8cc-catalog-content\") pod \"redhat-operators-bz5np\" (UID: \"5c4f56df-0901-49f4-bd0f-970589e2f8cc\") " pod="openshift-marketplace/redhat-operators-bz5np"
Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.613583 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c4f56df-0901-49f4-bd0f-970589e2f8cc-utilities\") pod \"redhat-operators-bz5np\" (UID: \"5c4f56df-0901-49f4-bd0f-970589e2f8cc\") " pod="openshift-marketplace/redhat-operators-bz5np"
Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.622721 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-pgzw9"
Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.631618 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9sqqc\" (UniqueName: \"kubernetes.io/projected/5c4f56df-0901-49f4-bd0f-970589e2f8cc-kube-api-access-9sqqc\") pod \"redhat-operators-bz5np\" (UID: \"5c4f56df-0901-49f4-bd0f-970589e2f8cc\") " pod="openshift-marketplace/redhat-operators-bz5np"
Jan 28 18:04:02 crc kubenswrapper[4811]: I0128 18:04:02.730980 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bz5np"
Need to start a new one" pod="openshift-marketplace/redhat-operators-bz5np" Jan 28 18:04:03 crc kubenswrapper[4811]: I0128 18:04:03.087165 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 18:04:03 crc kubenswrapper[4811]: I0128 18:04:03.087479 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 18:04:03 crc kubenswrapper[4811]: I0128 18:04:03.290650 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-pgzw9"] Jan 28 18:04:03 crc kubenswrapper[4811]: W0128 18:04:03.335575 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5c4f56df_0901_49f4_bd0f_970589e2f8cc.slice/crio-64adf7032670ec597e40bd2329936fc61b9e0ac4fa704fdd6e68910c8cd63337 WatchSource:0}: Error finding container 64adf7032670ec597e40bd2329936fc61b9e0ac4fa704fdd6e68910c8cd63337: Status 404 returned error can't find the container with id 64adf7032670ec597e40bd2329936fc61b9e0ac4fa704fdd6e68910c8cd63337 Jan 28 18:04:03 crc kubenswrapper[4811]: I0128 18:04:03.344267 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bz5np"] Jan 28 18:04:04 crc kubenswrapper[4811]: I0128 18:04:04.219777 4811 generic.go:334] "Generic (PLEG): container finished" podID="5c4f56df-0901-49f4-bd0f-970589e2f8cc" containerID="832c7848492a547916c716290818c3793bc9bafbd71cc47ded256ea21ef07098" exitCode=0 Jan 28 18:04:04 crc kubenswrapper[4811]: I0128 18:04:04.220132 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bz5np" event={"ID":"5c4f56df-0901-49f4-bd0f-970589e2f8cc","Type":"ContainerDied","Data":"832c7848492a547916c716290818c3793bc9bafbd71cc47ded256ea21ef07098"} Jan 28 18:04:04 crc kubenswrapper[4811]: I0128 18:04:04.220169 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bz5np" event={"ID":"5c4f56df-0901-49f4-bd0f-970589e2f8cc","Type":"ContainerStarted","Data":"64adf7032670ec597e40bd2329936fc61b9e0ac4fa704fdd6e68910c8cd63337"} Jan 28 18:04:04 crc kubenswrapper[4811]: I0128 18:04:04.233285 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-pgzw9" event={"ID":"b594ffd2-2b1d-40c4-9d18-9831dd6c9816","Type":"ContainerStarted","Data":"c1b094b91e8b4c0ee504a96ae4490e0e0eeb94b1210e19137646c5f94b0e5333"} Jan 28 18:04:05 crc kubenswrapper[4811]: I0128 18:04:05.258589 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-pgzw9" event={"ID":"b594ffd2-2b1d-40c4-9d18-9831dd6c9816","Type":"ContainerStarted","Data":"b79d402a26ad8221a2a11dc05ae18d754fc27c146e57d3f7fcd7047c40cd5946"} Jan 28 18:04:05 crc kubenswrapper[4811]: I0128 18:04:05.295699 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-openstack-openstack-cell1-pgzw9" podStartSLOduration=2.135165656 podStartE2EDuration="3.295670982s" podCreationTimestamp="2026-01-28 
18:04:02 +0000 UTC" firstStartedPulling="2026-01-28 18:04:03.310914307 +0000 UTC m=+8336.065277890" lastFinishedPulling="2026-01-28 18:04:04.471419633 +0000 UTC m=+8337.225783216" observedRunningTime="2026-01-28 18:04:05.286817711 +0000 UTC m=+8338.041181324" watchObservedRunningTime="2026-01-28 18:04:05.295670982 +0000 UTC m=+8338.050034565" Jan 28 18:04:07 crc kubenswrapper[4811]: I0128 18:04:07.279860 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bz5np" event={"ID":"5c4f56df-0901-49f4-bd0f-970589e2f8cc","Type":"ContainerStarted","Data":"7888a619e39a4687ee3e80db11ca9668d909e92981ce8a3db0ee145b0ee6b47c"} Jan 28 18:04:09 crc kubenswrapper[4811]: I0128 18:04:09.319001 4811 generic.go:334] "Generic (PLEG): container finished" podID="5c4f56df-0901-49f4-bd0f-970589e2f8cc" containerID="7888a619e39a4687ee3e80db11ca9668d909e92981ce8a3db0ee145b0ee6b47c" exitCode=0 Jan 28 18:04:09 crc kubenswrapper[4811]: I0128 18:04:09.319228 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bz5np" event={"ID":"5c4f56df-0901-49f4-bd0f-970589e2f8cc","Type":"ContainerDied","Data":"7888a619e39a4687ee3e80db11ca9668d909e92981ce8a3db0ee145b0ee6b47c"} Jan 28 18:04:18 crc kubenswrapper[4811]: I0128 18:04:18.422118 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bz5np" event={"ID":"5c4f56df-0901-49f4-bd0f-970589e2f8cc","Type":"ContainerStarted","Data":"37bf4c7e985b2eda711d02668bd0d679316851e818d2b80c0fcd90d553f91a85"} Jan 28 18:04:22 crc kubenswrapper[4811]: I0128 18:04:22.731972 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-bz5np" Jan 28 18:04:22 crc kubenswrapper[4811]: I0128 18:04:22.732498 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-bz5np" Jan 28 18:04:23 crc kubenswrapper[4811]: I0128 18:04:23.779542 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-bz5np" podUID="5c4f56df-0901-49f4-bd0f-970589e2f8cc" containerName="registry-server" probeResult="failure" output=< Jan 28 18:04:23 crc kubenswrapper[4811]: timeout: failed to connect service ":50051" within 1s Jan 28 18:04:23 crc kubenswrapper[4811]: > Jan 28 18:04:32 crc kubenswrapper[4811]: I0128 18:04:32.784235 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-bz5np" Jan 28 18:04:32 crc kubenswrapper[4811]: I0128 18:04:32.812993 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-bz5np" podStartSLOduration=22.897777979 podStartE2EDuration="30.81297542s" podCreationTimestamp="2026-01-28 18:04:02 +0000 UTC" firstStartedPulling="2026-01-28 18:04:04.340344026 +0000 UTC m=+8337.094707609" lastFinishedPulling="2026-01-28 18:04:12.255541467 +0000 UTC m=+8345.009905050" observedRunningTime="2026-01-28 18:04:18.442986657 +0000 UTC m=+8351.197350240" watchObservedRunningTime="2026-01-28 18:04:32.81297542 +0000 UTC m=+8365.567339003" Jan 28 18:04:32 crc kubenswrapper[4811]: I0128 18:04:32.841927 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-bz5np" Jan 28 18:04:33 crc kubenswrapper[4811]: I0128 18:04:33.088165 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon 
Jan 28 18:04:33 crc kubenswrapper[4811]: I0128 18:04:33.088509 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 18:04:33 crc kubenswrapper[4811]: I0128 18:04:33.088614 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6"
Jan 28 18:04:33 crc kubenswrapper[4811]: I0128 18:04:33.089518 4811 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"52e2af28e60ab3db25494ca65635994bbcd159056152d6f183dc3f10f6758b00"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 28 18:04:33 crc kubenswrapper[4811]: I0128 18:04:33.089670 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://52e2af28e60ab3db25494ca65635994bbcd159056152d6f183dc3f10f6758b00" gracePeriod=600
Jan 28 18:04:33 crc kubenswrapper[4811]: E0128 18:04:33.237827 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 18:04:33 crc kubenswrapper[4811]: I0128 18:04:33.588157 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bz5np"]
Jan 28 18:04:33 crc kubenswrapper[4811]: I0128 18:04:33.594527 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="52e2af28e60ab3db25494ca65635994bbcd159056152d6f183dc3f10f6758b00" exitCode=0
Jan 28 18:04:33 crc kubenswrapper[4811]: I0128 18:04:33.594577 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"52e2af28e60ab3db25494ca65635994bbcd159056152d6f183dc3f10f6758b00"}
Jan 28 18:04:33 crc kubenswrapper[4811]: I0128 18:04:33.594692 4811 scope.go:117] "RemoveContainer" containerID="3c50e1a118002ad5673703280109cdd35f9243f49b098e3b9e9d8b0f7d945e39"
Jan 28 18:04:33 crc kubenswrapper[4811]: I0128 18:04:33.595064 4811 scope.go:117] "RemoveContainer" containerID="52e2af28e60ab3db25494ca65635994bbcd159056152d6f183dc3f10f6758b00"
Jan 28 18:04:33 crc kubenswrapper[4811]: E0128 18:04:33.595320 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 18:04:34 crc kubenswrapper[4811]: I0128 18:04:34.608494 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-bz5np" podUID="5c4f56df-0901-49f4-bd0f-970589e2f8cc" containerName="registry-server" containerID="cri-o://37bf4c7e985b2eda711d02668bd0d679316851e818d2b80c0fcd90d553f91a85" gracePeriod=2
Jan 28 18:04:35 crc kubenswrapper[4811]: I0128 18:04:35.623152 4811 generic.go:334] "Generic (PLEG): container finished" podID="5c4f56df-0901-49f4-bd0f-970589e2f8cc" containerID="37bf4c7e985b2eda711d02668bd0d679316851e818d2b80c0fcd90d553f91a85" exitCode=0
Jan 28 18:04:35 crc kubenswrapper[4811]: I0128 18:04:35.623702 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bz5np" event={"ID":"5c4f56df-0901-49f4-bd0f-970589e2f8cc","Type":"ContainerDied","Data":"37bf4c7e985b2eda711d02668bd0d679316851e818d2b80c0fcd90d553f91a85"}
Jan 28 18:04:35 crc kubenswrapper[4811]: I0128 18:04:35.834575 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bz5np"
Jan 28 18:04:35 crc kubenswrapper[4811]: I0128 18:04:35.893190 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c4f56df-0901-49f4-bd0f-970589e2f8cc-utilities\") pod \"5c4f56df-0901-49f4-bd0f-970589e2f8cc\" (UID: \"5c4f56df-0901-49f4-bd0f-970589e2f8cc\") "
Jan 28 18:04:35 crc kubenswrapper[4811]: I0128 18:04:35.893322 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c4f56df-0901-49f4-bd0f-970589e2f8cc-catalog-content\") pod \"5c4f56df-0901-49f4-bd0f-970589e2f8cc\" (UID: \"5c4f56df-0901-49f4-bd0f-970589e2f8cc\") "
Jan 28 18:04:35 crc kubenswrapper[4811]: I0128 18:04:35.893606 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9sqqc\" (UniqueName: \"kubernetes.io/projected/5c4f56df-0901-49f4-bd0f-970589e2f8cc-kube-api-access-9sqqc\") pod \"5c4f56df-0901-49f4-bd0f-970589e2f8cc\" (UID: \"5c4f56df-0901-49f4-bd0f-970589e2f8cc\") "
Jan 28 18:04:35 crc kubenswrapper[4811]: I0128 18:04:35.896032 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c4f56df-0901-49f4-bd0f-970589e2f8cc-utilities" (OuterVolumeSpecName: "utilities") pod "5c4f56df-0901-49f4-bd0f-970589e2f8cc" (UID: "5c4f56df-0901-49f4-bd0f-970589e2f8cc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 18:04:35 crc kubenswrapper[4811]: I0128 18:04:35.896341 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c4f56df-0901-49f4-bd0f-970589e2f8cc-utilities\") on node \"crc\" DevicePath \"\""
Jan 28 18:04:35 crc kubenswrapper[4811]: I0128 18:04:35.921475 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c4f56df-0901-49f4-bd0f-970589e2f8cc-kube-api-access-9sqqc" (OuterVolumeSpecName: "kube-api-access-9sqqc") pod "5c4f56df-0901-49f4-bd0f-970589e2f8cc" (UID: "5c4f56df-0901-49f4-bd0f-970589e2f8cc"). InnerVolumeSpecName "kube-api-access-9sqqc". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 18:04:35 crc kubenswrapper[4811]: I0128 18:04:35.998686 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9sqqc\" (UniqueName: \"kubernetes.io/projected/5c4f56df-0901-49f4-bd0f-970589e2f8cc-kube-api-access-9sqqc\") on node \"crc\" DevicePath \"\"" Jan 28 18:04:36 crc kubenswrapper[4811]: I0128 18:04:36.029637 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c4f56df-0901-49f4-bd0f-970589e2f8cc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5c4f56df-0901-49f4-bd0f-970589e2f8cc" (UID: "5c4f56df-0901-49f4-bd0f-970589e2f8cc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 18:04:36 crc kubenswrapper[4811]: I0128 18:04:36.100695 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c4f56df-0901-49f4-bd0f-970589e2f8cc-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 18:04:36 crc kubenswrapper[4811]: I0128 18:04:36.633039 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bz5np" event={"ID":"5c4f56df-0901-49f4-bd0f-970589e2f8cc","Type":"ContainerDied","Data":"64adf7032670ec597e40bd2329936fc61b9e0ac4fa704fdd6e68910c8cd63337"} Jan 28 18:04:36 crc kubenswrapper[4811]: I0128 18:04:36.633100 4811 scope.go:117] "RemoveContainer" containerID="37bf4c7e985b2eda711d02668bd0d679316851e818d2b80c0fcd90d553f91a85" Jan 28 18:04:36 crc kubenswrapper[4811]: I0128 18:04:36.633108 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bz5np" Jan 28 18:04:36 crc kubenswrapper[4811]: I0128 18:04:36.658873 4811 scope.go:117] "RemoveContainer" containerID="7888a619e39a4687ee3e80db11ca9668d909e92981ce8a3db0ee145b0ee6b47c" Jan 28 18:04:36 crc kubenswrapper[4811]: I0128 18:04:36.660239 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bz5np"] Jan 28 18:04:36 crc kubenswrapper[4811]: I0128 18:04:36.670140 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-bz5np"] Jan 28 18:04:36 crc kubenswrapper[4811]: I0128 18:04:36.690131 4811 scope.go:117] "RemoveContainer" containerID="832c7848492a547916c716290818c3793bc9bafbd71cc47ded256ea21ef07098" Jan 28 18:04:38 crc kubenswrapper[4811]: I0128 18:04:38.350485 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c4f56df-0901-49f4-bd0f-970589e2f8cc" path="/var/lib/kubelet/pods/5c4f56df-0901-49f4-bd0f-970589e2f8cc/volumes" Jan 28 18:04:46 crc kubenswrapper[4811]: I0128 18:04:46.339672 4811 scope.go:117] "RemoveContainer" containerID="52e2af28e60ab3db25494ca65635994bbcd159056152d6f183dc3f10f6758b00" Jan 28 18:04:46 crc kubenswrapper[4811]: E0128 18:04:46.340743 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:05:01 crc kubenswrapper[4811]: I0128 18:05:01.339778 4811 scope.go:117] "RemoveContainer" containerID="52e2af28e60ab3db25494ca65635994bbcd159056152d6f183dc3f10f6758b00" Jan 28 
Jan 28 18:05:01 crc kubenswrapper[4811]: E0128 18:05:01.340635 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 18:05:03 crc kubenswrapper[4811]: I0128 18:05:03.215651 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-bfk27"]
Jan 28 18:05:03 crc kubenswrapper[4811]: E0128 18:05:03.216510 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c4f56df-0901-49f4-bd0f-970589e2f8cc" containerName="extract-utilities"
Jan 28 18:05:03 crc kubenswrapper[4811]: I0128 18:05:03.216526 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c4f56df-0901-49f4-bd0f-970589e2f8cc" containerName="extract-utilities"
Jan 28 18:05:03 crc kubenswrapper[4811]: E0128 18:05:03.216552 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c4f56df-0901-49f4-bd0f-970589e2f8cc" containerName="registry-server"
Jan 28 18:05:03 crc kubenswrapper[4811]: I0128 18:05:03.216559 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c4f56df-0901-49f4-bd0f-970589e2f8cc" containerName="registry-server"
Jan 28 18:05:03 crc kubenswrapper[4811]: E0128 18:05:03.216589 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c4f56df-0901-49f4-bd0f-970589e2f8cc" containerName="extract-content"
Jan 28 18:05:03 crc kubenswrapper[4811]: I0128 18:05:03.216597 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c4f56df-0901-49f4-bd0f-970589e2f8cc" containerName="extract-content"
Jan 28 18:05:03 crc kubenswrapper[4811]: I0128 18:05:03.216853 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c4f56df-0901-49f4-bd0f-970589e2f8cc" containerName="registry-server"
Jan 28 18:05:03 crc kubenswrapper[4811]: I0128 18:05:03.218849 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bfk27"
Need to start a new one" pod="openshift-marketplace/certified-operators-bfk27" Jan 28 18:05:03 crc kubenswrapper[4811]: I0128 18:05:03.227668 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bfk27"] Jan 28 18:05:03 crc kubenswrapper[4811]: I0128 18:05:03.263288 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7d03eef-1c28-4a33-ab79-f7115e24c511-utilities\") pod \"certified-operators-bfk27\" (UID: \"d7d03eef-1c28-4a33-ab79-f7115e24c511\") " pod="openshift-marketplace/certified-operators-bfk27" Jan 28 18:05:03 crc kubenswrapper[4811]: I0128 18:05:03.263364 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njmnm\" (UniqueName: \"kubernetes.io/projected/d7d03eef-1c28-4a33-ab79-f7115e24c511-kube-api-access-njmnm\") pod \"certified-operators-bfk27\" (UID: \"d7d03eef-1c28-4a33-ab79-f7115e24c511\") " pod="openshift-marketplace/certified-operators-bfk27" Jan 28 18:05:03 crc kubenswrapper[4811]: I0128 18:05:03.263623 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7d03eef-1c28-4a33-ab79-f7115e24c511-catalog-content\") pod \"certified-operators-bfk27\" (UID: \"d7d03eef-1c28-4a33-ab79-f7115e24c511\") " pod="openshift-marketplace/certified-operators-bfk27" Jan 28 18:05:03 crc kubenswrapper[4811]: I0128 18:05:03.365370 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7d03eef-1c28-4a33-ab79-f7115e24c511-catalog-content\") pod \"certified-operators-bfk27\" (UID: \"d7d03eef-1c28-4a33-ab79-f7115e24c511\") " pod="openshift-marketplace/certified-operators-bfk27" Jan 28 18:05:03 crc kubenswrapper[4811]: I0128 18:05:03.365771 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7d03eef-1c28-4a33-ab79-f7115e24c511-utilities\") pod \"certified-operators-bfk27\" (UID: \"d7d03eef-1c28-4a33-ab79-f7115e24c511\") " pod="openshift-marketplace/certified-operators-bfk27" Jan 28 18:05:03 crc kubenswrapper[4811]: I0128 18:05:03.365888 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njmnm\" (UniqueName: \"kubernetes.io/projected/d7d03eef-1c28-4a33-ab79-f7115e24c511-kube-api-access-njmnm\") pod \"certified-operators-bfk27\" (UID: \"d7d03eef-1c28-4a33-ab79-f7115e24c511\") " pod="openshift-marketplace/certified-operators-bfk27" Jan 28 18:05:03 crc kubenswrapper[4811]: I0128 18:05:03.366211 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7d03eef-1c28-4a33-ab79-f7115e24c511-catalog-content\") pod \"certified-operators-bfk27\" (UID: \"d7d03eef-1c28-4a33-ab79-f7115e24c511\") " pod="openshift-marketplace/certified-operators-bfk27" Jan 28 18:05:03 crc kubenswrapper[4811]: I0128 18:05:03.366299 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7d03eef-1c28-4a33-ab79-f7115e24c511-utilities\") pod \"certified-operators-bfk27\" (UID: \"d7d03eef-1c28-4a33-ab79-f7115e24c511\") " pod="openshift-marketplace/certified-operators-bfk27" Jan 28 18:05:03 crc kubenswrapper[4811]: I0128 18:05:03.389618 4811 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-njmnm\" (UniqueName: \"kubernetes.io/projected/d7d03eef-1c28-4a33-ab79-f7115e24c511-kube-api-access-njmnm\") pod \"certified-operators-bfk27\" (UID: \"d7d03eef-1c28-4a33-ab79-f7115e24c511\") " pod="openshift-marketplace/certified-operators-bfk27" Jan 28 18:05:03 crc kubenswrapper[4811]: I0128 18:05:03.578376 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bfk27" Jan 28 18:05:04 crc kubenswrapper[4811]: I0128 18:05:04.139259 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bfk27"] Jan 28 18:05:04 crc kubenswrapper[4811]: I0128 18:05:04.909346 4811 generic.go:334] "Generic (PLEG): container finished" podID="d7d03eef-1c28-4a33-ab79-f7115e24c511" containerID="aeaeb7050f5b1fe2dd087558a89be7d2991b4f1fa351a388119272fe2c626525" exitCode=0 Jan 28 18:05:04 crc kubenswrapper[4811]: I0128 18:05:04.909411 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bfk27" event={"ID":"d7d03eef-1c28-4a33-ab79-f7115e24c511","Type":"ContainerDied","Data":"aeaeb7050f5b1fe2dd087558a89be7d2991b4f1fa351a388119272fe2c626525"} Jan 28 18:05:04 crc kubenswrapper[4811]: I0128 18:05:04.909757 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bfk27" event={"ID":"d7d03eef-1c28-4a33-ab79-f7115e24c511","Type":"ContainerStarted","Data":"b3283ccb0fe598eedea715105bc4ea616e53f75175150f53c3a2d3251254972b"} Jan 28 18:05:08 crc kubenswrapper[4811]: I0128 18:05:08.947755 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bfk27" event={"ID":"d7d03eef-1c28-4a33-ab79-f7115e24c511","Type":"ContainerStarted","Data":"d542d7041266d33d1882c0a65b32ee599b0ee43cd55a38f3db35d88ca0776cca"} Jan 28 18:05:11 crc kubenswrapper[4811]: I0128 18:05:11.983165 4811 generic.go:334] "Generic (PLEG): container finished" podID="d7d03eef-1c28-4a33-ab79-f7115e24c511" containerID="d542d7041266d33d1882c0a65b32ee599b0ee43cd55a38f3db35d88ca0776cca" exitCode=0 Jan 28 18:05:11 crc kubenswrapper[4811]: I0128 18:05:11.983392 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bfk27" event={"ID":"d7d03eef-1c28-4a33-ab79-f7115e24c511","Type":"ContainerDied","Data":"d542d7041266d33d1882c0a65b32ee599b0ee43cd55a38f3db35d88ca0776cca"} Jan 28 18:05:13 crc kubenswrapper[4811]: I0128 18:05:13.017418 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bfk27" event={"ID":"d7d03eef-1c28-4a33-ab79-f7115e24c511","Type":"ContainerStarted","Data":"f8db6b5035ca898cc66c977730625295c219f5777c73893cfd4f3d0590499fee"} Jan 28 18:05:13 crc kubenswrapper[4811]: I0128 18:05:13.045415 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-bfk27" podStartSLOduration=2.456112768 podStartE2EDuration="10.045395062s" podCreationTimestamp="2026-01-28 18:05:03 +0000 UTC" firstStartedPulling="2026-01-28 18:05:04.913330424 +0000 UTC m=+8397.667694007" lastFinishedPulling="2026-01-28 18:05:12.502612718 +0000 UTC m=+8405.256976301" observedRunningTime="2026-01-28 18:05:13.039919243 +0000 UTC m=+8405.794282846" watchObservedRunningTime="2026-01-28 18:05:13.045395062 +0000 UTC m=+8405.799758645" Jan 28 18:05:13 crc kubenswrapper[4811]: I0128 18:05:13.341992 4811 scope.go:117] "RemoveContainer" 
containerID="52e2af28e60ab3db25494ca65635994bbcd159056152d6f183dc3f10f6758b00" Jan 28 18:05:13 crc kubenswrapper[4811]: E0128 18:05:13.342581 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:05:13 crc kubenswrapper[4811]: I0128 18:05:13.579292 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-bfk27" Jan 28 18:05:13 crc kubenswrapper[4811]: I0128 18:05:13.579363 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-bfk27" Jan 28 18:05:14 crc kubenswrapper[4811]: I0128 18:05:14.646894 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-bfk27" podUID="d7d03eef-1c28-4a33-ab79-f7115e24c511" containerName="registry-server" probeResult="failure" output=< Jan 28 18:05:14 crc kubenswrapper[4811]: timeout: failed to connect service ":50051" within 1s Jan 28 18:05:14 crc kubenswrapper[4811]: > Jan 28 18:05:23 crc kubenswrapper[4811]: I0128 18:05:23.635598 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-bfk27" Jan 28 18:05:23 crc kubenswrapper[4811]: I0128 18:05:23.687900 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-bfk27" Jan 28 18:05:23 crc kubenswrapper[4811]: I0128 18:05:23.887538 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bfk27"] Jan 28 18:05:25 crc kubenswrapper[4811]: I0128 18:05:25.127267 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-bfk27" podUID="d7d03eef-1c28-4a33-ab79-f7115e24c511" containerName="registry-server" containerID="cri-o://f8db6b5035ca898cc66c977730625295c219f5777c73893cfd4f3d0590499fee" gracePeriod=2 Jan 28 18:05:26 crc kubenswrapper[4811]: I0128 18:05:26.143663 4811 generic.go:334] "Generic (PLEG): container finished" podID="d7d03eef-1c28-4a33-ab79-f7115e24c511" containerID="f8db6b5035ca898cc66c977730625295c219f5777c73893cfd4f3d0590499fee" exitCode=0 Jan 28 18:05:26 crc kubenswrapper[4811]: I0128 18:05:26.143773 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bfk27" event={"ID":"d7d03eef-1c28-4a33-ab79-f7115e24c511","Type":"ContainerDied","Data":"f8db6b5035ca898cc66c977730625295c219f5777c73893cfd4f3d0590499fee"} Jan 28 18:05:26 crc kubenswrapper[4811]: I0128 18:05:26.143983 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bfk27" event={"ID":"d7d03eef-1c28-4a33-ab79-f7115e24c511","Type":"ContainerDied","Data":"b3283ccb0fe598eedea715105bc4ea616e53f75175150f53c3a2d3251254972b"} Jan 28 18:05:26 crc kubenswrapper[4811]: I0128 18:05:26.144002 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b3283ccb0fe598eedea715105bc4ea616e53f75175150f53c3a2d3251254972b" Jan 28 18:05:26 crc kubenswrapper[4811]: I0128 18:05:26.157527 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-bfk27" Jan 28 18:05:26 crc kubenswrapper[4811]: I0128 18:05:26.290978 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-njmnm\" (UniqueName: \"kubernetes.io/projected/d7d03eef-1c28-4a33-ab79-f7115e24c511-kube-api-access-njmnm\") pod \"d7d03eef-1c28-4a33-ab79-f7115e24c511\" (UID: \"d7d03eef-1c28-4a33-ab79-f7115e24c511\") " Jan 28 18:05:26 crc kubenswrapper[4811]: I0128 18:05:26.291104 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7d03eef-1c28-4a33-ab79-f7115e24c511-utilities\") pod \"d7d03eef-1c28-4a33-ab79-f7115e24c511\" (UID: \"d7d03eef-1c28-4a33-ab79-f7115e24c511\") " Jan 28 18:05:26 crc kubenswrapper[4811]: I0128 18:05:26.291216 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7d03eef-1c28-4a33-ab79-f7115e24c511-catalog-content\") pod \"d7d03eef-1c28-4a33-ab79-f7115e24c511\" (UID: \"d7d03eef-1c28-4a33-ab79-f7115e24c511\") " Jan 28 18:05:26 crc kubenswrapper[4811]: I0128 18:05:26.293174 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7d03eef-1c28-4a33-ab79-f7115e24c511-utilities" (OuterVolumeSpecName: "utilities") pod "d7d03eef-1c28-4a33-ab79-f7115e24c511" (UID: "d7d03eef-1c28-4a33-ab79-f7115e24c511"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 18:05:26 crc kubenswrapper[4811]: I0128 18:05:26.296366 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7d03eef-1c28-4a33-ab79-f7115e24c511-kube-api-access-njmnm" (OuterVolumeSpecName: "kube-api-access-njmnm") pod "d7d03eef-1c28-4a33-ab79-f7115e24c511" (UID: "d7d03eef-1c28-4a33-ab79-f7115e24c511"). InnerVolumeSpecName "kube-api-access-njmnm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 18:05:26 crc kubenswrapper[4811]: I0128 18:05:26.343670 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7d03eef-1c28-4a33-ab79-f7115e24c511-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d7d03eef-1c28-4a33-ab79-f7115e24c511" (UID: "d7d03eef-1c28-4a33-ab79-f7115e24c511"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 18:05:26 crc kubenswrapper[4811]: I0128 18:05:26.397059 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-njmnm\" (UniqueName: \"kubernetes.io/projected/d7d03eef-1c28-4a33-ab79-f7115e24c511-kube-api-access-njmnm\") on node \"crc\" DevicePath \"\"" Jan 28 18:05:26 crc kubenswrapper[4811]: I0128 18:05:26.397101 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7d03eef-1c28-4a33-ab79-f7115e24c511-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 18:05:26 crc kubenswrapper[4811]: I0128 18:05:26.397114 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7d03eef-1c28-4a33-ab79-f7115e24c511-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 18:05:27 crc kubenswrapper[4811]: I0128 18:05:27.155747 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-bfk27" Jan 28 18:05:27 crc kubenswrapper[4811]: I0128 18:05:27.183745 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bfk27"] Jan 28 18:05:27 crc kubenswrapper[4811]: I0128 18:05:27.197374 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-bfk27"] Jan 28 18:05:27 crc kubenswrapper[4811]: I0128 18:05:27.341609 4811 scope.go:117] "RemoveContainer" containerID="52e2af28e60ab3db25494ca65635994bbcd159056152d6f183dc3f10f6758b00" Jan 28 18:05:27 crc kubenswrapper[4811]: E0128 18:05:27.341922 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:05:28 crc kubenswrapper[4811]: I0128 18:05:28.354276 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7d03eef-1c28-4a33-ab79-f7115e24c511" path="/var/lib/kubelet/pods/d7d03eef-1c28-4a33-ab79-f7115e24c511/volumes" Jan 28 18:05:42 crc kubenswrapper[4811]: I0128 18:05:42.340551 4811 scope.go:117] "RemoveContainer" containerID="52e2af28e60ab3db25494ca65635994bbcd159056152d6f183dc3f10f6758b00" Jan 28 18:05:42 crc kubenswrapper[4811]: E0128 18:05:42.342082 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:05:53 crc kubenswrapper[4811]: I0128 18:05:53.340495 4811 scope.go:117] "RemoveContainer" containerID="52e2af28e60ab3db25494ca65635994bbcd159056152d6f183dc3f10f6758b00" Jan 28 18:05:53 crc kubenswrapper[4811]: E0128 18:05:53.341255 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:06:05 crc kubenswrapper[4811]: I0128 18:06:05.339842 4811 scope.go:117] "RemoveContainer" containerID="52e2af28e60ab3db25494ca65635994bbcd159056152d6f183dc3f10f6758b00" Jan 28 18:06:05 crc kubenswrapper[4811]: E0128 18:06:05.340842 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:06:20 crc kubenswrapper[4811]: I0128 18:06:20.339960 4811 scope.go:117] "RemoveContainer" containerID="52e2af28e60ab3db25494ca65635994bbcd159056152d6f183dc3f10f6758b00" Jan 28 
18:06:20 crc kubenswrapper[4811]: E0128 18:06:20.340780 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:06:31 crc kubenswrapper[4811]: I0128 18:06:31.340829 4811 scope.go:117] "RemoveContainer" containerID="52e2af28e60ab3db25494ca65635994bbcd159056152d6f183dc3f10f6758b00" Jan 28 18:06:31 crc kubenswrapper[4811]: E0128 18:06:31.343712 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:06:46 crc kubenswrapper[4811]: I0128 18:06:46.340118 4811 scope.go:117] "RemoveContainer" containerID="52e2af28e60ab3db25494ca65635994bbcd159056152d6f183dc3f10f6758b00" Jan 28 18:06:46 crc kubenswrapper[4811]: E0128 18:06:46.340754 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:06:58 crc kubenswrapper[4811]: I0128 18:06:58.348908 4811 scope.go:117] "RemoveContainer" containerID="52e2af28e60ab3db25494ca65635994bbcd159056152d6f183dc3f10f6758b00" Jan 28 18:06:58 crc kubenswrapper[4811]: E0128 18:06:58.349863 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:07:10 crc kubenswrapper[4811]: I0128 18:07:10.339478 4811 scope.go:117] "RemoveContainer" containerID="52e2af28e60ab3db25494ca65635994bbcd159056152d6f183dc3f10f6758b00" Jan 28 18:07:10 crc kubenswrapper[4811]: E0128 18:07:10.340255 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:07:21 crc kubenswrapper[4811]: I0128 18:07:21.339623 4811 scope.go:117] "RemoveContainer" containerID="52e2af28e60ab3db25494ca65635994bbcd159056152d6f183dc3f10f6758b00" Jan 28 18:07:21 crc kubenswrapper[4811]: E0128 18:07:21.340410 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:07:29 crc kubenswrapper[4811]: I0128 18:07:29.775539 4811 generic.go:334] "Generic (PLEG): container finished" podID="b594ffd2-2b1d-40c4-9d18-9831dd6c9816" containerID="b79d402a26ad8221a2a11dc05ae18d754fc27c146e57d3f7fcd7047c40cd5946" exitCode=0 Jan 28 18:07:29 crc kubenswrapper[4811]: I0128 18:07:29.775674 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-pgzw9" event={"ID":"b594ffd2-2b1d-40c4-9d18-9831dd6c9816","Type":"ContainerDied","Data":"b79d402a26ad8221a2a11dc05ae18d754fc27c146e57d3f7fcd7047c40cd5946"} Jan 28 18:07:31 crc kubenswrapper[4811]: I0128 18:07:31.806690 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-pgzw9" event={"ID":"b594ffd2-2b1d-40c4-9d18-9831dd6c9816","Type":"ContainerDied","Data":"c1b094b91e8b4c0ee504a96ae4490e0e0eeb94b1210e19137646c5f94b0e5333"} Jan 28 18:07:31 crc kubenswrapper[4811]: I0128 18:07:31.807166 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c1b094b91e8b4c0ee504a96ae4490e0e0eeb94b1210e19137646c5f94b0e5333" Jan 28 18:07:31 crc kubenswrapper[4811]: I0128 18:07:31.864017 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-pgzw9" Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.010600 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-ceilometer-compute-config-data-0\") pod \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\" (UID: \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\") " Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.010755 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dzcp9\" (UniqueName: \"kubernetes.io/projected/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-kube-api-access-dzcp9\") pod \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\" (UID: \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\") " Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.010902 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-inventory\") pod \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\" (UID: \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\") " Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.011471 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-ceilometer-compute-config-data-1\") pod \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\" (UID: \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\") " Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.011625 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-ceilometer-compute-config-data-2\") pod \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\" (UID: \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\") " Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 
18:07:32.011664 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-ceph\") pod \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\" (UID: \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\") " Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.011792 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-ssh-key-openstack-cell1\") pod \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\" (UID: \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\") " Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.011949 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-telemetry-combined-ca-bundle\") pod \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\" (UID: \"b594ffd2-2b1d-40c4-9d18-9831dd6c9816\") " Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.017215 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-kube-api-access-dzcp9" (OuterVolumeSpecName: "kube-api-access-dzcp9") pod "b594ffd2-2b1d-40c4-9d18-9831dd6c9816" (UID: "b594ffd2-2b1d-40c4-9d18-9831dd6c9816"). InnerVolumeSpecName "kube-api-access-dzcp9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.017332 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "b594ffd2-2b1d-40c4-9d18-9831dd6c9816" (UID: "b594ffd2-2b1d-40c4-9d18-9831dd6c9816"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.018805 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-ceph" (OuterVolumeSpecName: "ceph") pod "b594ffd2-2b1d-40c4-9d18-9831dd6c9816" (UID: "b594ffd2-2b1d-40c4-9d18-9831dd6c9816"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.044715 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "b594ffd2-2b1d-40c4-9d18-9831dd6c9816" (UID: "b594ffd2-2b1d-40c4-9d18-9831dd6c9816"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.047001 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-inventory" (OuterVolumeSpecName: "inventory") pod "b594ffd2-2b1d-40c4-9d18-9831dd6c9816" (UID: "b594ffd2-2b1d-40c4-9d18-9831dd6c9816"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.047951 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "b594ffd2-2b1d-40c4-9d18-9831dd6c9816" (UID: "b594ffd2-2b1d-40c4-9d18-9831dd6c9816"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.060340 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "b594ffd2-2b1d-40c4-9d18-9831dd6c9816" (UID: "b594ffd2-2b1d-40c4-9d18-9831dd6c9816"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.061943 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "b594ffd2-2b1d-40c4-9d18-9831dd6c9816" (UID: "b594ffd2-2b1d-40c4-9d18-9831dd6c9816"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.114807 4811 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.114847 4811 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-ceph\") on node \"crc\" DevicePath \"\"" Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.114863 4811 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.114875 4811 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.114884 4811 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.114902 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dzcp9\" (UniqueName: \"kubernetes.io/projected/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-kube-api-access-dzcp9\") on node \"crc\" DevicePath \"\"" Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.114912 4811 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-inventory\") on node \"crc\" DevicePath \"\"" Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.114922 4811 reconciler_common.go:293] 
"Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/b594ffd2-2b1d-40c4-9d18-9831dd6c9816-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.816924 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-pgzw9" Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.983470 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-7gc2q"] Jan 28 18:07:32 crc kubenswrapper[4811]: E0128 18:07:32.984015 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7d03eef-1c28-4a33-ab79-f7115e24c511" containerName="extract-utilities" Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.984039 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7d03eef-1c28-4a33-ab79-f7115e24c511" containerName="extract-utilities" Jan 28 18:07:32 crc kubenswrapper[4811]: E0128 18:07:32.984064 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7d03eef-1c28-4a33-ab79-f7115e24c511" containerName="extract-content" Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.984075 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7d03eef-1c28-4a33-ab79-f7115e24c511" containerName="extract-content" Jan 28 18:07:32 crc kubenswrapper[4811]: E0128 18:07:32.984098 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7d03eef-1c28-4a33-ab79-f7115e24c511" containerName="registry-server" Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.984107 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7d03eef-1c28-4a33-ab79-f7115e24c511" containerName="registry-server" Jan 28 18:07:32 crc kubenswrapper[4811]: E0128 18:07:32.984122 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b594ffd2-2b1d-40c4-9d18-9831dd6c9816" containerName="telemetry-openstack-openstack-cell1" Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.984130 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="b594ffd2-2b1d-40c4-9d18-9831dd6c9816" containerName="telemetry-openstack-openstack-cell1" Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.984399 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7d03eef-1c28-4a33-ab79-f7115e24c511" containerName="registry-server" Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.984423 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="b594ffd2-2b1d-40c4-9d18-9831dd6c9816" containerName="telemetry-openstack-openstack-cell1" Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.985418 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-7gc2q" Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.988512 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.988746 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-856xk" Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.989332 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 28 18:07:32 crc kubenswrapper[4811]: I0128 18:07:32.991769 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-sriov-agent-neutron-config" Jan 28 18:07:33 crc kubenswrapper[4811]: I0128 18:07:32.998286 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 28 18:07:33 crc kubenswrapper[4811]: I0128 18:07:33.002937 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-7gc2q"] Jan 28 18:07:33 crc kubenswrapper[4811]: I0128 18:07:33.136949 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-7gc2q\" (UID: \"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7gc2q" Jan 28 18:07:33 crc kubenswrapper[4811]: I0128 18:07:33.137275 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-7gc2q\" (UID: \"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7gc2q" Jan 28 18:07:33 crc kubenswrapper[4811]: I0128 18:07:33.137299 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ngl9\" (UniqueName: \"kubernetes.io/projected/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-kube-api-access-7ngl9\") pod \"neutron-sriov-openstack-openstack-cell1-7gc2q\" (UID: \"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7gc2q" Jan 28 18:07:33 crc kubenswrapper[4811]: I0128 18:07:33.137440 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-7gc2q\" (UID: \"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7gc2q" Jan 28 18:07:33 crc kubenswrapper[4811]: I0128 18:07:33.137519 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-ssh-key-openstack-cell1\") pod \"neutron-sriov-openstack-openstack-cell1-7gc2q\" (UID: \"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7gc2q" Jan 28 18:07:33 crc kubenswrapper[4811]: I0128 18:07:33.137719 4811 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-7gc2q\" (UID: \"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7gc2q" Jan 28 18:07:33 crc kubenswrapper[4811]: I0128 18:07:33.240126 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-7gc2q\" (UID: \"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7gc2q" Jan 28 18:07:33 crc kubenswrapper[4811]: I0128 18:07:33.240342 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-7gc2q\" (UID: \"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7gc2q" Jan 28 18:07:33 crc kubenswrapper[4811]: I0128 18:07:33.240420 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ngl9\" (UniqueName: \"kubernetes.io/projected/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-kube-api-access-7ngl9\") pod \"neutron-sriov-openstack-openstack-cell1-7gc2q\" (UID: \"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7gc2q" Jan 28 18:07:33 crc kubenswrapper[4811]: I0128 18:07:33.240516 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-7gc2q\" (UID: \"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7gc2q" Jan 28 18:07:33 crc kubenswrapper[4811]: I0128 18:07:33.240588 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-ssh-key-openstack-cell1\") pod \"neutron-sriov-openstack-openstack-cell1-7gc2q\" (UID: \"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7gc2q" Jan 28 18:07:33 crc kubenswrapper[4811]: I0128 18:07:33.240696 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-7gc2q\" (UID: \"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7gc2q" Jan 28 18:07:33 crc kubenswrapper[4811]: I0128 18:07:33.246032 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-7gc2q\" (UID: \"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7gc2q" Jan 28 18:07:33 crc kubenswrapper[4811]: I0128 18:07:33.246035 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-7gc2q\" (UID: \"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7gc2q" Jan 28 18:07:33 crc kubenswrapper[4811]: I0128 18:07:33.246557 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-7gc2q\" (UID: \"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7gc2q" Jan 28 18:07:33 crc kubenswrapper[4811]: I0128 18:07:33.247506 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-7gc2q\" (UID: \"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7gc2q" Jan 28 18:07:33 crc kubenswrapper[4811]: I0128 18:07:33.249371 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-ssh-key-openstack-cell1\") pod \"neutron-sriov-openstack-openstack-cell1-7gc2q\" (UID: \"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7gc2q" Jan 28 18:07:33 crc kubenswrapper[4811]: I0128 18:07:33.263542 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ngl9\" (UniqueName: \"kubernetes.io/projected/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-kube-api-access-7ngl9\") pod \"neutron-sriov-openstack-openstack-cell1-7gc2q\" (UID: \"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7gc2q" Jan 28 18:07:33 crc kubenswrapper[4811]: I0128 18:07:33.303187 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-7gc2q" Jan 28 18:07:33 crc kubenswrapper[4811]: I0128 18:07:33.864930 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-7gc2q"] Jan 28 18:07:33 crc kubenswrapper[4811]: I0128 18:07:33.869588 4811 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 28 18:07:34 crc kubenswrapper[4811]: I0128 18:07:34.836249 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-7gc2q" event={"ID":"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b","Type":"ContainerStarted","Data":"4b80cdd2e6edaaf4322df343dfce79fed6969e8869452df59cc9f9ea917b4f5c"} Jan 28 18:07:34 crc kubenswrapper[4811]: I0128 18:07:34.836591 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-7gc2q" event={"ID":"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b","Type":"ContainerStarted","Data":"45454d125e7e1ed765e32fcb97587e8cb753b40bd33d6e04080f906e7ce67093"} Jan 28 18:07:34 crc kubenswrapper[4811]: I0128 18:07:34.860554 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-sriov-openstack-openstack-cell1-7gc2q" podStartSLOduration=2.30311423 podStartE2EDuration="2.860535612s" podCreationTimestamp="2026-01-28 18:07:32 +0000 UTC" firstStartedPulling="2026-01-28 18:07:33.869314555 +0000 UTC m=+8546.623678138" lastFinishedPulling="2026-01-28 18:07:34.426735937 +0000 UTC m=+8547.181099520" observedRunningTime="2026-01-28 18:07:34.85003832 +0000 UTC m=+8547.604401923" watchObservedRunningTime="2026-01-28 18:07:34.860535612 +0000 UTC m=+8547.614899195" Jan 28 18:07:35 crc kubenswrapper[4811]: I0128 18:07:35.339814 4811 scope.go:117] "RemoveContainer" containerID="52e2af28e60ab3db25494ca65635994bbcd159056152d6f183dc3f10f6758b00" Jan 28 18:07:35 crc kubenswrapper[4811]: E0128 18:07:35.340312 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:07:46 crc kubenswrapper[4811]: I0128 18:07:46.339873 4811 scope.go:117] "RemoveContainer" containerID="52e2af28e60ab3db25494ca65635994bbcd159056152d6f183dc3f10f6758b00" Jan 28 18:07:46 crc kubenswrapper[4811]: E0128 18:07:46.342788 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:08:01 crc kubenswrapper[4811]: I0128 18:08:01.340422 4811 scope.go:117] "RemoveContainer" containerID="52e2af28e60ab3db25494ca65635994bbcd159056152d6f183dc3f10f6758b00" Jan 28 18:08:01 crc kubenswrapper[4811]: E0128 18:08:01.341114 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:08:16 crc kubenswrapper[4811]: I0128 18:08:16.340757 4811 scope.go:117] "RemoveContainer" containerID="52e2af28e60ab3db25494ca65635994bbcd159056152d6f183dc3f10f6758b00" Jan 28 18:08:16 crc kubenswrapper[4811]: E0128 18:08:16.342333 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:08:29 crc kubenswrapper[4811]: I0128 18:08:29.340236 4811 scope.go:117] "RemoveContainer" containerID="52e2af28e60ab3db25494ca65635994bbcd159056152d6f183dc3f10f6758b00" Jan 28 18:08:29 crc kubenswrapper[4811]: E0128 18:08:29.340903 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:08:44 crc kubenswrapper[4811]: I0128 18:08:44.339751 4811 scope.go:117] "RemoveContainer" containerID="52e2af28e60ab3db25494ca65635994bbcd159056152d6f183dc3f10f6758b00" Jan 28 18:08:44 crc kubenswrapper[4811]: E0128 18:08:44.340729 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:08:58 crc kubenswrapper[4811]: I0128 18:08:58.346317 4811 scope.go:117] "RemoveContainer" containerID="52e2af28e60ab3db25494ca65635994bbcd159056152d6f183dc3f10f6758b00" Jan 28 18:08:58 crc kubenswrapper[4811]: E0128 18:08:58.347176 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:09:13 crc kubenswrapper[4811]: I0128 18:09:13.339867 4811 scope.go:117] "RemoveContainer" containerID="52e2af28e60ab3db25494ca65635994bbcd159056152d6f183dc3f10f6758b00" Jan 28 18:09:13 crc kubenswrapper[4811]: E0128 18:09:13.340651 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" 
podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:09:26 crc kubenswrapper[4811]: I0128 18:09:26.339521 4811 scope.go:117] "RemoveContainer" containerID="52e2af28e60ab3db25494ca65635994bbcd159056152d6f183dc3f10f6758b00" Jan 28 18:09:26 crc kubenswrapper[4811]: E0128 18:09:26.340473 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:09:39 crc kubenswrapper[4811]: I0128 18:09:39.339585 4811 scope.go:117] "RemoveContainer" containerID="52e2af28e60ab3db25494ca65635994bbcd159056152d6f183dc3f10f6758b00" Jan 28 18:09:39 crc kubenswrapper[4811]: I0128 18:09:39.690200 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"aeca1ea953b265c0f4e346981f2f377031cd40369c372fe999b89d4262909504"} Jan 28 18:10:16 crc kubenswrapper[4811]: I0128 18:10:16.051724 4811 generic.go:334] "Generic (PLEG): container finished" podID="5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b" containerID="4b80cdd2e6edaaf4322df343dfce79fed6969e8869452df59cc9f9ea917b4f5c" exitCode=0 Jan 28 18:10:16 crc kubenswrapper[4811]: I0128 18:10:16.051814 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-7gc2q" event={"ID":"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b","Type":"ContainerDied","Data":"4b80cdd2e6edaaf4322df343dfce79fed6969e8869452df59cc9f9ea917b4f5c"} Jan 28 18:10:17 crc kubenswrapper[4811]: I0128 18:10:17.543136 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-7gc2q"
Jan 28 18:10:17 crc kubenswrapper[4811]: I0128 18:10:17.603144 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-ssh-key-openstack-cell1\") pod \"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b\" (UID: \"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b\") "
Jan 28 18:10:17 crc kubenswrapper[4811]: I0128 18:10:17.606412 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-neutron-sriov-combined-ca-bundle\") pod \"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b\" (UID: \"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b\") "
Jan 28 18:10:17 crc kubenswrapper[4811]: I0128 18:10:17.606501 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-neutron-sriov-agent-neutron-config-0\") pod \"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b\" (UID: \"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b\") "
Jan 28 18:10:17 crc kubenswrapper[4811]: I0128 18:10:17.606530 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-ceph\") pod \"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b\" (UID: \"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b\") "
Jan 28 18:10:17 crc kubenswrapper[4811]: I0128 18:10:17.606550 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-inventory\") pod \"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b\" (UID: \"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b\") "
Jan 28 18:10:17 crc kubenswrapper[4811]: I0128 18:10:17.606743 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7ngl9\" (UniqueName: \"kubernetes.io/projected/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-kube-api-access-7ngl9\") pod \"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b\" (UID: \"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b\") "
Jan 28 18:10:17 crc kubenswrapper[4811]: I0128 18:10:17.611803 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b" (UID: "5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 18:10:17 crc kubenswrapper[4811]: I0128 18:10:17.611987 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-kube-api-access-7ngl9" (OuterVolumeSpecName: "kube-api-access-7ngl9") pod "5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b" (UID: "5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b"). InnerVolumeSpecName "kube-api-access-7ngl9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 18:10:17 crc kubenswrapper[4811]: I0128 18:10:17.626581 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-ceph" (OuterVolumeSpecName: "ceph") pod "5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b" (UID: "5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 18:10:17 crc kubenswrapper[4811]: I0128 18:10:17.633504 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b" (UID: "5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 18:10:17 crc kubenswrapper[4811]: I0128 18:10:17.633631 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-inventory" (OuterVolumeSpecName: "inventory") pod "5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b" (UID: "5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 18:10:17 crc kubenswrapper[4811]: I0128 18:10:17.648198 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-neutron-sriov-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-sriov-agent-neutron-config-0") pod "5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b" (UID: "5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b"). InnerVolumeSpecName "neutron-sriov-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 18:10:17 crc kubenswrapper[4811]: I0128 18:10:17.709241 4811 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\""
Jan 28 18:10:17 crc kubenswrapper[4811]: I0128 18:10:17.709276 4811 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 18:10:17 crc kubenswrapper[4811]: I0128 18:10:17.709287 4811 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-neutron-sriov-agent-neutron-config-0\") on node \"crc\" DevicePath \"\""
Jan 28 18:10:17 crc kubenswrapper[4811]: I0128 18:10:17.709298 4811 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-ceph\") on node \"crc\" DevicePath \"\""
Jan 28 18:10:17 crc kubenswrapper[4811]: I0128 18:10:17.709309 4811 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-inventory\") on node \"crc\" DevicePath \"\""
Jan 28 18:10:17 crc kubenswrapper[4811]: I0128 18:10:17.709318 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7ngl9\" (UniqueName: \"kubernetes.io/projected/5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b-kube-api-access-7ngl9\") on node \"crc\" DevicePath \"\""
Jan 28 18:10:18 crc kubenswrapper[4811]: I0128 18:10:18.073553 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-7gc2q" event={"ID":"5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b","Type":"ContainerDied","Data":"45454d125e7e1ed765e32fcb97587e8cb753b40bd33d6e04080f906e7ce67093"}
Jan 28 18:10:18 crc kubenswrapper[4811]: I0128 18:10:18.073604 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-7gc2q"
Jan 28 18:10:18 crc kubenswrapper[4811]: I0128 18:10:18.073607 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="45454d125e7e1ed765e32fcb97587e8cb753b40bd33d6e04080f906e7ce67093"
Jan 28 18:10:18 crc kubenswrapper[4811]: I0128 18:10:18.169361 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-d2jdx"]
Jan 28 18:10:18 crc kubenswrapper[4811]: E0128 18:10:18.169981 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b" containerName="neutron-sriov-openstack-openstack-cell1"
Jan 28 18:10:18 crc kubenswrapper[4811]: I0128 18:10:18.170006 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b" containerName="neutron-sriov-openstack-openstack-cell1"
Jan 28 18:10:18 crc kubenswrapper[4811]: I0128 18:10:18.170264 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b" containerName="neutron-sriov-openstack-openstack-cell1"
Jan 28 18:10:18 crc kubenswrapper[4811]: I0128 18:10:18.172457 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-d2jdx"
Jan 28 18:10:18 crc kubenswrapper[4811]: I0128 18:10:18.174461 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-856xk"
Jan 28 18:10:18 crc kubenswrapper[4811]: I0128 18:10:18.174536 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Jan 28 18:10:18 crc kubenswrapper[4811]: I0128 18:10:18.174663 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Jan 28 18:10:18 crc kubenswrapper[4811]: I0128 18:10:18.175726 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Jan 28 18:10:18 crc kubenswrapper[4811]: I0128 18:10:18.175864 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-dhcp-agent-neutron-config"
Jan 28 18:10:18 crc kubenswrapper[4811]: I0128 18:10:18.250486 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-d2jdx"]
Jan 28 18:10:18 crc kubenswrapper[4811]: I0128 18:10:18.326864 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6g55d\" (UniqueName: \"kubernetes.io/projected/b42afac6-403d-45e8-9d45-23c58fbe0dfa-kube-api-access-6g55d\") pod \"neutron-dhcp-openstack-openstack-cell1-d2jdx\" (UID: \"b42afac6-403d-45e8-9d45-23c58fbe0dfa\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-d2jdx"
Jan 28 18:10:18 crc kubenswrapper[4811]: I0128 18:10:18.326945 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b42afac6-403d-45e8-9d45-23c58fbe0dfa-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-d2jdx\" (UID: \"b42afac6-403d-45e8-9d45-23c58fbe0dfa\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-d2jdx"
Jan 28 18:10:18 crc kubenswrapper[4811]: I0128 18:10:18.327007 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/b42afac6-403d-45e8-9d45-23c58fbe0dfa-ssh-key-openstack-cell1\") pod \"neutron-dhcp-openstack-openstack-cell1-d2jdx\" (UID: \"b42afac6-403d-45e8-9d45-23c58fbe0dfa\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-d2jdx"
Jan 28 18:10:18 crc kubenswrapper[4811]: I0128 18:10:18.327140 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b42afac6-403d-45e8-9d45-23c58fbe0dfa-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-d2jdx\" (UID: \"b42afac6-403d-45e8-9d45-23c58fbe0dfa\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-d2jdx"
Jan 28 18:10:18 crc kubenswrapper[4811]: I0128 18:10:18.327219 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b42afac6-403d-45e8-9d45-23c58fbe0dfa-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-d2jdx\" (UID: \"b42afac6-403d-45e8-9d45-23c58fbe0dfa\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-d2jdx"
Jan 28 18:10:18 crc kubenswrapper[4811]: I0128 18:10:18.327332 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b42afac6-403d-45e8-9d45-23c58fbe0dfa-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-d2jdx\" (UID: \"b42afac6-403d-45e8-9d45-23c58fbe0dfa\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-d2jdx"
Jan 28 18:10:18 crc kubenswrapper[4811]: I0128 18:10:18.429564 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b42afac6-403d-45e8-9d45-23c58fbe0dfa-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-d2jdx\" (UID: \"b42afac6-403d-45e8-9d45-23c58fbe0dfa\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-d2jdx"
Jan 28 18:10:18 crc kubenswrapper[4811]: I0128 18:10:18.429896 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/b42afac6-403d-45e8-9d45-23c58fbe0dfa-ssh-key-openstack-cell1\") pod \"neutron-dhcp-openstack-openstack-cell1-d2jdx\" (UID: \"b42afac6-403d-45e8-9d45-23c58fbe0dfa\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-d2jdx"
Jan 28 18:10:18 crc kubenswrapper[4811]: I0128 18:10:18.430016 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b42afac6-403d-45e8-9d45-23c58fbe0dfa-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-d2jdx\" (UID: \"b42afac6-403d-45e8-9d45-23c58fbe0dfa\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-d2jdx"
Jan 28 18:10:18 crc kubenswrapper[4811]: I0128 18:10:18.430087 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b42afac6-403d-45e8-9d45-23c58fbe0dfa-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-d2jdx\" (UID: \"b42afac6-403d-45e8-9d45-23c58fbe0dfa\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-d2jdx"
Jan 28 18:10:18 crc kubenswrapper[4811]: I0128 18:10:18.430171 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b42afac6-403d-45e8-9d45-23c58fbe0dfa-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-d2jdx\" (UID: \"b42afac6-403d-45e8-9d45-23c58fbe0dfa\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-d2jdx"
Jan 28 18:10:18 crc kubenswrapper[4811]: I0128 18:10:18.430219 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6g55d\" (UniqueName: \"kubernetes.io/projected/b42afac6-403d-45e8-9d45-23c58fbe0dfa-kube-api-access-6g55d\") pod \"neutron-dhcp-openstack-openstack-cell1-d2jdx\" (UID: \"b42afac6-403d-45e8-9d45-23c58fbe0dfa\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-d2jdx"
Jan 28 18:10:18 crc kubenswrapper[4811]: I0128 18:10:18.434999 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/b42afac6-403d-45e8-9d45-23c58fbe0dfa-ssh-key-openstack-cell1\") pod \"neutron-dhcp-openstack-openstack-cell1-d2jdx\" (UID: \"b42afac6-403d-45e8-9d45-23c58fbe0dfa\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-d2jdx"
Jan 28 18:10:18 crc kubenswrapper[4811]: I0128 18:10:18.435715 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b42afac6-403d-45e8-9d45-23c58fbe0dfa-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-d2jdx\" (UID: \"b42afac6-403d-45e8-9d45-23c58fbe0dfa\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-d2jdx"
Jan 28 18:10:18 crc kubenswrapper[4811]: I0128 18:10:18.439848 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b42afac6-403d-45e8-9d45-23c58fbe0dfa-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-d2jdx\" (UID: \"b42afac6-403d-45e8-9d45-23c58fbe0dfa\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-d2jdx"
Jan 28 18:10:18 crc kubenswrapper[4811]: I0128 18:10:18.440180 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b42afac6-403d-45e8-9d45-23c58fbe0dfa-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-d2jdx\" (UID: \"b42afac6-403d-45e8-9d45-23c58fbe0dfa\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-d2jdx"
Jan 28 18:10:18 crc kubenswrapper[4811]: I0128 18:10:18.452075 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b42afac6-403d-45e8-9d45-23c58fbe0dfa-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-d2jdx\" (UID: \"b42afac6-403d-45e8-9d45-23c58fbe0dfa\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-d2jdx"
Jan 28 18:10:18 crc kubenswrapper[4811]: I0128 18:10:18.455704 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6g55d\" (UniqueName: \"kubernetes.io/projected/b42afac6-403d-45e8-9d45-23c58fbe0dfa-kube-api-access-6g55d\") pod \"neutron-dhcp-openstack-openstack-cell1-d2jdx\" (UID: \"b42afac6-403d-45e8-9d45-23c58fbe0dfa\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-d2jdx"
Jan 28 18:10:18 crc kubenswrapper[4811]: I0128 18:10:18.495332 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-d2jdx"
Jan 28 18:10:19 crc kubenswrapper[4811]: I0128 18:10:19.163056 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-d2jdx"]
Jan 28 18:10:20 crc kubenswrapper[4811]: I0128 18:10:20.096346 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-d2jdx" event={"ID":"b42afac6-403d-45e8-9d45-23c58fbe0dfa","Type":"ContainerStarted","Data":"54bf6c0118f16e431a0758811793d2f968182b243c6816da2264ac6bf3dfe67e"}
Jan 28 18:10:21 crc kubenswrapper[4811]: I0128 18:10:21.110239 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-d2jdx" event={"ID":"b42afac6-403d-45e8-9d45-23c58fbe0dfa","Type":"ContainerStarted","Data":"0632d5e06fe28505e1b269e6da9f4601872ad4fc62614f1645e9bac4d78fe28c"}
Jan 28 18:10:21 crc kubenswrapper[4811]: I0128 18:10:21.139163 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-dhcp-openstack-openstack-cell1-d2jdx" podStartSLOduration=2.627799954 podStartE2EDuration="3.139136586s" podCreationTimestamp="2026-01-28 18:10:18 +0000 UTC" firstStartedPulling="2026-01-28 18:10:19.168507271 +0000 UTC m=+8711.922870854" lastFinishedPulling="2026-01-28 18:10:19.679843903 +0000 UTC m=+8712.434207486" observedRunningTime="2026-01-28 18:10:21.131027229 +0000 UTC m=+8713.885390812" watchObservedRunningTime="2026-01-28 18:10:21.139136586 +0000 UTC m=+8713.893500169"
Jan 28 18:11:37 crc kubenswrapper[4811]: I0128 18:11:37.772655 4811 scope.go:117] "RemoveContainer" containerID="aeaeb7050f5b1fe2dd087558a89be7d2991b4f1fa351a388119272fe2c626525"
Jan 28 18:11:37 crc kubenswrapper[4811]: I0128 18:11:37.803786 4811 scope.go:117] "RemoveContainer" containerID="f8db6b5035ca898cc66c977730625295c219f5777c73893cfd4f3d0590499fee"
Jan 28 18:11:37 crc kubenswrapper[4811]: I0128 18:11:37.856976 4811 scope.go:117] "RemoveContainer" containerID="d542d7041266d33d1882c0a65b32ee599b0ee43cd55a38f3db35d88ca0776cca"
Jan 28 18:12:03 crc kubenswrapper[4811]: I0128 18:12:03.089512 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 18:12:03 crc kubenswrapper[4811]: I0128 18:12:03.091614 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 18:12:33 crc kubenswrapper[4811]: I0128 18:12:33.086812 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 18:12:33 crc kubenswrapper[4811]: I0128 18:12:33.088331 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 18:13:03 crc kubenswrapper[4811]: I0128 18:13:03.087495 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 18:13:03 crc kubenswrapper[4811]: I0128 18:13:03.088032 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 18:13:03 crc kubenswrapper[4811]: I0128 18:13:03.088075 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6"
Jan 28 18:13:03 crc kubenswrapper[4811]: I0128 18:13:03.089303 4811 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"aeca1ea953b265c0f4e346981f2f377031cd40369c372fe999b89d4262909504"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 28 18:13:03 crc kubenswrapper[4811]: I0128 18:13:03.089378 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://aeca1ea953b265c0f4e346981f2f377031cd40369c372fe999b89d4262909504" gracePeriod=600
Jan 28 18:13:03 crc kubenswrapper[4811]: I0128 18:13:03.772951 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="aeca1ea953b265c0f4e346981f2f377031cd40369c372fe999b89d4262909504" exitCode=0
Jan 28 18:13:03 crc kubenswrapper[4811]: I0128 18:13:03.773025 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"aeca1ea953b265c0f4e346981f2f377031cd40369c372fe999b89d4262909504"}
Jan 28 18:13:03 crc kubenswrapper[4811]: I0128 18:13:03.773798 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"b4f78bf8172cfb57ab13039da08673144164a42ebc3fd398b0a09a9d5f8fcb12"}
Jan 28 18:13:03 crc kubenswrapper[4811]: I0128 18:13:03.773846 4811 scope.go:117] "RemoveContainer" containerID="52e2af28e60ab3db25494ca65635994bbcd159056152d6f183dc3f10f6758b00"
Jan 28 18:13:16 crc kubenswrapper[4811]: I0128 18:13:16.917516 4811 generic.go:334] "Generic (PLEG): container finished" podID="b42afac6-403d-45e8-9d45-23c58fbe0dfa" containerID="0632d5e06fe28505e1b269e6da9f4601872ad4fc62614f1645e9bac4d78fe28c" exitCode=0
Jan 28 18:13:16 crc kubenswrapper[4811]: I0128 18:13:16.917581 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-d2jdx" event={"ID":"b42afac6-403d-45e8-9d45-23c58fbe0dfa","Type":"ContainerDied","Data":"0632d5e06fe28505e1b269e6da9f4601872ad4fc62614f1645e9bac4d78fe28c"}
Jan 28 18:13:18 crc kubenswrapper[4811]: I0128 18:13:18.815940 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-d2jdx"
Jan 28 18:13:18 crc kubenswrapper[4811]: I0128 18:13:18.937962 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-d2jdx" event={"ID":"b42afac6-403d-45e8-9d45-23c58fbe0dfa","Type":"ContainerDied","Data":"54bf6c0118f16e431a0758811793d2f968182b243c6816da2264ac6bf3dfe67e"}
Jan 28 18:13:18 crc kubenswrapper[4811]: I0128 18:13:18.938005 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="54bf6c0118f16e431a0758811793d2f968182b243c6816da2264ac6bf3dfe67e"
Jan 28 18:13:18 crc kubenswrapper[4811]: I0128 18:13:18.938030 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-d2jdx"
Jan 28 18:13:18 crc kubenswrapper[4811]: I0128 18:13:18.981243 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b42afac6-403d-45e8-9d45-23c58fbe0dfa-neutron-dhcp-combined-ca-bundle\") pod \"b42afac6-403d-45e8-9d45-23c58fbe0dfa\" (UID: \"b42afac6-403d-45e8-9d45-23c58fbe0dfa\") "
Jan 28 18:13:18 crc kubenswrapper[4811]: I0128 18:13:18.981321 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b42afac6-403d-45e8-9d45-23c58fbe0dfa-ceph\") pod \"b42afac6-403d-45e8-9d45-23c58fbe0dfa\" (UID: \"b42afac6-403d-45e8-9d45-23c58fbe0dfa\") "
Jan 28 18:13:18 crc kubenswrapper[4811]: I0128 18:13:18.981363 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g55d\" (UniqueName: \"kubernetes.io/projected/b42afac6-403d-45e8-9d45-23c58fbe0dfa-kube-api-access-6g55d\") pod \"b42afac6-403d-45e8-9d45-23c58fbe0dfa\" (UID: \"b42afac6-403d-45e8-9d45-23c58fbe0dfa\") "
Jan 28 18:13:18 crc kubenswrapper[4811]: I0128 18:13:18.981538 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b42afac6-403d-45e8-9d45-23c58fbe0dfa-neutron-dhcp-agent-neutron-config-0\") pod \"b42afac6-403d-45e8-9d45-23c58fbe0dfa\" (UID: \"b42afac6-403d-45e8-9d45-23c58fbe0dfa\") "
Jan 28 18:13:18 crc kubenswrapper[4811]: I0128 18:13:18.981667 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b42afac6-403d-45e8-9d45-23c58fbe0dfa-inventory\") pod \"b42afac6-403d-45e8-9d45-23c58fbe0dfa\" (UID: \"b42afac6-403d-45e8-9d45-23c58fbe0dfa\") "
Jan 28 18:13:18 crc kubenswrapper[4811]: I0128 18:13:18.981875 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/b42afac6-403d-45e8-9d45-23c58fbe0dfa-ssh-key-openstack-cell1\") pod \"b42afac6-403d-45e8-9d45-23c58fbe0dfa\" (UID: \"b42afac6-403d-45e8-9d45-23c58fbe0dfa\") "
Jan 28 18:13:18 crc kubenswrapper[4811]: I0128 18:13:18.994708 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b42afac6-403d-45e8-9d45-23c58fbe0dfa-kube-api-access-6g55d" (OuterVolumeSpecName: "kube-api-access-6g55d") pod "b42afac6-403d-45e8-9d45-23c58fbe0dfa" (UID: "b42afac6-403d-45e8-9d45-23c58fbe0dfa"). InnerVolumeSpecName "kube-api-access-6g55d". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 18:13:18 crc kubenswrapper[4811]: I0128 18:13:18.996337 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b42afac6-403d-45e8-9d45-23c58fbe0dfa-ceph" (OuterVolumeSpecName: "ceph") pod "b42afac6-403d-45e8-9d45-23c58fbe0dfa" (UID: "b42afac6-403d-45e8-9d45-23c58fbe0dfa"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 18:13:19 crc kubenswrapper[4811]: I0128 18:13:19.015727 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b42afac6-403d-45e8-9d45-23c58fbe0dfa-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "b42afac6-403d-45e8-9d45-23c58fbe0dfa" (UID: "b42afac6-403d-45e8-9d45-23c58fbe0dfa"). InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 18:13:19 crc kubenswrapper[4811]: I0128 18:13:19.041664 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b42afac6-403d-45e8-9d45-23c58fbe0dfa-neutron-dhcp-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-dhcp-agent-neutron-config-0") pod "b42afac6-403d-45e8-9d45-23c58fbe0dfa" (UID: "b42afac6-403d-45e8-9d45-23c58fbe0dfa"). InnerVolumeSpecName "neutron-dhcp-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 18:13:19 crc kubenswrapper[4811]: I0128 18:13:19.054104 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b42afac6-403d-45e8-9d45-23c58fbe0dfa-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "b42afac6-403d-45e8-9d45-23c58fbe0dfa" (UID: "b42afac6-403d-45e8-9d45-23c58fbe0dfa"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 18:13:19 crc kubenswrapper[4811]: I0128 18:13:19.061340 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b42afac6-403d-45e8-9d45-23c58fbe0dfa-inventory" (OuterVolumeSpecName: "inventory") pod "b42afac6-403d-45e8-9d45-23c58fbe0dfa" (UID: "b42afac6-403d-45e8-9d45-23c58fbe0dfa"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 18:13:19 crc kubenswrapper[4811]: I0128 18:13:19.085820 4811 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b42afac6-403d-45e8-9d45-23c58fbe0dfa-inventory\") on node \"crc\" DevicePath \"\""
Jan 28 18:13:19 crc kubenswrapper[4811]: I0128 18:13:19.085875 4811 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/b42afac6-403d-45e8-9d45-23c58fbe0dfa-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\""
Jan 28 18:13:19 crc kubenswrapper[4811]: I0128 18:13:19.085890 4811 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b42afac6-403d-45e8-9d45-23c58fbe0dfa-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 18:13:19 crc kubenswrapper[4811]: I0128 18:13:19.085900 4811 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b42afac6-403d-45e8-9d45-23c58fbe0dfa-ceph\") on node \"crc\" DevicePath \"\""
Jan 28 18:13:19 crc kubenswrapper[4811]: I0128 18:13:19.085911 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g55d\" (UniqueName: \"kubernetes.io/projected/b42afac6-403d-45e8-9d45-23c58fbe0dfa-kube-api-access-6g55d\") on node \"crc\" DevicePath \"\""
Jan 28 18:13:19 crc kubenswrapper[4811]: I0128 18:13:19.085924 4811 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b42afac6-403d-45e8-9d45-23c58fbe0dfa-neutron-dhcp-agent-neutron-config-0\") on node \"crc\" DevicePath \"\""
Jan 28 18:13:41 crc kubenswrapper[4811]: I0128 18:13:41.287193 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Jan 28 18:13:41 crc kubenswrapper[4811]: I0128 18:13:41.287996 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="3783b6d8-2a96-4199-98ae-03f37d52317c" containerName="nova-cell0-conductor-conductor" containerID="cri-o://2d4b5b10d2626bbf055cbaa147daa911685d289c63569ed2916effb3319aefbf" gracePeriod=30
Jan 28 18:13:41 crc kubenswrapper[4811]: I0128 18:13:41.857687 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Jan 28 18:13:41 crc kubenswrapper[4811]: I0128 18:13:41.857905 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="d19b204f-1218-4aa3-93c9-f09944ae4bc0" containerName="nova-cell1-conductor-conductor" containerID="cri-o://66f163a45abbabc6c8c264ea86fd2675faf11ab54796389dc0f6f0fb3cfb5c4a" gracePeriod=30
Jan 28 18:13:42 crc kubenswrapper[4811]: I0128 18:13:42.023773 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 28 18:13:42 crc kubenswrapper[4811]: I0128 18:13:42.024052 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a" containerName="nova-scheduler-scheduler" containerID="cri-o://dc0d055121aa021e2428e8cbafe622fcf332a04bb98f3e1168f791ab6179a7be" gracePeriod=30
Jan 28 18:13:42 crc kubenswrapper[4811]: I0128 18:13:42.038003 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Jan 28 18:13:42 crc kubenswrapper[4811]: I0128 18:13:42.038269 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b6fd0120-4e8a-4982-b126-29b86e3c8e57" containerName="nova-api-log" containerID="cri-o://5f79ad751a978a51849c4e3eebd3d33b9a6b9a61b42dcdc52aceb93fc402e47b" gracePeriod=30
Jan 28 18:13:42 crc kubenswrapper[4811]: I0128 18:13:42.038398 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b6fd0120-4e8a-4982-b126-29b86e3c8e57" containerName="nova-api-api" containerID="cri-o://3e8a236327bd2d4b92c95d82d7c3d48100da3bb61879a5eb425ceadbc128d781" gracePeriod=30
Jan 28 18:13:42 crc kubenswrapper[4811]: I0128 18:13:42.059055 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Jan 28 18:13:42 crc kubenswrapper[4811]: I0128 18:13:42.059992 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="991f7aba-d957-49d8-97ec-8a7044d24867" containerName="nova-metadata-log" containerID="cri-o://5ebec4254260cf34f9638bc0fa9d4b6dccc78f96947628bbb6a609aa1012c692" gracePeriod=30
Jan 28 18:13:42 crc kubenswrapper[4811]: I0128 18:13:42.060061 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="991f7aba-d957-49d8-97ec-8a7044d24867" containerName="nova-metadata-metadata" containerID="cri-o://b382d525493b04aeae4380545aa1d02d0452db6de98541672e63fc1dce4b009e" gracePeriod=30
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.097069 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.135428 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3783b6d8-2a96-4199-98ae-03f37d52317c-combined-ca-bundle\") pod \"3783b6d8-2a96-4199-98ae-03f37d52317c\" (UID: \"3783b6d8-2a96-4199-98ae-03f37d52317c\") "
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.135614 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zhncz\" (UniqueName: \"kubernetes.io/projected/3783b6d8-2a96-4199-98ae-03f37d52317c-kube-api-access-zhncz\") pod \"3783b6d8-2a96-4199-98ae-03f37d52317c\" (UID: \"3783b6d8-2a96-4199-98ae-03f37d52317c\") "
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.135703 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3783b6d8-2a96-4199-98ae-03f37d52317c-config-data\") pod \"3783b6d8-2a96-4199-98ae-03f37d52317c\" (UID: \"3783b6d8-2a96-4199-98ae-03f37d52317c\") "
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.141124 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3783b6d8-2a96-4199-98ae-03f37d52317c-kube-api-access-zhncz" (OuterVolumeSpecName: "kube-api-access-zhncz") pod "3783b6d8-2a96-4199-98ae-03f37d52317c" (UID: "3783b6d8-2a96-4199-98ae-03f37d52317c"). InnerVolumeSpecName "kube-api-access-zhncz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.177092 4811 generic.go:334] "Generic (PLEG): container finished" podID="991f7aba-d957-49d8-97ec-8a7044d24867" containerID="5ebec4254260cf34f9638bc0fa9d4b6dccc78f96947628bbb6a609aa1012c692" exitCode=143
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.177213 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"991f7aba-d957-49d8-97ec-8a7044d24867","Type":"ContainerDied","Data":"5ebec4254260cf34f9638bc0fa9d4b6dccc78f96947628bbb6a609aa1012c692"}
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.177624 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3783b6d8-2a96-4199-98ae-03f37d52317c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3783b6d8-2a96-4199-98ae-03f37d52317c" (UID: "3783b6d8-2a96-4199-98ae-03f37d52317c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.179463 4811 generic.go:334] "Generic (PLEG): container finished" podID="b6fd0120-4e8a-4982-b126-29b86e3c8e57" containerID="5f79ad751a978a51849c4e3eebd3d33b9a6b9a61b42dcdc52aceb93fc402e47b" exitCode=143
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.179512 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b6fd0120-4e8a-4982-b126-29b86e3c8e57","Type":"ContainerDied","Data":"5f79ad751a978a51849c4e3eebd3d33b9a6b9a61b42dcdc52aceb93fc402e47b"}
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.181391 4811 generic.go:334] "Generic (PLEG): container finished" podID="3783b6d8-2a96-4199-98ae-03f37d52317c" containerID="2d4b5b10d2626bbf055cbaa147daa911685d289c63569ed2916effb3319aefbf" exitCode=0
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.181472 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.181464 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"3783b6d8-2a96-4199-98ae-03f37d52317c","Type":"ContainerDied","Data":"2d4b5b10d2626bbf055cbaa147daa911685d289c63569ed2916effb3319aefbf"}
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.181611 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"3783b6d8-2a96-4199-98ae-03f37d52317c","Type":"ContainerDied","Data":"5c5da60e7a4601c022af8ce3f81db98b9dc14129a9455cb41104c95751139624"}
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.181636 4811 scope.go:117] "RemoveContainer" containerID="2d4b5b10d2626bbf055cbaa147daa911685d289c63569ed2916effb3319aefbf"
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.195878 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3783b6d8-2a96-4199-98ae-03f37d52317c-config-data" (OuterVolumeSpecName: "config-data") pod "3783b6d8-2a96-4199-98ae-03f37d52317c" (UID: "3783b6d8-2a96-4199-98ae-03f37d52317c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.237987 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3783b6d8-2a96-4199-98ae-03f37d52317c-config-data\") on node \"crc\" DevicePath \"\""
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.238024 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3783b6d8-2a96-4199-98ae-03f37d52317c-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.238037 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zhncz\" (UniqueName: \"kubernetes.io/projected/3783b6d8-2a96-4199-98ae-03f37d52317c-kube-api-access-zhncz\") on node \"crc\" DevicePath \"\""
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.292628 4811 scope.go:117] "RemoveContainer" containerID="2d4b5b10d2626bbf055cbaa147daa911685d289c63569ed2916effb3319aefbf"
Jan 28 18:13:43 crc kubenswrapper[4811]: E0128 18:13:43.292984 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d4b5b10d2626bbf055cbaa147daa911685d289c63569ed2916effb3319aefbf\": container with ID starting with 2d4b5b10d2626bbf055cbaa147daa911685d289c63569ed2916effb3319aefbf not found: ID does not exist" containerID="2d4b5b10d2626bbf055cbaa147daa911685d289c63569ed2916effb3319aefbf"
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.293015 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d4b5b10d2626bbf055cbaa147daa911685d289c63569ed2916effb3319aefbf"} err="failed to get container status \"2d4b5b10d2626bbf055cbaa147daa911685d289c63569ed2916effb3319aefbf\": rpc error: code = NotFound desc = could not find container \"2d4b5b10d2626bbf055cbaa147daa911685d289c63569ed2916effb3319aefbf\": container with ID starting with 2d4b5b10d2626bbf055cbaa147daa911685d289c63569ed2916effb3319aefbf not found: ID does not exist"
Jan 28 18:13:43 crc kubenswrapper[4811]: E0128 18:13:43.299208 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="66f163a45abbabc6c8c264ea86fd2675faf11ab54796389dc0f6f0fb3cfb5c4a" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Jan 28 18:13:43 crc kubenswrapper[4811]: E0128 18:13:43.300174 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="66f163a45abbabc6c8c264ea86fd2675faf11ab54796389dc0f6f0fb3cfb5c4a" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Jan 28 18:13:43 crc kubenswrapper[4811]: E0128 18:13:43.301420 4811 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="66f163a45abbabc6c8c264ea86fd2675faf11ab54796389dc0f6f0fb3cfb5c4a" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Jan 28 18:13:43 crc kubenswrapper[4811]: E0128 18:13:43.301483 4811 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="d19b204f-1218-4aa3-93c9-f09944ae4bc0" containerName="nova-cell1-conductor-conductor"
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.519448 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.533351 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.553515 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"]
Jan 28 18:13:43 crc kubenswrapper[4811]: E0128 18:13:43.554207 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3783b6d8-2a96-4199-98ae-03f37d52317c" containerName="nova-cell0-conductor-conductor"
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.554230 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="3783b6d8-2a96-4199-98ae-03f37d52317c" containerName="nova-cell0-conductor-conductor"
Jan 28 18:13:43 crc kubenswrapper[4811]: E0128 18:13:43.554247 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b42afac6-403d-45e8-9d45-23c58fbe0dfa" containerName="neutron-dhcp-openstack-openstack-cell1"
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.554256 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="b42afac6-403d-45e8-9d45-23c58fbe0dfa" containerName="neutron-dhcp-openstack-openstack-cell1"
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.554586 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="3783b6d8-2a96-4199-98ae-03f37d52317c" containerName="nova-cell0-conductor-conductor"
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.554612 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="b42afac6-403d-45e8-9d45-23c58fbe0dfa" containerName="neutron-dhcp-openstack-openstack-cell1"
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.555893 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.559007 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.565252 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.650517 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5gvt\" (UniqueName: \"kubernetes.io/projected/83d34ccd-1f43-44ad-962d-f9644e97dc18-kube-api-access-g5gvt\") pod \"nova-cell0-conductor-0\" (UID: \"83d34ccd-1f43-44ad-962d-f9644e97dc18\") " pod="openstack/nova-cell0-conductor-0"
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.650610 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83d34ccd-1f43-44ad-962d-f9644e97dc18-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"83d34ccd-1f43-44ad-962d-f9644e97dc18\") " pod="openstack/nova-cell0-conductor-0"
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.650739 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83d34ccd-1f43-44ad-962d-f9644e97dc18-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"83d34ccd-1f43-44ad-962d-f9644e97dc18\") " pod="openstack/nova-cell0-conductor-0"
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.752397 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83d34ccd-1f43-44ad-962d-f9644e97dc18-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"83d34ccd-1f43-44ad-962d-f9644e97dc18\") " pod="openstack/nova-cell0-conductor-0"
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.753120 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5gvt\" (UniqueName: \"kubernetes.io/projected/83d34ccd-1f43-44ad-962d-f9644e97dc18-kube-api-access-g5gvt\") pod \"nova-cell0-conductor-0\" (UID: \"83d34ccd-1f43-44ad-962d-f9644e97dc18\") " pod="openstack/nova-cell0-conductor-0"
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.753230 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83d34ccd-1f43-44ad-962d-f9644e97dc18-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"83d34ccd-1f43-44ad-962d-f9644e97dc18\") " pod="openstack/nova-cell0-conductor-0"
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.895341 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83d34ccd-1f43-44ad-962d-f9644e97dc18-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"83d34ccd-1f43-44ad-962d-f9644e97dc18\") " pod="openstack/nova-cell0-conductor-0"
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.896876 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83d34ccd-1f43-44ad-962d-f9644e97dc18-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"83d34ccd-1f43-44ad-962d-f9644e97dc18\") " pod="openstack/nova-cell0-conductor-0"
Jan 28 18:13:43 crc kubenswrapper[4811]: I0128 18:13:43.906038 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5gvt\" (UniqueName: \"kubernetes.io/projected/83d34ccd-1f43-44ad-962d-f9644e97dc18-kube-api-access-g5gvt\") pod \"nova-cell0-conductor-0\" (UID: \"83d34ccd-1f43-44ad-962d-f9644e97dc18\") " pod="openstack/nova-cell0-conductor-0"
Jan 28 18:13:44 crc kubenswrapper[4811]: I0128 18:13:44.179505 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Jan 28 18:13:44 crc kubenswrapper[4811]: I0128 18:13:44.354309 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3783b6d8-2a96-4199-98ae-03f37d52317c" path="/var/lib/kubelet/pods/3783b6d8-2a96-4199-98ae-03f37d52317c/volumes"
Jan 28 18:13:44 crc kubenswrapper[4811]: I0128 18:13:44.631715 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Jan 28 18:13:44 crc kubenswrapper[4811]: W0128 18:13:44.641827 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod83d34ccd_1f43_44ad_962d_f9644e97dc18.slice/crio-aebe02e701771d8650d95c59774e50d5afe297f163928471775b35596b43123c WatchSource:0}: Error finding container aebe02e701771d8650d95c59774e50d5afe297f163928471775b35596b43123c: Status 404 returned error can't find the container with id aebe02e701771d8650d95c59774e50d5afe297f163928471775b35596b43123c
Jan 28 18:13:45 crc kubenswrapper[4811]: I0128 18:13:45.204137 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"83d34ccd-1f43-44ad-962d-f9644e97dc18","Type":"ContainerStarted","Data":"ed6adb83ead9754ad7365887d67cd7275891b8668ac8ac7284264c69a7002bcb"}
Jan 28 18:13:45 crc kubenswrapper[4811]: I0128 18:13:45.204413 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"83d34ccd-1f43-44ad-962d-f9644e97dc18","Type":"ContainerStarted","Data":"aebe02e701771d8650d95c59774e50d5afe297f163928471775b35596b43123c"}
Jan 28 18:13:45 crc kubenswrapper[4811]: I0128 18:13:45.204470 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0"
Jan 28 18:13:45 crc kubenswrapper[4811]: I0128 18:13:45.220190 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.220171048 podStartE2EDuration="2.220171048s" podCreationTimestamp="2026-01-28 18:13:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 18:13:45.217320621 +0000 UTC m=+8917.971684214" watchObservedRunningTime="2026-01-28 18:13:45.220171048 +0000 UTC m=+8917.974534631"
Jan 28 18:13:45 crc kubenswrapper[4811]: I0128 18:13:45.380056 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="991f7aba-d957-49d8-97ec-8a7044d24867" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.85:8775/\": dial tcp 10.217.1.85:8775: connect: connection refused"
Jan 28 18:13:45 crc kubenswrapper[4811]: I0128 18:13:45.380081 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="991f7aba-d957-49d8-97ec-8a7044d24867" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.85:8775/\": dial tcp 10.217.1.85:8775: connect: connection refused"
Jan 28 18:13:45 crc kubenswrapper[4811]: I0128 18:13:45.825365 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 28 18:13:45 crc kubenswrapper[4811]: I0128 18:13:45.896062 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rmrv8\" (UniqueName: \"kubernetes.io/projected/17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a-kube-api-access-rmrv8\") pod \"17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a\" (UID: \"17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a\") "
Jan 28 18:13:45 crc kubenswrapper[4811]: I0128 18:13:45.896215 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a-config-data\") pod \"17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a\" (UID: \"17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a\") "
Jan 28 18:13:45 crc kubenswrapper[4811]: I0128 18:13:45.896309 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a-combined-ca-bundle\") pod \"17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a\" (UID: \"17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a\") "
Jan 28 18:13:45 crc kubenswrapper[4811]: I0128 18:13:45.918789 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a-kube-api-access-rmrv8" (OuterVolumeSpecName: "kube-api-access-rmrv8") pod "17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a" (UID: "17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a"). InnerVolumeSpecName "kube-api-access-rmrv8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 18:13:45 crc kubenswrapper[4811]: I0128 18:13:45.945535 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a" (UID: "17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 18:13:45 crc kubenswrapper[4811]: I0128 18:13:45.952990 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a-config-data" (OuterVolumeSpecName: "config-data") pod "17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a" (UID: "17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 18:13:45 crc kubenswrapper[4811]: I0128 18:13:45.974039 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 28 18:13:45 crc kubenswrapper[4811]: I0128 18:13:45.986508 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:45.997858 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cz9t2\" (UniqueName: \"kubernetes.io/projected/b6fd0120-4e8a-4982-b126-29b86e3c8e57-kube-api-access-cz9t2\") pod \"b6fd0120-4e8a-4982-b126-29b86e3c8e57\" (UID: \"b6fd0120-4e8a-4982-b126-29b86e3c8e57\") "
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:45.997971 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6fd0120-4e8a-4982-b126-29b86e3c8e57-config-data\") pod \"b6fd0120-4e8a-4982-b126-29b86e3c8e57\" (UID: \"b6fd0120-4e8a-4982-b126-29b86e3c8e57\") "
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:45.998086 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6fd0120-4e8a-4982-b126-29b86e3c8e57-logs\") pod \"b6fd0120-4e8a-4982-b126-29b86e3c8e57\" (UID: \"b6fd0120-4e8a-4982-b126-29b86e3c8e57\") "
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:45.998106 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6fd0120-4e8a-4982-b126-29b86e3c8e57-combined-ca-bundle\") pod \"b6fd0120-4e8a-4982-b126-29b86e3c8e57\" (UID: \"b6fd0120-4e8a-4982-b126-29b86e3c8e57\") "
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:45.998580 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:45.998612 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rmrv8\" (UniqueName: \"kubernetes.io/projected/17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a-kube-api-access-rmrv8\") on node \"crc\" DevicePath \"\""
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:45.998623 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a-config-data\") on node \"crc\" DevicePath \"\""
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.010019 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b6fd0120-4e8a-4982-b126-29b86e3c8e57-logs" (OuterVolumeSpecName: "logs") pod "b6fd0120-4e8a-4982-b126-29b86e3c8e57" (UID: "b6fd0120-4e8a-4982-b126-29b86e3c8e57"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.010705 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6fd0120-4e8a-4982-b126-29b86e3c8e57-kube-api-access-cz9t2" (OuterVolumeSpecName: "kube-api-access-cz9t2") pod "b6fd0120-4e8a-4982-b126-29b86e3c8e57" (UID: "b6fd0120-4e8a-4982-b126-29b86e3c8e57"). InnerVolumeSpecName "kube-api-access-cz9t2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.078586 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6fd0120-4e8a-4982-b126-29b86e3c8e57-config-data" (OuterVolumeSpecName: "config-data") pod "b6fd0120-4e8a-4982-b126-29b86e3c8e57" (UID: "b6fd0120-4e8a-4982-b126-29b86e3c8e57"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.078679 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6fd0120-4e8a-4982-b126-29b86e3c8e57-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b6fd0120-4e8a-4982-b126-29b86e3c8e57" (UID: "b6fd0120-4e8a-4982-b126-29b86e3c8e57"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.103138 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/991f7aba-d957-49d8-97ec-8a7044d24867-logs\") pod \"991f7aba-d957-49d8-97ec-8a7044d24867\" (UID: \"991f7aba-d957-49d8-97ec-8a7044d24867\") "
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.103369 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/991f7aba-d957-49d8-97ec-8a7044d24867-config-data\") pod \"991f7aba-d957-49d8-97ec-8a7044d24867\" (UID: \"991f7aba-d957-49d8-97ec-8a7044d24867\") "
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.103396 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k2jks\" (UniqueName: \"kubernetes.io/projected/991f7aba-d957-49d8-97ec-8a7044d24867-kube-api-access-k2jks\") pod \"991f7aba-d957-49d8-97ec-8a7044d24867\" (UID: \"991f7aba-d957-49d8-97ec-8a7044d24867\") "
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.103540 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/991f7aba-d957-49d8-97ec-8a7044d24867-combined-ca-bundle\") pod \"991f7aba-d957-49d8-97ec-8a7044d24867\" (UID: \"991f7aba-d957-49d8-97ec-8a7044d24867\") "
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.104245 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cz9t2\" (UniqueName: \"kubernetes.io/projected/b6fd0120-4e8a-4982-b126-29b86e3c8e57-kube-api-access-cz9t2\") on node \"crc\" DevicePath \"\""
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.104271 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6fd0120-4e8a-4982-b126-29b86e3c8e57-config-data\") on node \"crc\" DevicePath \"\""
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.104285 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6fd0120-4e8a-4982-b126-29b86e3c8e57-logs\") on node \"crc\" DevicePath \"\""
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.104299 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6fd0120-4e8a-4982-b126-29b86e3c8e57-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.124965 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/991f7aba-d957-49d8-97ec-8a7044d24867-logs" (OuterVolumeSpecName: "logs") pod "991f7aba-d957-49d8-97ec-8a7044d24867" (UID: "991f7aba-d957-49d8-97ec-8a7044d24867"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.139398 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/991f7aba-d957-49d8-97ec-8a7044d24867-kube-api-access-k2jks" (OuterVolumeSpecName: "kube-api-access-k2jks") pod "991f7aba-d957-49d8-97ec-8a7044d24867" (UID: "991f7aba-d957-49d8-97ec-8a7044d24867"). InnerVolumeSpecName "kube-api-access-k2jks". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.201935 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/991f7aba-d957-49d8-97ec-8a7044d24867-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "991f7aba-d957-49d8-97ec-8a7044d24867" (UID: "991f7aba-d957-49d8-97ec-8a7044d24867"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.207814 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k2jks\" (UniqueName: \"kubernetes.io/projected/991f7aba-d957-49d8-97ec-8a7044d24867-kube-api-access-k2jks\") on node \"crc\" DevicePath \"\""
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.208729 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/991f7aba-d957-49d8-97ec-8a7044d24867-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.208819 4811 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/991f7aba-d957-49d8-97ec-8a7044d24867-logs\") on node \"crc\" DevicePath \"\""
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.297719 4811 generic.go:334] "Generic (PLEG): container finished" podID="b6fd0120-4e8a-4982-b126-29b86e3c8e57" containerID="3e8a236327bd2d4b92c95d82d7c3d48100da3bb61879a5eb425ceadbc128d781" exitCode=0
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.297814 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b6fd0120-4e8a-4982-b126-29b86e3c8e57","Type":"ContainerDied","Data":"3e8a236327bd2d4b92c95d82d7c3d48100da3bb61879a5eb425ceadbc128d781"}
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.297841 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b6fd0120-4e8a-4982-b126-29b86e3c8e57","Type":"ContainerDied","Data":"db85045ec46fa3ed8c4b0288d323bc21a784bce10b6b6f8774491e477b9f1711"}
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.297857 4811 scope.go:117] "RemoveContainer" containerID="3e8a236327bd2d4b92c95d82d7c3d48100da3bb61879a5eb425ceadbc128d781"
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.298541 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.303197 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/991f7aba-d957-49d8-97ec-8a7044d24867-config-data" (OuterVolumeSpecName: "config-data") pod "991f7aba-d957-49d8-97ec-8a7044d24867" (UID: "991f7aba-d957-49d8-97ec-8a7044d24867"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.324667 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/991f7aba-d957-49d8-97ec-8a7044d24867-config-data\") on node \"crc\" DevicePath \"\""
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.336797 4811 generic.go:334] "Generic (PLEG): container finished" podID="d19b204f-1218-4aa3-93c9-f09944ae4bc0" containerID="66f163a45abbabc6c8c264ea86fd2675faf11ab54796389dc0f6f0fb3cfb5c4a" exitCode=0
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.336882 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"d19b204f-1218-4aa3-93c9-f09944ae4bc0","Type":"ContainerDied","Data":"66f163a45abbabc6c8c264ea86fd2675faf11ab54796389dc0f6f0fb3cfb5c4a"}
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.368089 4811 generic.go:334] "Generic (PLEG): container finished" podID="17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a" containerID="dc0d055121aa021e2428e8cbafe622fcf332a04bb98f3e1168f791ab6179a7be" exitCode=0
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.368231 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.375592 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a","Type":"ContainerDied","Data":"dc0d055121aa021e2428e8cbafe622fcf332a04bb98f3e1168f791ab6179a7be"}
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.375636 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a","Type":"ContainerDied","Data":"8db34fd78f6aa480cfb08c5a3acb978f204ccd67612dd055980682a13b247161"}
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.376841 4811 generic.go:334] "Generic (PLEG): container finished" podID="991f7aba-d957-49d8-97ec-8a7044d24867" containerID="b382d525493b04aeae4380545aa1d02d0452db6de98541672e63fc1dce4b009e" exitCode=0
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.377475 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.377921 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"991f7aba-d957-49d8-97ec-8a7044d24867","Type":"ContainerDied","Data":"b382d525493b04aeae4380545aa1d02d0452db6de98541672e63fc1dce4b009e"}
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.377953 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"991f7aba-d957-49d8-97ec-8a7044d24867","Type":"ContainerDied","Data":"4895d28fc05599105f93465da42df124c752a70482f98f14380da6aa405d5ee5"}
Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.421161 4811 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.469290 4811 scope.go:117] "RemoveContainer" containerID="5f79ad751a978a51849c4e3eebd3d33b9a6b9a61b42dcdc52aceb93fc402e47b" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.473026 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.525825 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.533153 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d19b204f-1218-4aa3-93c9-f09944ae4bc0-config-data\") pod \"d19b204f-1218-4aa3-93c9-f09944ae4bc0\" (UID: \"d19b204f-1218-4aa3-93c9-f09944ae4bc0\") " Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.533518 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d19b204f-1218-4aa3-93c9-f09944ae4bc0-combined-ca-bundle\") pod \"d19b204f-1218-4aa3-93c9-f09944ae4bc0\" (UID: \"d19b204f-1218-4aa3-93c9-f09944ae4bc0\") " Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.533689 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z42g8\" (UniqueName: \"kubernetes.io/projected/d19b204f-1218-4aa3-93c9-f09944ae4bc0-kube-api-access-z42g8\") pod \"d19b204f-1218-4aa3-93c9-f09944ae4bc0\" (UID: \"d19b204f-1218-4aa3-93c9-f09944ae4bc0\") " Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.547684 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d19b204f-1218-4aa3-93c9-f09944ae4bc0-kube-api-access-z42g8" (OuterVolumeSpecName: "kube-api-access-z42g8") pod "d19b204f-1218-4aa3-93c9-f09944ae4bc0" (UID: "d19b204f-1218-4aa3-93c9-f09944ae4bc0"). InnerVolumeSpecName "kube-api-access-z42g8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.549905 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.562930 4811 scope.go:117] "RemoveContainer" containerID="3e8a236327bd2d4b92c95d82d7c3d48100da3bb61879a5eb425ceadbc128d781" Jan 28 18:13:46 crc kubenswrapper[4811]: E0128 18:13:46.563564 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e8a236327bd2d4b92c95d82d7c3d48100da3bb61879a5eb425ceadbc128d781\": container with ID starting with 3e8a236327bd2d4b92c95d82d7c3d48100da3bb61879a5eb425ceadbc128d781 not found: ID does not exist" containerID="3e8a236327bd2d4b92c95d82d7c3d48100da3bb61879a5eb425ceadbc128d781" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.563618 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e8a236327bd2d4b92c95d82d7c3d48100da3bb61879a5eb425ceadbc128d781"} err="failed to get container status \"3e8a236327bd2d4b92c95d82d7c3d48100da3bb61879a5eb425ceadbc128d781\": rpc error: code = NotFound desc = could not find container \"3e8a236327bd2d4b92c95d82d7c3d48100da3bb61879a5eb425ceadbc128d781\": container with ID starting with 3e8a236327bd2d4b92c95d82d7c3d48100da3bb61879a5eb425ceadbc128d781 not found: ID does not exist" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.563654 4811 scope.go:117] "RemoveContainer" containerID="5f79ad751a978a51849c4e3eebd3d33b9a6b9a61b42dcdc52aceb93fc402e47b" Jan 28 18:13:46 crc kubenswrapper[4811]: E0128 18:13:46.564084 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f79ad751a978a51849c4e3eebd3d33b9a6b9a61b42dcdc52aceb93fc402e47b\": container with ID starting with 5f79ad751a978a51849c4e3eebd3d33b9a6b9a61b42dcdc52aceb93fc402e47b not found: ID does not exist" containerID="5f79ad751a978a51849c4e3eebd3d33b9a6b9a61b42dcdc52aceb93fc402e47b" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.564108 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f79ad751a978a51849c4e3eebd3d33b9a6b9a61b42dcdc52aceb93fc402e47b"} err="failed to get container status \"5f79ad751a978a51849c4e3eebd3d33b9a6b9a61b42dcdc52aceb93fc402e47b\": rpc error: code = NotFound desc = could not find container \"5f79ad751a978a51849c4e3eebd3d33b9a6b9a61b42dcdc52aceb93fc402e47b\": container with ID starting with 5f79ad751a978a51849c4e3eebd3d33b9a6b9a61b42dcdc52aceb93fc402e47b not found: ID does not exist" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.564125 4811 scope.go:117] "RemoveContainer" containerID="dc0d055121aa021e2428e8cbafe622fcf332a04bb98f3e1168f791ab6179a7be" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.575898 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.579016 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d19b204f-1218-4aa3-93c9-f09944ae4bc0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d19b204f-1218-4aa3-93c9-f09944ae4bc0" (UID: "d19b204f-1218-4aa3-93c9-f09944ae4bc0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.595035 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 28 18:13:46 crc kubenswrapper[4811]: E0128 18:13:46.595662 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6fd0120-4e8a-4982-b126-29b86e3c8e57" containerName="nova-api-log" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.595679 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6fd0120-4e8a-4982-b126-29b86e3c8e57" containerName="nova-api-log" Jan 28 18:13:46 crc kubenswrapper[4811]: E0128 18:13:46.595691 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="991f7aba-d957-49d8-97ec-8a7044d24867" containerName="nova-metadata-metadata" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.595699 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="991f7aba-d957-49d8-97ec-8a7044d24867" containerName="nova-metadata-metadata" Jan 28 18:13:46 crc kubenswrapper[4811]: E0128 18:13:46.595709 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6fd0120-4e8a-4982-b126-29b86e3c8e57" containerName="nova-api-api" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.595717 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6fd0120-4e8a-4982-b126-29b86e3c8e57" containerName="nova-api-api" Jan 28 18:13:46 crc kubenswrapper[4811]: E0128 18:13:46.595732 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d19b204f-1218-4aa3-93c9-f09944ae4bc0" containerName="nova-cell1-conductor-conductor" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.595739 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d19b204f-1218-4aa3-93c9-f09944ae4bc0" containerName="nova-cell1-conductor-conductor" Jan 28 18:13:46 crc kubenswrapper[4811]: E0128 18:13:46.595771 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="991f7aba-d957-49d8-97ec-8a7044d24867" containerName="nova-metadata-log" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.595778 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="991f7aba-d957-49d8-97ec-8a7044d24867" containerName="nova-metadata-log" Jan 28 18:13:46 crc kubenswrapper[4811]: E0128 18:13:46.595798 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a" containerName="nova-scheduler-scheduler" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.595806 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a" containerName="nova-scheduler-scheduler" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.596032 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a" containerName="nova-scheduler-scheduler" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.596049 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6fd0120-4e8a-4982-b126-29b86e3c8e57" containerName="nova-api-log" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.596068 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="991f7aba-d957-49d8-97ec-8a7044d24867" containerName="nova-metadata-log" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.596081 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="991f7aba-d957-49d8-97ec-8a7044d24867" containerName="nova-metadata-metadata" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.596104 4811 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="d19b204f-1218-4aa3-93c9-f09944ae4bc0" containerName="nova-cell1-conductor-conductor" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.596114 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6fd0120-4e8a-4982-b126-29b86e3c8e57" containerName="nova-api-api" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.597494 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.600863 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.603742 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d19b204f-1218-4aa3-93c9-f09944ae4bc0-config-data" (OuterVolumeSpecName: "config-data") pod "d19b204f-1218-4aa3-93c9-f09944ae4bc0" (UID: "d19b204f-1218-4aa3-93c9-f09944ae4bc0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.616347 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.635948 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17ab8273-ae23-4456-a6db-803f2bb4cbd1-config-data\") pod \"nova-api-0\" (UID: \"17ab8273-ae23-4456-a6db-803f2bb4cbd1\") " pod="openstack/nova-api-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.636179 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17ab8273-ae23-4456-a6db-803f2bb4cbd1-logs\") pod \"nova-api-0\" (UID: \"17ab8273-ae23-4456-a6db-803f2bb4cbd1\") " pod="openstack/nova-api-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.636379 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17ab8273-ae23-4456-a6db-803f2bb4cbd1-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"17ab8273-ae23-4456-a6db-803f2bb4cbd1\") " pod="openstack/nova-api-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.636700 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v5fw5\" (UniqueName: \"kubernetes.io/projected/17ab8273-ae23-4456-a6db-803f2bb4cbd1-kube-api-access-v5fw5\") pod \"nova-api-0\" (UID: \"17ab8273-ae23-4456-a6db-803f2bb4cbd1\") " pod="openstack/nova-api-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.636904 4811 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d19b204f-1218-4aa3-93c9-f09944ae4bc0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.637001 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z42g8\" (UniqueName: \"kubernetes.io/projected/d19b204f-1218-4aa3-93c9-f09944ae4bc0-kube-api-access-z42g8\") on node \"crc\" DevicePath \"\"" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.637092 4811 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d19b204f-1218-4aa3-93c9-f09944ae4bc0-config-data\") on node \"crc\" DevicePath \"\"" Jan 28 18:13:46 crc 
kubenswrapper[4811]: I0128 18:13:46.639599 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.653694 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.655460 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.657002 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.684047 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.696742 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.698532 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.700909 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.716578 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.726562 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.739835 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4796n\" (UniqueName: \"kubernetes.io/projected/cd902fd7-3d4d-4f73-844e-f1cdb8f59645-kube-api-access-4796n\") pod \"nova-metadata-0\" (UID: \"cd902fd7-3d4d-4f73-844e-f1cdb8f59645\") " pod="openstack/nova-metadata-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.739938 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17ab8273-ae23-4456-a6db-803f2bb4cbd1-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"17ab8273-ae23-4456-a6db-803f2bb4cbd1\") " pod="openstack/nova-api-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.740012 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7810437-dc40-47ba-b13c-02cdc7ca85ad-config-data\") pod \"nova-scheduler-0\" (UID: \"f7810437-dc40-47ba-b13c-02cdc7ca85ad\") " pod="openstack/nova-scheduler-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.740058 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd902fd7-3d4d-4f73-844e-f1cdb8f59645-logs\") pod \"nova-metadata-0\" (UID: \"cd902fd7-3d4d-4f73-844e-f1cdb8f59645\") " pod="openstack/nova-metadata-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.740118 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd902fd7-3d4d-4f73-844e-f1cdb8f59645-config-data\") pod \"nova-metadata-0\" (UID: \"cd902fd7-3d4d-4f73-844e-f1cdb8f59645\") " pod="openstack/nova-metadata-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.740158 4811 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd902fd7-3d4d-4f73-844e-f1cdb8f59645-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cd902fd7-3d4d-4f73-844e-f1cdb8f59645\") " pod="openstack/nova-metadata-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.740184 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7810437-dc40-47ba-b13c-02cdc7ca85ad-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f7810437-dc40-47ba-b13c-02cdc7ca85ad\") " pod="openstack/nova-scheduler-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.740218 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8f6b\" (UniqueName: \"kubernetes.io/projected/f7810437-dc40-47ba-b13c-02cdc7ca85ad-kube-api-access-s8f6b\") pod \"nova-scheduler-0\" (UID: \"f7810437-dc40-47ba-b13c-02cdc7ca85ad\") " pod="openstack/nova-scheduler-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.740260 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v5fw5\" (UniqueName: \"kubernetes.io/projected/17ab8273-ae23-4456-a6db-803f2bb4cbd1-kube-api-access-v5fw5\") pod \"nova-api-0\" (UID: \"17ab8273-ae23-4456-a6db-803f2bb4cbd1\") " pod="openstack/nova-api-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.740361 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17ab8273-ae23-4456-a6db-803f2bb4cbd1-config-data\") pod \"nova-api-0\" (UID: \"17ab8273-ae23-4456-a6db-803f2bb4cbd1\") " pod="openstack/nova-api-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.740684 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17ab8273-ae23-4456-a6db-803f2bb4cbd1-logs\") pod \"nova-api-0\" (UID: \"17ab8273-ae23-4456-a6db-803f2bb4cbd1\") " pod="openstack/nova-api-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.741181 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17ab8273-ae23-4456-a6db-803f2bb4cbd1-logs\") pod \"nova-api-0\" (UID: \"17ab8273-ae23-4456-a6db-803f2bb4cbd1\") " pod="openstack/nova-api-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.744250 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17ab8273-ae23-4456-a6db-803f2bb4cbd1-config-data\") pod \"nova-api-0\" (UID: \"17ab8273-ae23-4456-a6db-803f2bb4cbd1\") " pod="openstack/nova-api-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.746926 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17ab8273-ae23-4456-a6db-803f2bb4cbd1-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"17ab8273-ae23-4456-a6db-803f2bb4cbd1\") " pod="openstack/nova-api-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.763694 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v5fw5\" (UniqueName: \"kubernetes.io/projected/17ab8273-ae23-4456-a6db-803f2bb4cbd1-kube-api-access-v5fw5\") pod \"nova-api-0\" (UID: \"17ab8273-ae23-4456-a6db-803f2bb4cbd1\") " pod="openstack/nova-api-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 
18:13:46.768675 4811 scope.go:117] "RemoveContainer" containerID="dc0d055121aa021e2428e8cbafe622fcf332a04bb98f3e1168f791ab6179a7be" Jan 28 18:13:46 crc kubenswrapper[4811]: E0128 18:13:46.769089 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc0d055121aa021e2428e8cbafe622fcf332a04bb98f3e1168f791ab6179a7be\": container with ID starting with dc0d055121aa021e2428e8cbafe622fcf332a04bb98f3e1168f791ab6179a7be not found: ID does not exist" containerID="dc0d055121aa021e2428e8cbafe622fcf332a04bb98f3e1168f791ab6179a7be" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.769125 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc0d055121aa021e2428e8cbafe622fcf332a04bb98f3e1168f791ab6179a7be"} err="failed to get container status \"dc0d055121aa021e2428e8cbafe622fcf332a04bb98f3e1168f791ab6179a7be\": rpc error: code = NotFound desc = could not find container \"dc0d055121aa021e2428e8cbafe622fcf332a04bb98f3e1168f791ab6179a7be\": container with ID starting with dc0d055121aa021e2428e8cbafe622fcf332a04bb98f3e1168f791ab6179a7be not found: ID does not exist" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.769145 4811 scope.go:117] "RemoveContainer" containerID="b382d525493b04aeae4380545aa1d02d0452db6de98541672e63fc1dce4b009e" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.830678 4811 scope.go:117] "RemoveContainer" containerID="5ebec4254260cf34f9638bc0fa9d4b6dccc78f96947628bbb6a609aa1012c692" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.842318 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4796n\" (UniqueName: \"kubernetes.io/projected/cd902fd7-3d4d-4f73-844e-f1cdb8f59645-kube-api-access-4796n\") pod \"nova-metadata-0\" (UID: \"cd902fd7-3d4d-4f73-844e-f1cdb8f59645\") " pod="openstack/nova-metadata-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.842400 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7810437-dc40-47ba-b13c-02cdc7ca85ad-config-data\") pod \"nova-scheduler-0\" (UID: \"f7810437-dc40-47ba-b13c-02cdc7ca85ad\") " pod="openstack/nova-scheduler-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.842448 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd902fd7-3d4d-4f73-844e-f1cdb8f59645-logs\") pod \"nova-metadata-0\" (UID: \"cd902fd7-3d4d-4f73-844e-f1cdb8f59645\") " pod="openstack/nova-metadata-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.842489 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd902fd7-3d4d-4f73-844e-f1cdb8f59645-config-data\") pod \"nova-metadata-0\" (UID: \"cd902fd7-3d4d-4f73-844e-f1cdb8f59645\") " pod="openstack/nova-metadata-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.842515 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd902fd7-3d4d-4f73-844e-f1cdb8f59645-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cd902fd7-3d4d-4f73-844e-f1cdb8f59645\") " pod="openstack/nova-metadata-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.842534 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/f7810437-dc40-47ba-b13c-02cdc7ca85ad-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f7810437-dc40-47ba-b13c-02cdc7ca85ad\") " pod="openstack/nova-scheduler-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.842558 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8f6b\" (UniqueName: \"kubernetes.io/projected/f7810437-dc40-47ba-b13c-02cdc7ca85ad-kube-api-access-s8f6b\") pod \"nova-scheduler-0\" (UID: \"f7810437-dc40-47ba-b13c-02cdc7ca85ad\") " pod="openstack/nova-scheduler-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.843402 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd902fd7-3d4d-4f73-844e-f1cdb8f59645-logs\") pod \"nova-metadata-0\" (UID: \"cd902fd7-3d4d-4f73-844e-f1cdb8f59645\") " pod="openstack/nova-metadata-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.845930 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7810437-dc40-47ba-b13c-02cdc7ca85ad-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f7810437-dc40-47ba-b13c-02cdc7ca85ad\") " pod="openstack/nova-scheduler-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.846195 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd902fd7-3d4d-4f73-844e-f1cdb8f59645-config-data\") pod \"nova-metadata-0\" (UID: \"cd902fd7-3d4d-4f73-844e-f1cdb8f59645\") " pod="openstack/nova-metadata-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.846654 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7810437-dc40-47ba-b13c-02cdc7ca85ad-config-data\") pod \"nova-scheduler-0\" (UID: \"f7810437-dc40-47ba-b13c-02cdc7ca85ad\") " pod="openstack/nova-scheduler-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.847088 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd902fd7-3d4d-4f73-844e-f1cdb8f59645-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cd902fd7-3d4d-4f73-844e-f1cdb8f59645\") " pod="openstack/nova-metadata-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.859966 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8f6b\" (UniqueName: \"kubernetes.io/projected/f7810437-dc40-47ba-b13c-02cdc7ca85ad-kube-api-access-s8f6b\") pod \"nova-scheduler-0\" (UID: \"f7810437-dc40-47ba-b13c-02cdc7ca85ad\") " pod="openstack/nova-scheduler-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.861213 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4796n\" (UniqueName: \"kubernetes.io/projected/cd902fd7-3d4d-4f73-844e-f1cdb8f59645-kube-api-access-4796n\") pod \"nova-metadata-0\" (UID: \"cd902fd7-3d4d-4f73-844e-f1cdb8f59645\") " pod="openstack/nova-metadata-0" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.862741 4811 scope.go:117] "RemoveContainer" containerID="b382d525493b04aeae4380545aa1d02d0452db6de98541672e63fc1dce4b009e" Jan 28 18:13:46 crc kubenswrapper[4811]: E0128 18:13:46.863113 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b382d525493b04aeae4380545aa1d02d0452db6de98541672e63fc1dce4b009e\": container with ID starting with 
b382d525493b04aeae4380545aa1d02d0452db6de98541672e63fc1dce4b009e not found: ID does not exist" containerID="b382d525493b04aeae4380545aa1d02d0452db6de98541672e63fc1dce4b009e" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.863147 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b382d525493b04aeae4380545aa1d02d0452db6de98541672e63fc1dce4b009e"} err="failed to get container status \"b382d525493b04aeae4380545aa1d02d0452db6de98541672e63fc1dce4b009e\": rpc error: code = NotFound desc = could not find container \"b382d525493b04aeae4380545aa1d02d0452db6de98541672e63fc1dce4b009e\": container with ID starting with b382d525493b04aeae4380545aa1d02d0452db6de98541672e63fc1dce4b009e not found: ID does not exist" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.863173 4811 scope.go:117] "RemoveContainer" containerID="5ebec4254260cf34f9638bc0fa9d4b6dccc78f96947628bbb6a609aa1012c692" Jan 28 18:13:46 crc kubenswrapper[4811]: E0128 18:13:46.863562 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ebec4254260cf34f9638bc0fa9d4b6dccc78f96947628bbb6a609aa1012c692\": container with ID starting with 5ebec4254260cf34f9638bc0fa9d4b6dccc78f96947628bbb6a609aa1012c692 not found: ID does not exist" containerID="5ebec4254260cf34f9638bc0fa9d4b6dccc78f96947628bbb6a609aa1012c692" Jan 28 18:13:46 crc kubenswrapper[4811]: I0128 18:13:46.863586 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ebec4254260cf34f9638bc0fa9d4b6dccc78f96947628bbb6a609aa1012c692"} err="failed to get container status \"5ebec4254260cf34f9638bc0fa9d4b6dccc78f96947628bbb6a609aa1012c692\": rpc error: code = NotFound desc = could not find container \"5ebec4254260cf34f9638bc0fa9d4b6dccc78f96947628bbb6a609aa1012c692\": container with ID starting with 5ebec4254260cf34f9638bc0fa9d4b6dccc78f96947628bbb6a609aa1012c692 not found: ID does not exist" Jan 28 18:13:47 crc kubenswrapper[4811]: I0128 18:13:47.028102 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 28 18:13:47 crc kubenswrapper[4811]: I0128 18:13:47.043751 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 28 18:13:47 crc kubenswrapper[4811]: I0128 18:13:47.064804 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 28 18:13:47 crc kubenswrapper[4811]: I0128 18:13:47.392933 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"d19b204f-1218-4aa3-93c9-f09944ae4bc0","Type":"ContainerDied","Data":"b230736fc32efd1466f6fe7e97cbcc668606ddd5440375308d123be1a76ac781"} Jan 28 18:13:47 crc kubenswrapper[4811]: I0128 18:13:47.393002 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 28 18:13:47 crc kubenswrapper[4811]: I0128 18:13:47.393451 4811 scope.go:117] "RemoveContainer" containerID="66f163a45abbabc6c8c264ea86fd2675faf11ab54796389dc0f6f0fb3cfb5c4a" Jan 28 18:13:47 crc kubenswrapper[4811]: I0128 18:13:47.438723 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 28 18:13:47 crc kubenswrapper[4811]: I0128 18:13:47.456330 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 28 18:13:47 crc kubenswrapper[4811]: I0128 18:13:47.474084 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 28 18:13:47 crc kubenswrapper[4811]: I0128 18:13:47.475809 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 28 18:13:47 crc kubenswrapper[4811]: I0128 18:13:47.483264 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Jan 28 18:13:47 crc kubenswrapper[4811]: I0128 18:13:47.497613 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 28 18:13:47 crc kubenswrapper[4811]: I0128 18:13:47.566688 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6fzvq\" (UniqueName: \"kubernetes.io/projected/0a87ce7b-6797-434f-95c9-b5840e035560-kube-api-access-6fzvq\") pod \"nova-cell1-conductor-0\" (UID: \"0a87ce7b-6797-434f-95c9-b5840e035560\") " pod="openstack/nova-cell1-conductor-0" Jan 28 18:13:47 crc kubenswrapper[4811]: I0128 18:13:47.566741 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a87ce7b-6797-434f-95c9-b5840e035560-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"0a87ce7b-6797-434f-95c9-b5840e035560\") " pod="openstack/nova-cell1-conductor-0" Jan 28 18:13:47 crc kubenswrapper[4811]: I0128 18:13:47.566824 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a87ce7b-6797-434f-95c9-b5840e035560-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"0a87ce7b-6797-434f-95c9-b5840e035560\") " pod="openstack/nova-cell1-conductor-0" Jan 28 18:13:47 crc kubenswrapper[4811]: I0128 18:13:47.621369 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 28 18:13:47 crc kubenswrapper[4811]: I0128 18:13:47.669979 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6fzvq\" (UniqueName: \"kubernetes.io/projected/0a87ce7b-6797-434f-95c9-b5840e035560-kube-api-access-6fzvq\") pod \"nova-cell1-conductor-0\" (UID: \"0a87ce7b-6797-434f-95c9-b5840e035560\") " pod="openstack/nova-cell1-conductor-0" Jan 28 18:13:47 crc kubenswrapper[4811]: I0128 18:13:47.670051 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a87ce7b-6797-434f-95c9-b5840e035560-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"0a87ce7b-6797-434f-95c9-b5840e035560\") " pod="openstack/nova-cell1-conductor-0" Jan 28 18:13:47 crc kubenswrapper[4811]: I0128 18:13:47.670156 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/0a87ce7b-6797-434f-95c9-b5840e035560-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"0a87ce7b-6797-434f-95c9-b5840e035560\") " pod="openstack/nova-cell1-conductor-0" Jan 28 18:13:47 crc kubenswrapper[4811]: I0128 18:13:47.672844 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 28 18:13:47 crc kubenswrapper[4811]: I0128 18:13:47.677540 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a87ce7b-6797-434f-95c9-b5840e035560-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"0a87ce7b-6797-434f-95c9-b5840e035560\") " pod="openstack/nova-cell1-conductor-0" Jan 28 18:13:47 crc kubenswrapper[4811]: I0128 18:13:47.684754 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a87ce7b-6797-434f-95c9-b5840e035560-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"0a87ce7b-6797-434f-95c9-b5840e035560\") " pod="openstack/nova-cell1-conductor-0" Jan 28 18:13:47 crc kubenswrapper[4811]: I0128 18:13:47.688015 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6fzvq\" (UniqueName: \"kubernetes.io/projected/0a87ce7b-6797-434f-95c9-b5840e035560-kube-api-access-6fzvq\") pod \"nova-cell1-conductor-0\" (UID: \"0a87ce7b-6797-434f-95c9-b5840e035560\") " pod="openstack/nova-cell1-conductor-0" Jan 28 18:13:47 crc kubenswrapper[4811]: I0128 18:13:47.788835 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 28 18:13:47 crc kubenswrapper[4811]: I0128 18:13:47.806793 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 28 18:13:48 crc kubenswrapper[4811]: I0128 18:13:48.299065 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 28 18:13:48 crc kubenswrapper[4811]: I0128 18:13:48.370305 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a" path="/var/lib/kubelet/pods/17b2d4c0-7fa0-43f1-b8d9-b9c682cac47a/volumes" Jan 28 18:13:48 crc kubenswrapper[4811]: I0128 18:13:48.381216 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="991f7aba-d957-49d8-97ec-8a7044d24867" path="/var/lib/kubelet/pods/991f7aba-d957-49d8-97ec-8a7044d24867/volumes" Jan 28 18:13:48 crc kubenswrapper[4811]: I0128 18:13:48.382405 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6fd0120-4e8a-4982-b126-29b86e3c8e57" path="/var/lib/kubelet/pods/b6fd0120-4e8a-4982-b126-29b86e3c8e57/volumes" Jan 28 18:13:48 crc kubenswrapper[4811]: I0128 18:13:48.383269 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d19b204f-1218-4aa3-93c9-f09944ae4bc0" path="/var/lib/kubelet/pods/d19b204f-1218-4aa3-93c9-f09944ae4bc0/volumes" Jan 28 18:13:48 crc kubenswrapper[4811]: I0128 18:13:48.451427 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"0a87ce7b-6797-434f-95c9-b5840e035560","Type":"ContainerStarted","Data":"c2fd9da1262c0b6d3e379a565982a3d451a774c03ff101115031a9662541023f"} Jan 28 18:13:48 crc kubenswrapper[4811]: I0128 18:13:48.454420 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"cd902fd7-3d4d-4f73-844e-f1cdb8f59645","Type":"ContainerStarted","Data":"9453cb4aa774e8d46ca12d1a9f9373359a9fb589cc2f447d66ab374d556e9238"} Jan 28 18:13:48 crc kubenswrapper[4811]: I0128 18:13:48.454547 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cd902fd7-3d4d-4f73-844e-f1cdb8f59645","Type":"ContainerStarted","Data":"87d0662ef1f88b65211168c86d129beec7f494bb7773b93aad7d0ff639e4bc3f"} Jan 28 18:13:48 crc kubenswrapper[4811]: I0128 18:13:48.466659 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"17ab8273-ae23-4456-a6db-803f2bb4cbd1","Type":"ContainerStarted","Data":"714a8a5a582489040b9458d5067aff6075ba6d49bd2171c095abd5ccb9ede2e6"} Jan 28 18:13:48 crc kubenswrapper[4811]: I0128 18:13:48.466791 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"17ab8273-ae23-4456-a6db-803f2bb4cbd1","Type":"ContainerStarted","Data":"1025ee5d558587e3c4593f22d109cea074be8d05c88d7767b0efef213a8c355e"} Jan 28 18:13:48 crc kubenswrapper[4811]: I0128 18:13:48.470922 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f7810437-dc40-47ba-b13c-02cdc7ca85ad","Type":"ContainerStarted","Data":"ffb39f40f69bc0513860a5a114acd5955cfee079508b2bec9451d95f63ee72c4"} Jan 28 18:13:48 crc kubenswrapper[4811]: I0128 18:13:48.470975 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f7810437-dc40-47ba-b13c-02cdc7ca85ad","Type":"ContainerStarted","Data":"584ced0eead579d8a36def7a7d60b1ceb55e2b276f6d7e8eb262e00f336f3295"} Jan 28 18:13:48 crc kubenswrapper[4811]: I0128 18:13:48.495263 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.495239919 podStartE2EDuration="2.495239919s" podCreationTimestamp="2026-01-28 18:13:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 18:13:48.488908729 +0000 UTC m=+8921.243272312" watchObservedRunningTime="2026-01-28 18:13:48.495239919 +0000 UTC m=+8921.249603502" Jan 28 18:13:49 crc kubenswrapper[4811]: I0128 18:13:49.216191 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Jan 28 18:13:49 crc kubenswrapper[4811]: I0128 18:13:49.486563 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"0a87ce7b-6797-434f-95c9-b5840e035560","Type":"ContainerStarted","Data":"cc38fba1f342730d4fc64b0581af03df4f9b6f5ef9971edccf44fd53d3835f59"} Jan 28 18:13:49 crc kubenswrapper[4811]: I0128 18:13:49.487747 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Jan 28 18:13:49 crc kubenswrapper[4811]: I0128 18:13:49.494383 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cd902fd7-3d4d-4f73-844e-f1cdb8f59645","Type":"ContainerStarted","Data":"d28f349ee32414e0152327807a5ec263eedbae7e00dc1d1498921bdd5f436f82"} Jan 28 18:13:49 crc kubenswrapper[4811]: I0128 18:13:49.503626 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"17ab8273-ae23-4456-a6db-803f2bb4cbd1","Type":"ContainerStarted","Data":"6804784b6f5551b6ba9ea3457b71f23f2e0cb8bc41323e219972d9cc63c4ceab"} Jan 28 18:13:49 crc kubenswrapper[4811]: I0128 18:13:49.513093 4811 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.513072042 podStartE2EDuration="2.513072042s" podCreationTimestamp="2026-01-28 18:13:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 18:13:49.501618583 +0000 UTC m=+8922.255982176" watchObservedRunningTime="2026-01-28 18:13:49.513072042 +0000 UTC m=+8922.267435625" Jan 28 18:13:49 crc kubenswrapper[4811]: I0128 18:13:49.529620 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.529604096 podStartE2EDuration="3.529604096s" podCreationTimestamp="2026-01-28 18:13:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 18:13:49.524923901 +0000 UTC m=+8922.279287484" watchObservedRunningTime="2026-01-28 18:13:49.529604096 +0000 UTC m=+8922.283967679" Jan 28 18:13:49 crc kubenswrapper[4811]: I0128 18:13:49.554067 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.5540429749999998 podStartE2EDuration="3.554042975s" podCreationTimestamp="2026-01-28 18:13:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 18:13:49.544232891 +0000 UTC m=+8922.298596484" watchObservedRunningTime="2026-01-28 18:13:49.554042975 +0000 UTC m=+8922.308406558" Jan 28 18:13:52 crc kubenswrapper[4811]: I0128 18:13:52.044946 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 28 18:13:52 crc kubenswrapper[4811]: I0128 18:13:52.045012 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 28 18:13:52 crc kubenswrapper[4811]: I0128 18:13:52.065468 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Jan 28 18:13:57 crc kubenswrapper[4811]: I0128 18:13:57.029264 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 28 18:13:57 crc kubenswrapper[4811]: I0128 18:13:57.029796 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 28 18:13:57 crc kubenswrapper[4811]: I0128 18:13:57.044928 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 28 18:13:57 crc kubenswrapper[4811]: I0128 18:13:57.044983 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 28 18:13:57 crc kubenswrapper[4811]: I0128 18:13:57.065801 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Jan 28 18:13:57 crc kubenswrapper[4811]: I0128 18:13:57.096074 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Jan 28 18:13:57 crc kubenswrapper[4811]: I0128 18:13:57.610700 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jan 28 18:13:57 crc kubenswrapper[4811]: I0128 18:13:57.886729 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Jan 28 18:13:58 crc kubenswrapper[4811]: I0128 18:13:58.155664 4811 prober.go:107] 
"Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="17ab8273-ae23-4456-a6db-803f2bb4cbd1" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.188:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 28 18:13:58 crc kubenswrapper[4811]: I0128 18:13:58.155723 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="cd902fd7-3d4d-4f73-844e-f1cdb8f59645" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.189:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 28 18:13:58 crc kubenswrapper[4811]: I0128 18:13:58.155696 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="17ab8273-ae23-4456-a6db-803f2bb4cbd1" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.188:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 28 18:13:58 crc kubenswrapper[4811]: I0128 18:13:58.155741 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="cd902fd7-3d4d-4f73-844e-f1cdb8f59645" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.189:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 28 18:14:05 crc kubenswrapper[4811]: I0128 18:14:05.943905 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rbpw5"] Jan 28 18:14:05 crc kubenswrapper[4811]: I0128 18:14:05.947579 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rbpw5" Jan 28 18:14:05 crc kubenswrapper[4811]: I0128 18:14:05.960736 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rbpw5"] Jan 28 18:14:06 crc kubenswrapper[4811]: I0128 18:14:06.095952 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0291a62b-bb8c-4144-8564-1c031ad667e5-catalog-content\") pod \"redhat-operators-rbpw5\" (UID: \"0291a62b-bb8c-4144-8564-1c031ad667e5\") " pod="openshift-marketplace/redhat-operators-rbpw5" Jan 28 18:14:06 crc kubenswrapper[4811]: I0128 18:14:06.095998 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8dr6v\" (UniqueName: \"kubernetes.io/projected/0291a62b-bb8c-4144-8564-1c031ad667e5-kube-api-access-8dr6v\") pod \"redhat-operators-rbpw5\" (UID: \"0291a62b-bb8c-4144-8564-1c031ad667e5\") " pod="openshift-marketplace/redhat-operators-rbpw5" Jan 28 18:14:06 crc kubenswrapper[4811]: I0128 18:14:06.096210 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0291a62b-bb8c-4144-8564-1c031ad667e5-utilities\") pod \"redhat-operators-rbpw5\" (UID: \"0291a62b-bb8c-4144-8564-1c031ad667e5\") " pod="openshift-marketplace/redhat-operators-rbpw5" Jan 28 18:14:06 crc kubenswrapper[4811]: I0128 18:14:06.197876 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0291a62b-bb8c-4144-8564-1c031ad667e5-utilities\") pod \"redhat-operators-rbpw5\" (UID: \"0291a62b-bb8c-4144-8564-1c031ad667e5\") " pod="openshift-marketplace/redhat-operators-rbpw5" Jan 28 18:14:06 crc 
kubenswrapper[4811]: I0128 18:14:06.198272 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0291a62b-bb8c-4144-8564-1c031ad667e5-catalog-content\") pod \"redhat-operators-rbpw5\" (UID: \"0291a62b-bb8c-4144-8564-1c031ad667e5\") " pod="openshift-marketplace/redhat-operators-rbpw5" Jan 28 18:14:06 crc kubenswrapper[4811]: I0128 18:14:06.198309 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8dr6v\" (UniqueName: \"kubernetes.io/projected/0291a62b-bb8c-4144-8564-1c031ad667e5-kube-api-access-8dr6v\") pod \"redhat-operators-rbpw5\" (UID: \"0291a62b-bb8c-4144-8564-1c031ad667e5\") " pod="openshift-marketplace/redhat-operators-rbpw5" Jan 28 18:14:06 crc kubenswrapper[4811]: I0128 18:14:06.198399 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0291a62b-bb8c-4144-8564-1c031ad667e5-utilities\") pod \"redhat-operators-rbpw5\" (UID: \"0291a62b-bb8c-4144-8564-1c031ad667e5\") " pod="openshift-marketplace/redhat-operators-rbpw5" Jan 28 18:14:06 crc kubenswrapper[4811]: I0128 18:14:06.198686 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0291a62b-bb8c-4144-8564-1c031ad667e5-catalog-content\") pod \"redhat-operators-rbpw5\" (UID: \"0291a62b-bb8c-4144-8564-1c031ad667e5\") " pod="openshift-marketplace/redhat-operators-rbpw5" Jan 28 18:14:06 crc kubenswrapper[4811]: I0128 18:14:06.225295 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8dr6v\" (UniqueName: \"kubernetes.io/projected/0291a62b-bb8c-4144-8564-1c031ad667e5-kube-api-access-8dr6v\") pod \"redhat-operators-rbpw5\" (UID: \"0291a62b-bb8c-4144-8564-1c031ad667e5\") " pod="openshift-marketplace/redhat-operators-rbpw5" Jan 28 18:14:06 crc kubenswrapper[4811]: I0128 18:14:06.275033 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rbpw5" Jan 28 18:14:06 crc kubenswrapper[4811]: I0128 18:14:06.834245 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rbpw5"] Jan 28 18:14:06 crc kubenswrapper[4811]: W0128 18:14:06.836314 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0291a62b_bb8c_4144_8564_1c031ad667e5.slice/crio-962786f43737952d57a25bb0e9ce0f4936a9c0cd095dac9aa90080269f273c8a WatchSource:0}: Error finding container 962786f43737952d57a25bb0e9ce0f4936a9c0cd095dac9aa90080269f273c8a: Status 404 returned error can't find the container with id 962786f43737952d57a25bb0e9ce0f4936a9c0cd095dac9aa90080269f273c8a Jan 28 18:14:07 crc kubenswrapper[4811]: I0128 18:14:07.034230 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 28 18:14:07 crc kubenswrapper[4811]: I0128 18:14:07.034821 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 28 18:14:07 crc kubenswrapper[4811]: I0128 18:14:07.035590 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 28 18:14:07 crc kubenswrapper[4811]: I0128 18:14:07.035635 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 28 18:14:07 crc kubenswrapper[4811]: I0128 18:14:07.046962 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 28 18:14:07 crc kubenswrapper[4811]: I0128 18:14:07.047015 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 28 18:14:07 crc kubenswrapper[4811]: I0128 18:14:07.050012 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 28 18:14:07 crc kubenswrapper[4811]: I0128 18:14:07.050100 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 28 18:14:07 crc kubenswrapper[4811]: I0128 18:14:07.052028 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 28 18:14:07 crc kubenswrapper[4811]: I0128 18:14:07.052342 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 28 18:14:07 crc kubenswrapper[4811]: I0128 18:14:07.676525 4811 generic.go:334] "Generic (PLEG): container finished" podID="0291a62b-bb8c-4144-8564-1c031ad667e5" containerID="f464710750ce257b79c994be5633216f409ecf8476e901bb03c6f36f05c7a789" exitCode=0 Jan 28 18:14:07 crc kubenswrapper[4811]: I0128 18:14:07.676588 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rbpw5" event={"ID":"0291a62b-bb8c-4144-8564-1c031ad667e5","Type":"ContainerDied","Data":"f464710750ce257b79c994be5633216f409ecf8476e901bb03c6f36f05c7a789"} Jan 28 18:14:07 crc kubenswrapper[4811]: I0128 18:14:07.676617 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rbpw5" event={"ID":"0291a62b-bb8c-4144-8564-1c031ad667e5","Type":"ContainerStarted","Data":"962786f43737952d57a25bb0e9ce0f4936a9c0cd095dac9aa90080269f273c8a"} Jan 28 18:14:07 crc kubenswrapper[4811]: I0128 18:14:07.678794 4811 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 
18:14:08.484907 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4"] Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.488800 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.492974 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.493256 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.493391 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.493536 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-856xk" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.493815 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.500641 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.500717 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-cells-global-config" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.516791 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4"] Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.658193 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.658269 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-ssh-key-openstack-cell1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.658304 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.658322 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: 
\"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.658340 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.658369 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.658519 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.658924 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.659006 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.659041 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4t5w\" (UniqueName: \"kubernetes.io/projected/8df61c74-a922-4222-a4d1-64f4d871c1cd-kube-api-access-s4t5w\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.659065 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-cell1-compute-config-1\") pod 
\"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.761546 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.761603 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4t5w\" (UniqueName: \"kubernetes.io/projected/8df61c74-a922-4222-a4d1-64f4d871c1cd-kube-api-access-s4t5w\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.761636 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.761738 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.761799 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-ssh-key-openstack-cell1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.761846 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.761872 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 
18:14:08.761898 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.761989 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.762024 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.762156 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.763341 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.763390 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.768181 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.768464 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-cell1-compute-config-1\") pod 
\"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.768773 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-ssh-key-openstack-cell1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.768807 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.769289 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.769380 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.769852 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.771996 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.781901 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4t5w\" (UniqueName: \"kubernetes.io/projected/8df61c74-a922-4222-a4d1-64f4d871c1cd-kube-api-access-s4t5w\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:08 crc kubenswrapper[4811]: I0128 18:14:08.811096 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:14:09 crc kubenswrapper[4811]: I0128 18:14:09.421794 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4"] Jan 28 18:14:09 crc kubenswrapper[4811]: W0128 18:14:09.429305 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8df61c74_a922_4222_a4d1_64f4d871c1cd.slice/crio-eb70ce05819f8e30b344cc05813740377c12646c7e3d2092e23fa26616389166 WatchSource:0}: Error finding container eb70ce05819f8e30b344cc05813740377c12646c7e3d2092e23fa26616389166: Status 404 returned error can't find the container with id eb70ce05819f8e30b344cc05813740377c12646c7e3d2092e23fa26616389166 Jan 28 18:14:09 crc kubenswrapper[4811]: I0128 18:14:09.696680 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" event={"ID":"8df61c74-a922-4222-a4d1-64f4d871c1cd","Type":"ContainerStarted","Data":"eb70ce05819f8e30b344cc05813740377c12646c7e3d2092e23fa26616389166"} Jan 28 18:14:09 crc kubenswrapper[4811]: I0128 18:14:09.700003 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rbpw5" event={"ID":"0291a62b-bb8c-4144-8564-1c031ad667e5","Type":"ContainerStarted","Data":"8978a2f8b2ee284c3a6ddaa6eb1da4b130f0368bdffbb5e88ba10d3420f0b058"} Jan 28 18:14:11 crc kubenswrapper[4811]: I0128 18:14:11.722717 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" event={"ID":"8df61c74-a922-4222-a4d1-64f4d871c1cd","Type":"ContainerStarted","Data":"afdf191d963e4f1af55dbc07bbe757baf7e507e646836215c83e489a9acb14ea"} Jan 28 18:14:11 crc kubenswrapper[4811]: I0128 18:14:11.750425 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" podStartSLOduration=2.497280648 podStartE2EDuration="3.750402514s" podCreationTimestamp="2026-01-28 18:14:08 +0000 UTC" firstStartedPulling="2026-01-28 18:14:09.431755152 +0000 UTC m=+8942.186118735" lastFinishedPulling="2026-01-28 18:14:10.684877018 +0000 UTC m=+8943.439240601" observedRunningTime="2026-01-28 18:14:11.739812389 +0000 UTC m=+8944.494175982" watchObservedRunningTime="2026-01-28 18:14:11.750402514 +0000 UTC m=+8944.504766097" Jan 28 18:14:15 crc kubenswrapper[4811]: I0128 18:14:15.603912 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-api-0" podUID="b6fd0120-4e8a-4982-b126-29b86e3c8e57" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.86:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 28 18:14:15 crc kubenswrapper[4811]: I0128 18:14:15.603924 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-api-0" podUID="b6fd0120-4e8a-4982-b126-29b86e3c8e57" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.86:8774/\": dial tcp 10.217.1.86:8774: i/o timeout (Client.Timeout exceeded while awaiting headers)" Jan 28 18:14:18 crc kubenswrapper[4811]: I0128 18:14:18.813120 4811 generic.go:334] "Generic (PLEG): container finished" podID="0291a62b-bb8c-4144-8564-1c031ad667e5" containerID="8978a2f8b2ee284c3a6ddaa6eb1da4b130f0368bdffbb5e88ba10d3420f0b058" exitCode=0 Jan 28 18:14:18 crc 
kubenswrapper[4811]: I0128 18:14:18.813343 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rbpw5" event={"ID":"0291a62b-bb8c-4144-8564-1c031ad667e5","Type":"ContainerDied","Data":"8978a2f8b2ee284c3a6ddaa6eb1da4b130f0368bdffbb5e88ba10d3420f0b058"} Jan 28 18:14:19 crc kubenswrapper[4811]: I0128 18:14:19.825808 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rbpw5" event={"ID":"0291a62b-bb8c-4144-8564-1c031ad667e5","Type":"ContainerStarted","Data":"d8530ee05ec8c3dea884cc1c54beb78890e757fef6a472496bfc9741abba9d30"} Jan 28 18:14:19 crc kubenswrapper[4811]: I0128 18:14:19.877781 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rbpw5" podStartSLOduration=3.121057458 podStartE2EDuration="14.877619441s" podCreationTimestamp="2026-01-28 18:14:05 +0000 UTC" firstStartedPulling="2026-01-28 18:14:07.678487347 +0000 UTC m=+8940.432850930" lastFinishedPulling="2026-01-28 18:14:19.43504933 +0000 UTC m=+8952.189412913" observedRunningTime="2026-01-28 18:14:19.84522773 +0000 UTC m=+8952.599591323" watchObservedRunningTime="2026-01-28 18:14:19.877619441 +0000 UTC m=+8952.631983024" Jan 28 18:14:26 crc kubenswrapper[4811]: I0128 18:14:26.275970 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rbpw5" Jan 28 18:14:26 crc kubenswrapper[4811]: I0128 18:14:26.277512 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rbpw5" Jan 28 18:14:26 crc kubenswrapper[4811]: I0128 18:14:26.322915 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rbpw5" Jan 28 18:14:26 crc kubenswrapper[4811]: I0128 18:14:26.963825 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rbpw5" Jan 28 18:14:27 crc kubenswrapper[4811]: I0128 18:14:27.020195 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rbpw5"] Jan 28 18:14:28 crc kubenswrapper[4811]: I0128 18:14:28.908081 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rbpw5" podUID="0291a62b-bb8c-4144-8564-1c031ad667e5" containerName="registry-server" containerID="cri-o://d8530ee05ec8c3dea884cc1c54beb78890e757fef6a472496bfc9741abba9d30" gracePeriod=2 Jan 28 18:14:29 crc kubenswrapper[4811]: I0128 18:14:29.416771 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rbpw5" Jan 28 18:14:29 crc kubenswrapper[4811]: I0128 18:14:29.497310 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0291a62b-bb8c-4144-8564-1c031ad667e5-utilities\") pod \"0291a62b-bb8c-4144-8564-1c031ad667e5\" (UID: \"0291a62b-bb8c-4144-8564-1c031ad667e5\") " Jan 28 18:14:29 crc kubenswrapper[4811]: I0128 18:14:29.497357 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0291a62b-bb8c-4144-8564-1c031ad667e5-catalog-content\") pod \"0291a62b-bb8c-4144-8564-1c031ad667e5\" (UID: \"0291a62b-bb8c-4144-8564-1c031ad667e5\") " Jan 28 18:14:29 crc kubenswrapper[4811]: I0128 18:14:29.497395 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8dr6v\" (UniqueName: \"kubernetes.io/projected/0291a62b-bb8c-4144-8564-1c031ad667e5-kube-api-access-8dr6v\") pod \"0291a62b-bb8c-4144-8564-1c031ad667e5\" (UID: \"0291a62b-bb8c-4144-8564-1c031ad667e5\") " Jan 28 18:14:29 crc kubenswrapper[4811]: I0128 18:14:29.497948 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0291a62b-bb8c-4144-8564-1c031ad667e5-utilities" (OuterVolumeSpecName: "utilities") pod "0291a62b-bb8c-4144-8564-1c031ad667e5" (UID: "0291a62b-bb8c-4144-8564-1c031ad667e5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 18:14:29 crc kubenswrapper[4811]: I0128 18:14:29.503305 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0291a62b-bb8c-4144-8564-1c031ad667e5-kube-api-access-8dr6v" (OuterVolumeSpecName: "kube-api-access-8dr6v") pod "0291a62b-bb8c-4144-8564-1c031ad667e5" (UID: "0291a62b-bb8c-4144-8564-1c031ad667e5"). InnerVolumeSpecName "kube-api-access-8dr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 18:14:29 crc kubenswrapper[4811]: I0128 18:14:29.600158 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0291a62b-bb8c-4144-8564-1c031ad667e5-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 18:14:29 crc kubenswrapper[4811]: I0128 18:14:29.600216 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8dr6v\" (UniqueName: \"kubernetes.io/projected/0291a62b-bb8c-4144-8564-1c031ad667e5-kube-api-access-8dr6v\") on node \"crc\" DevicePath \"\"" Jan 28 18:14:29 crc kubenswrapper[4811]: I0128 18:14:29.629659 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0291a62b-bb8c-4144-8564-1c031ad667e5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0291a62b-bb8c-4144-8564-1c031ad667e5" (UID: "0291a62b-bb8c-4144-8564-1c031ad667e5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 18:14:29 crc kubenswrapper[4811]: I0128 18:14:29.701825 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0291a62b-bb8c-4144-8564-1c031ad667e5-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 18:14:29 crc kubenswrapper[4811]: I0128 18:14:29.920235 4811 generic.go:334] "Generic (PLEG): container finished" podID="0291a62b-bb8c-4144-8564-1c031ad667e5" containerID="d8530ee05ec8c3dea884cc1c54beb78890e757fef6a472496bfc9741abba9d30" exitCode=0 Jan 28 18:14:29 crc kubenswrapper[4811]: I0128 18:14:29.920316 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rbpw5" event={"ID":"0291a62b-bb8c-4144-8564-1c031ad667e5","Type":"ContainerDied","Data":"d8530ee05ec8c3dea884cc1c54beb78890e757fef6a472496bfc9741abba9d30"} Jan 28 18:14:29 crc kubenswrapper[4811]: I0128 18:14:29.920352 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rbpw5" event={"ID":"0291a62b-bb8c-4144-8564-1c031ad667e5","Type":"ContainerDied","Data":"962786f43737952d57a25bb0e9ce0f4936a9c0cd095dac9aa90080269f273c8a"} Jan 28 18:14:29 crc kubenswrapper[4811]: I0128 18:14:29.920379 4811 scope.go:117] "RemoveContainer" containerID="d8530ee05ec8c3dea884cc1c54beb78890e757fef6a472496bfc9741abba9d30" Jan 28 18:14:29 crc kubenswrapper[4811]: I0128 18:14:29.920581 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rbpw5" Jan 28 18:14:29 crc kubenswrapper[4811]: I0128 18:14:29.958110 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rbpw5"] Jan 28 18:14:29 crc kubenswrapper[4811]: I0128 18:14:29.964818 4811 scope.go:117] "RemoveContainer" containerID="8978a2f8b2ee284c3a6ddaa6eb1da4b130f0368bdffbb5e88ba10d3420f0b058" Jan 28 18:14:29 crc kubenswrapper[4811]: I0128 18:14:29.969013 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rbpw5"] Jan 28 18:14:29 crc kubenswrapper[4811]: I0128 18:14:29.997539 4811 scope.go:117] "RemoveContainer" containerID="f464710750ce257b79c994be5633216f409ecf8476e901bb03c6f36f05c7a789" Jan 28 18:14:30 crc kubenswrapper[4811]: I0128 18:14:30.043637 4811 scope.go:117] "RemoveContainer" containerID="d8530ee05ec8c3dea884cc1c54beb78890e757fef6a472496bfc9741abba9d30" Jan 28 18:14:30 crc kubenswrapper[4811]: E0128 18:14:30.044229 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8530ee05ec8c3dea884cc1c54beb78890e757fef6a472496bfc9741abba9d30\": container with ID starting with d8530ee05ec8c3dea884cc1c54beb78890e757fef6a472496bfc9741abba9d30 not found: ID does not exist" containerID="d8530ee05ec8c3dea884cc1c54beb78890e757fef6a472496bfc9741abba9d30" Jan 28 18:14:30 crc kubenswrapper[4811]: I0128 18:14:30.044258 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8530ee05ec8c3dea884cc1c54beb78890e757fef6a472496bfc9741abba9d30"} err="failed to get container status \"d8530ee05ec8c3dea884cc1c54beb78890e757fef6a472496bfc9741abba9d30\": rpc error: code = NotFound desc = could not find container \"d8530ee05ec8c3dea884cc1c54beb78890e757fef6a472496bfc9741abba9d30\": container with ID starting with d8530ee05ec8c3dea884cc1c54beb78890e757fef6a472496bfc9741abba9d30 not found: ID does not exist" Jan 28 18:14:30 crc 
kubenswrapper[4811]: I0128 18:14:30.044295 4811 scope.go:117] "RemoveContainer" containerID="8978a2f8b2ee284c3a6ddaa6eb1da4b130f0368bdffbb5e88ba10d3420f0b058" Jan 28 18:14:30 crc kubenswrapper[4811]: E0128 18:14:30.044800 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8978a2f8b2ee284c3a6ddaa6eb1da4b130f0368bdffbb5e88ba10d3420f0b058\": container with ID starting with 8978a2f8b2ee284c3a6ddaa6eb1da4b130f0368bdffbb5e88ba10d3420f0b058 not found: ID does not exist" containerID="8978a2f8b2ee284c3a6ddaa6eb1da4b130f0368bdffbb5e88ba10d3420f0b058" Jan 28 18:14:30 crc kubenswrapper[4811]: I0128 18:14:30.044818 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8978a2f8b2ee284c3a6ddaa6eb1da4b130f0368bdffbb5e88ba10d3420f0b058"} err="failed to get container status \"8978a2f8b2ee284c3a6ddaa6eb1da4b130f0368bdffbb5e88ba10d3420f0b058\": rpc error: code = NotFound desc = could not find container \"8978a2f8b2ee284c3a6ddaa6eb1da4b130f0368bdffbb5e88ba10d3420f0b058\": container with ID starting with 8978a2f8b2ee284c3a6ddaa6eb1da4b130f0368bdffbb5e88ba10d3420f0b058 not found: ID does not exist" Jan 28 18:14:30 crc kubenswrapper[4811]: I0128 18:14:30.044831 4811 scope.go:117] "RemoveContainer" containerID="f464710750ce257b79c994be5633216f409ecf8476e901bb03c6f36f05c7a789" Jan 28 18:14:30 crc kubenswrapper[4811]: E0128 18:14:30.045079 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f464710750ce257b79c994be5633216f409ecf8476e901bb03c6f36f05c7a789\": container with ID starting with f464710750ce257b79c994be5633216f409ecf8476e901bb03c6f36f05c7a789 not found: ID does not exist" containerID="f464710750ce257b79c994be5633216f409ecf8476e901bb03c6f36f05c7a789" Jan 28 18:14:30 crc kubenswrapper[4811]: I0128 18:14:30.045094 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f464710750ce257b79c994be5633216f409ecf8476e901bb03c6f36f05c7a789"} err="failed to get container status \"f464710750ce257b79c994be5633216f409ecf8476e901bb03c6f36f05c7a789\": rpc error: code = NotFound desc = could not find container \"f464710750ce257b79c994be5633216f409ecf8476e901bb03c6f36f05c7a789\": container with ID starting with f464710750ce257b79c994be5633216f409ecf8476e901bb03c6f36f05c7a789 not found: ID does not exist" Jan 28 18:14:30 crc kubenswrapper[4811]: I0128 18:14:30.353338 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0291a62b-bb8c-4144-8564-1c031ad667e5" path="/var/lib/kubelet/pods/0291a62b-bb8c-4144-8564-1c031ad667e5/volumes" Jan 28 18:15:00 crc kubenswrapper[4811]: I0128 18:15:00.156813 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493735-rc6fc"] Jan 28 18:15:00 crc kubenswrapper[4811]: E0128 18:15:00.158730 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0291a62b-bb8c-4144-8564-1c031ad667e5" containerName="extract-content" Jan 28 18:15:00 crc kubenswrapper[4811]: I0128 18:15:00.158754 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="0291a62b-bb8c-4144-8564-1c031ad667e5" containerName="extract-content" Jan 28 18:15:00 crc kubenswrapper[4811]: E0128 18:15:00.158776 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0291a62b-bb8c-4144-8564-1c031ad667e5" containerName="registry-server" Jan 28 18:15:00 crc kubenswrapper[4811]: I0128 
18:15:00.158785 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="0291a62b-bb8c-4144-8564-1c031ad667e5" containerName="registry-server" Jan 28 18:15:00 crc kubenswrapper[4811]: E0128 18:15:00.158811 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0291a62b-bb8c-4144-8564-1c031ad667e5" containerName="extract-utilities" Jan 28 18:15:00 crc kubenswrapper[4811]: I0128 18:15:00.158821 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="0291a62b-bb8c-4144-8564-1c031ad667e5" containerName="extract-utilities" Jan 28 18:15:00 crc kubenswrapper[4811]: I0128 18:15:00.159291 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="0291a62b-bb8c-4144-8564-1c031ad667e5" containerName="registry-server" Jan 28 18:15:00 crc kubenswrapper[4811]: I0128 18:15:00.160935 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493735-rc6fc" Jan 28 18:15:00 crc kubenswrapper[4811]: I0128 18:15:00.163424 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 28 18:15:00 crc kubenswrapper[4811]: I0128 18:15:00.164102 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 28 18:15:00 crc kubenswrapper[4811]: I0128 18:15:00.170282 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493735-rc6fc"] Jan 28 18:15:00 crc kubenswrapper[4811]: I0128 18:15:00.319935 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/95cdcd59-25e5-4f38-985b-be1be433eb8e-config-volume\") pod \"collect-profiles-29493735-rc6fc\" (UID: \"95cdcd59-25e5-4f38-985b-be1be433eb8e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493735-rc6fc" Jan 28 18:15:00 crc kubenswrapper[4811]: I0128 18:15:00.320301 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/95cdcd59-25e5-4f38-985b-be1be433eb8e-secret-volume\") pod \"collect-profiles-29493735-rc6fc\" (UID: \"95cdcd59-25e5-4f38-985b-be1be433eb8e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493735-rc6fc" Jan 28 18:15:00 crc kubenswrapper[4811]: I0128 18:15:00.321555 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wv4lw\" (UniqueName: \"kubernetes.io/projected/95cdcd59-25e5-4f38-985b-be1be433eb8e-kube-api-access-wv4lw\") pod \"collect-profiles-29493735-rc6fc\" (UID: \"95cdcd59-25e5-4f38-985b-be1be433eb8e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493735-rc6fc" Jan 28 18:15:00 crc kubenswrapper[4811]: I0128 18:15:00.423836 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/95cdcd59-25e5-4f38-985b-be1be433eb8e-secret-volume\") pod \"collect-profiles-29493735-rc6fc\" (UID: \"95cdcd59-25e5-4f38-985b-be1be433eb8e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493735-rc6fc" Jan 28 18:15:00 crc kubenswrapper[4811]: I0128 18:15:00.424193 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wv4lw\" (UniqueName: 
\"kubernetes.io/projected/95cdcd59-25e5-4f38-985b-be1be433eb8e-kube-api-access-wv4lw\") pod \"collect-profiles-29493735-rc6fc\" (UID: \"95cdcd59-25e5-4f38-985b-be1be433eb8e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493735-rc6fc" Jan 28 18:15:00 crc kubenswrapper[4811]: I0128 18:15:00.424301 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/95cdcd59-25e5-4f38-985b-be1be433eb8e-config-volume\") pod \"collect-profiles-29493735-rc6fc\" (UID: \"95cdcd59-25e5-4f38-985b-be1be433eb8e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493735-rc6fc" Jan 28 18:15:00 crc kubenswrapper[4811]: I0128 18:15:00.425461 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/95cdcd59-25e5-4f38-985b-be1be433eb8e-config-volume\") pod \"collect-profiles-29493735-rc6fc\" (UID: \"95cdcd59-25e5-4f38-985b-be1be433eb8e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493735-rc6fc" Jan 28 18:15:00 crc kubenswrapper[4811]: I0128 18:15:00.439196 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/95cdcd59-25e5-4f38-985b-be1be433eb8e-secret-volume\") pod \"collect-profiles-29493735-rc6fc\" (UID: \"95cdcd59-25e5-4f38-985b-be1be433eb8e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493735-rc6fc" Jan 28 18:15:00 crc kubenswrapper[4811]: I0128 18:15:00.446686 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wv4lw\" (UniqueName: \"kubernetes.io/projected/95cdcd59-25e5-4f38-985b-be1be433eb8e-kube-api-access-wv4lw\") pod \"collect-profiles-29493735-rc6fc\" (UID: \"95cdcd59-25e5-4f38-985b-be1be433eb8e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493735-rc6fc" Jan 28 18:15:00 crc kubenswrapper[4811]: I0128 18:15:00.487298 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493735-rc6fc" Jan 28 18:15:00 crc kubenswrapper[4811]: I0128 18:15:00.978547 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493735-rc6fc"] Jan 28 18:15:01 crc kubenswrapper[4811]: I0128 18:15:01.251993 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493735-rc6fc" event={"ID":"95cdcd59-25e5-4f38-985b-be1be433eb8e","Type":"ContainerStarted","Data":"6d04080b4362aa5b4bd7bbf0cfd3108714b36e2623b54c73d1d1f27e5e631d18"} Jan 28 18:15:01 crc kubenswrapper[4811]: I0128 18:15:01.252045 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493735-rc6fc" event={"ID":"95cdcd59-25e5-4f38-985b-be1be433eb8e","Type":"ContainerStarted","Data":"e4b8c24fb32a0d5ae7683604ffae27c5935c0255a6075c54b35bcf6706fbb99c"} Jan 28 18:15:01 crc kubenswrapper[4811]: I0128 18:15:01.272029 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29493735-rc6fc" podStartSLOduration=1.272005304 podStartE2EDuration="1.272005304s" podCreationTimestamp="2026-01-28 18:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 18:15:01.264202094 +0000 UTC m=+8994.018565677" watchObservedRunningTime="2026-01-28 18:15:01.272005304 +0000 UTC m=+8994.026368887" Jan 28 18:15:02 crc kubenswrapper[4811]: I0128 18:15:02.263614 4811 generic.go:334] "Generic (PLEG): container finished" podID="95cdcd59-25e5-4f38-985b-be1be433eb8e" containerID="6d04080b4362aa5b4bd7bbf0cfd3108714b36e2623b54c73d1d1f27e5e631d18" exitCode=0 Jan 28 18:15:02 crc kubenswrapper[4811]: I0128 18:15:02.263665 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493735-rc6fc" event={"ID":"95cdcd59-25e5-4f38-985b-be1be433eb8e","Type":"ContainerDied","Data":"6d04080b4362aa5b4bd7bbf0cfd3108714b36e2623b54c73d1d1f27e5e631d18"} Jan 28 18:15:03 crc kubenswrapper[4811]: I0128 18:15:03.087570 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 18:15:03 crc kubenswrapper[4811]: I0128 18:15:03.087885 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 18:15:03 crc kubenswrapper[4811]: I0128 18:15:03.647825 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493735-rc6fc" Jan 28 18:15:03 crc kubenswrapper[4811]: I0128 18:15:03.819161 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/95cdcd59-25e5-4f38-985b-be1be433eb8e-config-volume\") pod \"95cdcd59-25e5-4f38-985b-be1be433eb8e\" (UID: \"95cdcd59-25e5-4f38-985b-be1be433eb8e\") " Jan 28 18:15:03 crc kubenswrapper[4811]: I0128 18:15:03.819339 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wv4lw\" (UniqueName: \"kubernetes.io/projected/95cdcd59-25e5-4f38-985b-be1be433eb8e-kube-api-access-wv4lw\") pod \"95cdcd59-25e5-4f38-985b-be1be433eb8e\" (UID: \"95cdcd59-25e5-4f38-985b-be1be433eb8e\") " Jan 28 18:15:03 crc kubenswrapper[4811]: I0128 18:15:03.819575 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/95cdcd59-25e5-4f38-985b-be1be433eb8e-secret-volume\") pod \"95cdcd59-25e5-4f38-985b-be1be433eb8e\" (UID: \"95cdcd59-25e5-4f38-985b-be1be433eb8e\") " Jan 28 18:15:03 crc kubenswrapper[4811]: I0128 18:15:03.820085 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95cdcd59-25e5-4f38-985b-be1be433eb8e-config-volume" (OuterVolumeSpecName: "config-volume") pod "95cdcd59-25e5-4f38-985b-be1be433eb8e" (UID: "95cdcd59-25e5-4f38-985b-be1be433eb8e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 18:15:03 crc kubenswrapper[4811]: I0128 18:15:03.820242 4811 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/95cdcd59-25e5-4f38-985b-be1be433eb8e-config-volume\") on node \"crc\" DevicePath \"\"" Jan 28 18:15:03 crc kubenswrapper[4811]: I0128 18:15:03.825229 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95cdcd59-25e5-4f38-985b-be1be433eb8e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "95cdcd59-25e5-4f38-985b-be1be433eb8e" (UID: "95cdcd59-25e5-4f38-985b-be1be433eb8e"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:15:03 crc kubenswrapper[4811]: I0128 18:15:03.825701 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95cdcd59-25e5-4f38-985b-be1be433eb8e-kube-api-access-wv4lw" (OuterVolumeSpecName: "kube-api-access-wv4lw") pod "95cdcd59-25e5-4f38-985b-be1be433eb8e" (UID: "95cdcd59-25e5-4f38-985b-be1be433eb8e"). InnerVolumeSpecName "kube-api-access-wv4lw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 18:15:03 crc kubenswrapper[4811]: I0128 18:15:03.922397 4811 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/95cdcd59-25e5-4f38-985b-be1be433eb8e-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 28 18:15:03 crc kubenswrapper[4811]: I0128 18:15:03.922507 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wv4lw\" (UniqueName: \"kubernetes.io/projected/95cdcd59-25e5-4f38-985b-be1be433eb8e-kube-api-access-wv4lw\") on node \"crc\" DevicePath \"\"" Jan 28 18:15:04 crc kubenswrapper[4811]: I0128 18:15:04.290553 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493735-rc6fc" event={"ID":"95cdcd59-25e5-4f38-985b-be1be433eb8e","Type":"ContainerDied","Data":"e4b8c24fb32a0d5ae7683604ffae27c5935c0255a6075c54b35bcf6706fbb99c"} Jan 28 18:15:04 crc kubenswrapper[4811]: I0128 18:15:04.290586 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e4b8c24fb32a0d5ae7683604ffae27c5935c0255a6075c54b35bcf6706fbb99c" Jan 28 18:15:04 crc kubenswrapper[4811]: I0128 18:15:04.290601 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493735-rc6fc" Jan 28 18:15:04 crc kubenswrapper[4811]: I0128 18:15:04.358873 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493690-tb5dk"] Jan 28 18:15:04 crc kubenswrapper[4811]: I0128 18:15:04.372881 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493690-tb5dk"] Jan 28 18:15:06 crc kubenswrapper[4811]: I0128 18:15:06.351957 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e82f4af7-32ad-4515-ab89-cfb42f104ea1" path="/var/lib/kubelet/pods/e82f4af7-32ad-4515-ab89-cfb42f104ea1/volumes" Jan 28 18:15:06 crc kubenswrapper[4811]: I0128 18:15:06.678792 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-s28vp"] Jan 28 18:15:06 crc kubenswrapper[4811]: E0128 18:15:06.679275 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95cdcd59-25e5-4f38-985b-be1be433eb8e" containerName="collect-profiles" Jan 28 18:15:06 crc kubenswrapper[4811]: I0128 18:15:06.679291 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="95cdcd59-25e5-4f38-985b-be1be433eb8e" containerName="collect-profiles" Jan 28 18:15:06 crc kubenswrapper[4811]: I0128 18:15:06.679504 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="95cdcd59-25e5-4f38-985b-be1be433eb8e" containerName="collect-profiles" Jan 28 18:15:06 crc kubenswrapper[4811]: I0128 18:15:06.681033 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s28vp" Jan 28 18:15:06 crc kubenswrapper[4811]: I0128 18:15:06.700914 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-s28vp"] Jan 28 18:15:06 crc kubenswrapper[4811]: I0128 18:15:06.879893 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5dfb1906-b61d-4953-9349-ea4ccb7088fd-catalog-content\") pod \"redhat-marketplace-s28vp\" (UID: \"5dfb1906-b61d-4953-9349-ea4ccb7088fd\") " pod="openshift-marketplace/redhat-marketplace-s28vp" Jan 28 18:15:06 crc kubenswrapper[4811]: I0128 18:15:06.879982 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5dfb1906-b61d-4953-9349-ea4ccb7088fd-utilities\") pod \"redhat-marketplace-s28vp\" (UID: \"5dfb1906-b61d-4953-9349-ea4ccb7088fd\") " pod="openshift-marketplace/redhat-marketplace-s28vp" Jan 28 18:15:06 crc kubenswrapper[4811]: I0128 18:15:06.880422 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tqtf\" (UniqueName: \"kubernetes.io/projected/5dfb1906-b61d-4953-9349-ea4ccb7088fd-kube-api-access-7tqtf\") pod \"redhat-marketplace-s28vp\" (UID: \"5dfb1906-b61d-4953-9349-ea4ccb7088fd\") " pod="openshift-marketplace/redhat-marketplace-s28vp" Jan 28 18:15:06 crc kubenswrapper[4811]: I0128 18:15:06.982477 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tqtf\" (UniqueName: \"kubernetes.io/projected/5dfb1906-b61d-4953-9349-ea4ccb7088fd-kube-api-access-7tqtf\") pod \"redhat-marketplace-s28vp\" (UID: \"5dfb1906-b61d-4953-9349-ea4ccb7088fd\") " pod="openshift-marketplace/redhat-marketplace-s28vp" Jan 28 18:15:06 crc kubenswrapper[4811]: I0128 18:15:06.982557 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5dfb1906-b61d-4953-9349-ea4ccb7088fd-catalog-content\") pod \"redhat-marketplace-s28vp\" (UID: \"5dfb1906-b61d-4953-9349-ea4ccb7088fd\") " pod="openshift-marketplace/redhat-marketplace-s28vp" Jan 28 18:15:06 crc kubenswrapper[4811]: I0128 18:15:06.982607 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5dfb1906-b61d-4953-9349-ea4ccb7088fd-utilities\") pod \"redhat-marketplace-s28vp\" (UID: \"5dfb1906-b61d-4953-9349-ea4ccb7088fd\") " pod="openshift-marketplace/redhat-marketplace-s28vp" Jan 28 18:15:06 crc kubenswrapper[4811]: I0128 18:15:06.983147 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5dfb1906-b61d-4953-9349-ea4ccb7088fd-utilities\") pod \"redhat-marketplace-s28vp\" (UID: \"5dfb1906-b61d-4953-9349-ea4ccb7088fd\") " pod="openshift-marketplace/redhat-marketplace-s28vp" Jan 28 18:15:06 crc kubenswrapper[4811]: I0128 18:15:06.983393 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5dfb1906-b61d-4953-9349-ea4ccb7088fd-catalog-content\") pod \"redhat-marketplace-s28vp\" (UID: \"5dfb1906-b61d-4953-9349-ea4ccb7088fd\") " pod="openshift-marketplace/redhat-marketplace-s28vp" Jan 28 18:15:07 crc kubenswrapper[4811]: I0128 18:15:07.013575 4811 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-7tqtf\" (UniqueName: \"kubernetes.io/projected/5dfb1906-b61d-4953-9349-ea4ccb7088fd-kube-api-access-7tqtf\") pod \"redhat-marketplace-s28vp\" (UID: \"5dfb1906-b61d-4953-9349-ea4ccb7088fd\") " pod="openshift-marketplace/redhat-marketplace-s28vp" Jan 28 18:15:07 crc kubenswrapper[4811]: I0128 18:15:07.301418 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s28vp" Jan 28 18:15:07 crc kubenswrapper[4811]: I0128 18:15:07.762742 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-s28vp"] Jan 28 18:15:08 crc kubenswrapper[4811]: I0128 18:15:08.326209 4811 generic.go:334] "Generic (PLEG): container finished" podID="5dfb1906-b61d-4953-9349-ea4ccb7088fd" containerID="08c2fbd956012d14af3ba2bc6c19ea37af8264bcff0de2bcad4a959949d8155a" exitCode=0 Jan 28 18:15:08 crc kubenswrapper[4811]: I0128 18:15:08.326262 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s28vp" event={"ID":"5dfb1906-b61d-4953-9349-ea4ccb7088fd","Type":"ContainerDied","Data":"08c2fbd956012d14af3ba2bc6c19ea37af8264bcff0de2bcad4a959949d8155a"} Jan 28 18:15:08 crc kubenswrapper[4811]: I0128 18:15:08.326458 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s28vp" event={"ID":"5dfb1906-b61d-4953-9349-ea4ccb7088fd","Type":"ContainerStarted","Data":"f1a4d830e8bf986e9bb12f10168f6a40894f7a46d944946e4254c3f5c28952ad"} Jan 28 18:15:10 crc kubenswrapper[4811]: I0128 18:15:10.353250 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s28vp" event={"ID":"5dfb1906-b61d-4953-9349-ea4ccb7088fd","Type":"ContainerStarted","Data":"bb2aec129f6fbb2473e3874812c2702b15050752b817e86c6b107e09e9c9d201"} Jan 28 18:15:12 crc kubenswrapper[4811]: I0128 18:15:12.439969 4811 generic.go:334] "Generic (PLEG): container finished" podID="5dfb1906-b61d-4953-9349-ea4ccb7088fd" containerID="bb2aec129f6fbb2473e3874812c2702b15050752b817e86c6b107e09e9c9d201" exitCode=0 Jan 28 18:15:12 crc kubenswrapper[4811]: I0128 18:15:12.440308 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s28vp" event={"ID":"5dfb1906-b61d-4953-9349-ea4ccb7088fd","Type":"ContainerDied","Data":"bb2aec129f6fbb2473e3874812c2702b15050752b817e86c6b107e09e9c9d201"} Jan 28 18:15:13 crc kubenswrapper[4811]: I0128 18:15:13.450960 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s28vp" event={"ID":"5dfb1906-b61d-4953-9349-ea4ccb7088fd","Type":"ContainerStarted","Data":"555f0a7190facdc90d423cdb23c12b20fb0d1e77541d1a307a9ee3eba5302d74"} Jan 28 18:15:13 crc kubenswrapper[4811]: I0128 18:15:13.483223 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-s28vp" podStartSLOduration=2.753817721 podStartE2EDuration="7.483198372s" podCreationTimestamp="2026-01-28 18:15:06 +0000 UTC" firstStartedPulling="2026-01-28 18:15:08.328808463 +0000 UTC m=+9001.083172056" lastFinishedPulling="2026-01-28 18:15:13.058189124 +0000 UTC m=+9005.812552707" observedRunningTime="2026-01-28 18:15:13.471662252 +0000 UTC m=+9006.226025845" watchObservedRunningTime="2026-01-28 18:15:13.483198372 +0000 UTC m=+9006.237561965" Jan 28 18:15:15 crc kubenswrapper[4811]: I0128 18:15:15.270282 4811 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/certified-operators-bqrq8"] Jan 28 18:15:15 crc kubenswrapper[4811]: I0128 18:15:15.273521 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bqrq8" Jan 28 18:15:15 crc kubenswrapper[4811]: I0128 18:15:15.281772 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bqrq8"] Jan 28 18:15:15 crc kubenswrapper[4811]: I0128 18:15:15.468864 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65a87541-aff8-46ab-ae81-33ba87f41d4c-utilities\") pod \"certified-operators-bqrq8\" (UID: \"65a87541-aff8-46ab-ae81-33ba87f41d4c\") " pod="openshift-marketplace/certified-operators-bqrq8" Jan 28 18:15:15 crc kubenswrapper[4811]: I0128 18:15:15.468968 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkhl7\" (UniqueName: \"kubernetes.io/projected/65a87541-aff8-46ab-ae81-33ba87f41d4c-kube-api-access-vkhl7\") pod \"certified-operators-bqrq8\" (UID: \"65a87541-aff8-46ab-ae81-33ba87f41d4c\") " pod="openshift-marketplace/certified-operators-bqrq8" Jan 28 18:15:15 crc kubenswrapper[4811]: I0128 18:15:15.469096 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65a87541-aff8-46ab-ae81-33ba87f41d4c-catalog-content\") pod \"certified-operators-bqrq8\" (UID: \"65a87541-aff8-46ab-ae81-33ba87f41d4c\") " pod="openshift-marketplace/certified-operators-bqrq8" Jan 28 18:15:15 crc kubenswrapper[4811]: I0128 18:15:15.570811 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65a87541-aff8-46ab-ae81-33ba87f41d4c-catalog-content\") pod \"certified-operators-bqrq8\" (UID: \"65a87541-aff8-46ab-ae81-33ba87f41d4c\") " pod="openshift-marketplace/certified-operators-bqrq8" Jan 28 18:15:15 crc kubenswrapper[4811]: I0128 18:15:15.570970 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65a87541-aff8-46ab-ae81-33ba87f41d4c-utilities\") pod \"certified-operators-bqrq8\" (UID: \"65a87541-aff8-46ab-ae81-33ba87f41d4c\") " pod="openshift-marketplace/certified-operators-bqrq8" Jan 28 18:15:15 crc kubenswrapper[4811]: I0128 18:15:15.571009 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkhl7\" (UniqueName: \"kubernetes.io/projected/65a87541-aff8-46ab-ae81-33ba87f41d4c-kube-api-access-vkhl7\") pod \"certified-operators-bqrq8\" (UID: \"65a87541-aff8-46ab-ae81-33ba87f41d4c\") " pod="openshift-marketplace/certified-operators-bqrq8" Jan 28 18:15:15 crc kubenswrapper[4811]: I0128 18:15:15.571694 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65a87541-aff8-46ab-ae81-33ba87f41d4c-catalog-content\") pod \"certified-operators-bqrq8\" (UID: \"65a87541-aff8-46ab-ae81-33ba87f41d4c\") " pod="openshift-marketplace/certified-operators-bqrq8" Jan 28 18:15:15 crc kubenswrapper[4811]: I0128 18:15:15.571731 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65a87541-aff8-46ab-ae81-33ba87f41d4c-utilities\") pod \"certified-operators-bqrq8\" (UID: 
\"65a87541-aff8-46ab-ae81-33ba87f41d4c\") " pod="openshift-marketplace/certified-operators-bqrq8" Jan 28 18:15:15 crc kubenswrapper[4811]: I0128 18:15:15.594487 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vkhl7\" (UniqueName: \"kubernetes.io/projected/65a87541-aff8-46ab-ae81-33ba87f41d4c-kube-api-access-vkhl7\") pod \"certified-operators-bqrq8\" (UID: \"65a87541-aff8-46ab-ae81-33ba87f41d4c\") " pod="openshift-marketplace/certified-operators-bqrq8" Jan 28 18:15:15 crc kubenswrapper[4811]: I0128 18:15:15.595020 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bqrq8" Jan 28 18:15:16 crc kubenswrapper[4811]: I0128 18:15:16.131712 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bqrq8"] Jan 28 18:15:16 crc kubenswrapper[4811]: W0128 18:15:16.137610 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod65a87541_aff8_46ab_ae81_33ba87f41d4c.slice/crio-202cf0137c9f762d83d90a7510ae3dee837bd2b44f7075f95373687a5ed55d4d WatchSource:0}: Error finding container 202cf0137c9f762d83d90a7510ae3dee837bd2b44f7075f95373687a5ed55d4d: Status 404 returned error can't find the container with id 202cf0137c9f762d83d90a7510ae3dee837bd2b44f7075f95373687a5ed55d4d Jan 28 18:15:16 crc kubenswrapper[4811]: I0128 18:15:16.478910 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bqrq8" event={"ID":"65a87541-aff8-46ab-ae81-33ba87f41d4c","Type":"ContainerStarted","Data":"202cf0137c9f762d83d90a7510ae3dee837bd2b44f7075f95373687a5ed55d4d"} Jan 28 18:15:17 crc kubenswrapper[4811]: I0128 18:15:17.302060 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-s28vp" Jan 28 18:15:17 crc kubenswrapper[4811]: I0128 18:15:17.302345 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-s28vp" Jan 28 18:15:17 crc kubenswrapper[4811]: I0128 18:15:17.497476 4811 generic.go:334] "Generic (PLEG): container finished" podID="65a87541-aff8-46ab-ae81-33ba87f41d4c" containerID="2e9db1910288aff43e1027ec42a579d47e57eb690c45bd8e90044ac7aa1df985" exitCode=0 Jan 28 18:15:17 crc kubenswrapper[4811]: I0128 18:15:17.497525 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bqrq8" event={"ID":"65a87541-aff8-46ab-ae81-33ba87f41d4c","Type":"ContainerDied","Data":"2e9db1910288aff43e1027ec42a579d47e57eb690c45bd8e90044ac7aa1df985"} Jan 28 18:15:17 crc kubenswrapper[4811]: I0128 18:15:17.769445 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-s28vp" Jan 28 18:15:19 crc kubenswrapper[4811]: I0128 18:15:19.527570 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bqrq8" event={"ID":"65a87541-aff8-46ab-ae81-33ba87f41d4c","Type":"ContainerStarted","Data":"34771992e655f9c1797ed190231c663e3b6dbc67c22d68ede9df90919456a34d"} Jan 28 18:15:24 crc kubenswrapper[4811]: I0128 18:15:24.590831 4811 generic.go:334] "Generic (PLEG): container finished" podID="65a87541-aff8-46ab-ae81-33ba87f41d4c" containerID="34771992e655f9c1797ed190231c663e3b6dbc67c22d68ede9df90919456a34d" exitCode=0 Jan 28 18:15:24 crc kubenswrapper[4811]: I0128 18:15:24.590943 4811 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-marketplace/certified-operators-bqrq8" event={"ID":"65a87541-aff8-46ab-ae81-33ba87f41d4c","Type":"ContainerDied","Data":"34771992e655f9c1797ed190231c663e3b6dbc67c22d68ede9df90919456a34d"} Jan 28 18:15:26 crc kubenswrapper[4811]: I0128 18:15:26.611626 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bqrq8" event={"ID":"65a87541-aff8-46ab-ae81-33ba87f41d4c","Type":"ContainerStarted","Data":"ccf4493cf07cc00229795dbe73c5112fe61a1d2604f39079dc846106d708b044"} Jan 28 18:15:26 crc kubenswrapper[4811]: I0128 18:15:26.627219 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-bqrq8" podStartSLOduration=3.482715054 podStartE2EDuration="11.627202456s" podCreationTimestamp="2026-01-28 18:15:15 +0000 UTC" firstStartedPulling="2026-01-28 18:15:17.503270444 +0000 UTC m=+9010.257634027" lastFinishedPulling="2026-01-28 18:15:25.647757846 +0000 UTC m=+9018.402121429" observedRunningTime="2026-01-28 18:15:26.626572719 +0000 UTC m=+9019.380936332" watchObservedRunningTime="2026-01-28 18:15:26.627202456 +0000 UTC m=+9019.381566039" Jan 28 18:15:27 crc kubenswrapper[4811]: I0128 18:15:27.358175 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-s28vp" Jan 28 18:15:27 crc kubenswrapper[4811]: I0128 18:15:27.411869 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-s28vp"] Jan 28 18:15:27 crc kubenswrapper[4811]: I0128 18:15:27.620515 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-s28vp" podUID="5dfb1906-b61d-4953-9349-ea4ccb7088fd" containerName="registry-server" containerID="cri-o://555f0a7190facdc90d423cdb23c12b20fb0d1e77541d1a307a9ee3eba5302d74" gracePeriod=2 Jan 28 18:15:28 crc kubenswrapper[4811]: I0128 18:15:28.631412 4811 generic.go:334] "Generic (PLEG): container finished" podID="5dfb1906-b61d-4953-9349-ea4ccb7088fd" containerID="555f0a7190facdc90d423cdb23c12b20fb0d1e77541d1a307a9ee3eba5302d74" exitCode=0 Jan 28 18:15:28 crc kubenswrapper[4811]: I0128 18:15:28.631467 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s28vp" event={"ID":"5dfb1906-b61d-4953-9349-ea4ccb7088fd","Type":"ContainerDied","Data":"555f0a7190facdc90d423cdb23c12b20fb0d1e77541d1a307a9ee3eba5302d74"} Jan 28 18:15:29 crc kubenswrapper[4811]: I0128 18:15:29.396996 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s28vp" Jan 28 18:15:29 crc kubenswrapper[4811]: I0128 18:15:29.520604 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7tqtf\" (UniqueName: \"kubernetes.io/projected/5dfb1906-b61d-4953-9349-ea4ccb7088fd-kube-api-access-7tqtf\") pod \"5dfb1906-b61d-4953-9349-ea4ccb7088fd\" (UID: \"5dfb1906-b61d-4953-9349-ea4ccb7088fd\") " Jan 28 18:15:29 crc kubenswrapper[4811]: I0128 18:15:29.520726 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5dfb1906-b61d-4953-9349-ea4ccb7088fd-utilities\") pod \"5dfb1906-b61d-4953-9349-ea4ccb7088fd\" (UID: \"5dfb1906-b61d-4953-9349-ea4ccb7088fd\") " Jan 28 18:15:29 crc kubenswrapper[4811]: I0128 18:15:29.520839 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5dfb1906-b61d-4953-9349-ea4ccb7088fd-catalog-content\") pod \"5dfb1906-b61d-4953-9349-ea4ccb7088fd\" (UID: \"5dfb1906-b61d-4953-9349-ea4ccb7088fd\") " Jan 28 18:15:29 crc kubenswrapper[4811]: I0128 18:15:29.521546 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5dfb1906-b61d-4953-9349-ea4ccb7088fd-utilities" (OuterVolumeSpecName: "utilities") pod "5dfb1906-b61d-4953-9349-ea4ccb7088fd" (UID: "5dfb1906-b61d-4953-9349-ea4ccb7088fd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 18:15:29 crc kubenswrapper[4811]: I0128 18:15:29.521740 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5dfb1906-b61d-4953-9349-ea4ccb7088fd-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 18:15:29 crc kubenswrapper[4811]: I0128 18:15:29.526812 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5dfb1906-b61d-4953-9349-ea4ccb7088fd-kube-api-access-7tqtf" (OuterVolumeSpecName: "kube-api-access-7tqtf") pod "5dfb1906-b61d-4953-9349-ea4ccb7088fd" (UID: "5dfb1906-b61d-4953-9349-ea4ccb7088fd"). InnerVolumeSpecName "kube-api-access-7tqtf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 18:15:29 crc kubenswrapper[4811]: I0128 18:15:29.541932 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5dfb1906-b61d-4953-9349-ea4ccb7088fd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5dfb1906-b61d-4953-9349-ea4ccb7088fd" (UID: "5dfb1906-b61d-4953-9349-ea4ccb7088fd"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 18:15:29 crc kubenswrapper[4811]: I0128 18:15:29.623776 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7tqtf\" (UniqueName: \"kubernetes.io/projected/5dfb1906-b61d-4953-9349-ea4ccb7088fd-kube-api-access-7tqtf\") on node \"crc\" DevicePath \"\"" Jan 28 18:15:29 crc kubenswrapper[4811]: I0128 18:15:29.623817 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5dfb1906-b61d-4953-9349-ea4ccb7088fd-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 18:15:29 crc kubenswrapper[4811]: I0128 18:15:29.644061 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s28vp" event={"ID":"5dfb1906-b61d-4953-9349-ea4ccb7088fd","Type":"ContainerDied","Data":"f1a4d830e8bf986e9bb12f10168f6a40894f7a46d944946e4254c3f5c28952ad"} Jan 28 18:15:29 crc kubenswrapper[4811]: I0128 18:15:29.644136 4811 scope.go:117] "RemoveContainer" containerID="555f0a7190facdc90d423cdb23c12b20fb0d1e77541d1a307a9ee3eba5302d74" Jan 28 18:15:29 crc kubenswrapper[4811]: I0128 18:15:29.644221 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s28vp" Jan 28 18:15:29 crc kubenswrapper[4811]: I0128 18:15:29.673166 4811 scope.go:117] "RemoveContainer" containerID="bb2aec129f6fbb2473e3874812c2702b15050752b817e86c6b107e09e9c9d201" Jan 28 18:15:29 crc kubenswrapper[4811]: I0128 18:15:29.683787 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-s28vp"] Jan 28 18:15:29 crc kubenswrapper[4811]: I0128 18:15:29.694653 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-s28vp"] Jan 28 18:15:29 crc kubenswrapper[4811]: I0128 18:15:29.715896 4811 scope.go:117] "RemoveContainer" containerID="08c2fbd956012d14af3ba2bc6c19ea37af8264bcff0de2bcad4a959949d8155a" Jan 28 18:15:30 crc kubenswrapper[4811]: I0128 18:15:30.351761 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5dfb1906-b61d-4953-9349-ea4ccb7088fd" path="/var/lib/kubelet/pods/5dfb1906-b61d-4953-9349-ea4ccb7088fd/volumes" Jan 28 18:15:33 crc kubenswrapper[4811]: I0128 18:15:33.087758 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 18:15:33 crc kubenswrapper[4811]: I0128 18:15:33.088263 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 18:15:35 crc kubenswrapper[4811]: I0128 18:15:35.596109 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-bqrq8" Jan 28 18:15:35 crc kubenswrapper[4811]: I0128 18:15:35.596627 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-bqrq8" Jan 28 18:15:35 crc kubenswrapper[4811]: I0128 18:15:35.661470 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/certified-operators-bqrq8" Jan 28 18:15:35 crc kubenswrapper[4811]: I0128 18:15:35.766164 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-bqrq8" Jan 28 18:15:35 crc kubenswrapper[4811]: I0128 18:15:35.903087 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bqrq8"] Jan 28 18:15:37 crc kubenswrapper[4811]: I0128 18:15:37.740715 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-bqrq8" podUID="65a87541-aff8-46ab-ae81-33ba87f41d4c" containerName="registry-server" containerID="cri-o://ccf4493cf07cc00229795dbe73c5112fe61a1d2604f39079dc846106d708b044" gracePeriod=2 Jan 28 18:15:38 crc kubenswrapper[4811]: I0128 18:15:38.151169 4811 scope.go:117] "RemoveContainer" containerID="b467fb8cbbce2277767cc26cb35d2a020c7e93c806fabc9d86ffba6dd3cdd98d" Jan 28 18:15:38 crc kubenswrapper[4811]: I0128 18:15:38.330972 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bqrq8" Jan 28 18:15:38 crc kubenswrapper[4811]: I0128 18:15:38.435709 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65a87541-aff8-46ab-ae81-33ba87f41d4c-catalog-content\") pod \"65a87541-aff8-46ab-ae81-33ba87f41d4c\" (UID: \"65a87541-aff8-46ab-ae81-33ba87f41d4c\") " Jan 28 18:15:38 crc kubenswrapper[4811]: I0128 18:15:38.436202 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vkhl7\" (UniqueName: \"kubernetes.io/projected/65a87541-aff8-46ab-ae81-33ba87f41d4c-kube-api-access-vkhl7\") pod \"65a87541-aff8-46ab-ae81-33ba87f41d4c\" (UID: \"65a87541-aff8-46ab-ae81-33ba87f41d4c\") " Jan 28 18:15:38 crc kubenswrapper[4811]: I0128 18:15:38.436322 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65a87541-aff8-46ab-ae81-33ba87f41d4c-utilities\") pod \"65a87541-aff8-46ab-ae81-33ba87f41d4c\" (UID: \"65a87541-aff8-46ab-ae81-33ba87f41d4c\") " Jan 28 18:15:38 crc kubenswrapper[4811]: I0128 18:15:38.437061 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65a87541-aff8-46ab-ae81-33ba87f41d4c-utilities" (OuterVolumeSpecName: "utilities") pod "65a87541-aff8-46ab-ae81-33ba87f41d4c" (UID: "65a87541-aff8-46ab-ae81-33ba87f41d4c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 18:15:38 crc kubenswrapper[4811]: I0128 18:15:38.438905 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65a87541-aff8-46ab-ae81-33ba87f41d4c-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 18:15:38 crc kubenswrapper[4811]: I0128 18:15:38.442168 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65a87541-aff8-46ab-ae81-33ba87f41d4c-kube-api-access-vkhl7" (OuterVolumeSpecName: "kube-api-access-vkhl7") pod "65a87541-aff8-46ab-ae81-33ba87f41d4c" (UID: "65a87541-aff8-46ab-ae81-33ba87f41d4c"). InnerVolumeSpecName "kube-api-access-vkhl7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 18:15:38 crc kubenswrapper[4811]: I0128 18:15:38.481925 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65a87541-aff8-46ab-ae81-33ba87f41d4c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "65a87541-aff8-46ab-ae81-33ba87f41d4c" (UID: "65a87541-aff8-46ab-ae81-33ba87f41d4c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 18:15:38 crc kubenswrapper[4811]: I0128 18:15:38.540601 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vkhl7\" (UniqueName: \"kubernetes.io/projected/65a87541-aff8-46ab-ae81-33ba87f41d4c-kube-api-access-vkhl7\") on node \"crc\" DevicePath \"\"" Jan 28 18:15:38 crc kubenswrapper[4811]: I0128 18:15:38.540652 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65a87541-aff8-46ab-ae81-33ba87f41d4c-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 18:15:38 crc kubenswrapper[4811]: I0128 18:15:38.767060 4811 generic.go:334] "Generic (PLEG): container finished" podID="65a87541-aff8-46ab-ae81-33ba87f41d4c" containerID="ccf4493cf07cc00229795dbe73c5112fe61a1d2604f39079dc846106d708b044" exitCode=0 Jan 28 18:15:38 crc kubenswrapper[4811]: I0128 18:15:38.767128 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bqrq8" Jan 28 18:15:38 crc kubenswrapper[4811]: I0128 18:15:38.767136 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bqrq8" event={"ID":"65a87541-aff8-46ab-ae81-33ba87f41d4c","Type":"ContainerDied","Data":"ccf4493cf07cc00229795dbe73c5112fe61a1d2604f39079dc846106d708b044"} Jan 28 18:15:38 crc kubenswrapper[4811]: I0128 18:15:38.768590 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bqrq8" event={"ID":"65a87541-aff8-46ab-ae81-33ba87f41d4c","Type":"ContainerDied","Data":"202cf0137c9f762d83d90a7510ae3dee837bd2b44f7075f95373687a5ed55d4d"} Jan 28 18:15:38 crc kubenswrapper[4811]: I0128 18:15:38.768669 4811 scope.go:117] "RemoveContainer" containerID="ccf4493cf07cc00229795dbe73c5112fe61a1d2604f39079dc846106d708b044" Jan 28 18:15:38 crc kubenswrapper[4811]: I0128 18:15:38.805383 4811 scope.go:117] "RemoveContainer" containerID="34771992e655f9c1797ed190231c663e3b6dbc67c22d68ede9df90919456a34d" Jan 28 18:15:38 crc kubenswrapper[4811]: I0128 18:15:38.823520 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bqrq8"] Jan 28 18:15:38 crc kubenswrapper[4811]: I0128 18:15:38.841765 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-bqrq8"] Jan 28 18:15:38 crc kubenswrapper[4811]: I0128 18:15:38.843106 4811 scope.go:117] "RemoveContainer" containerID="2e9db1910288aff43e1027ec42a579d47e57eb690c45bd8e90044ac7aa1df985" Jan 28 18:15:38 crc kubenswrapper[4811]: I0128 18:15:38.870539 4811 scope.go:117] "RemoveContainer" containerID="ccf4493cf07cc00229795dbe73c5112fe61a1d2604f39079dc846106d708b044" Jan 28 18:15:38 crc kubenswrapper[4811]: E0128 18:15:38.871087 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ccf4493cf07cc00229795dbe73c5112fe61a1d2604f39079dc846106d708b044\": container with ID starting with 
ccf4493cf07cc00229795dbe73c5112fe61a1d2604f39079dc846106d708b044 not found: ID does not exist" containerID="ccf4493cf07cc00229795dbe73c5112fe61a1d2604f39079dc846106d708b044" Jan 28 18:15:38 crc kubenswrapper[4811]: I0128 18:15:38.871125 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ccf4493cf07cc00229795dbe73c5112fe61a1d2604f39079dc846106d708b044"} err="failed to get container status \"ccf4493cf07cc00229795dbe73c5112fe61a1d2604f39079dc846106d708b044\": rpc error: code = NotFound desc = could not find container \"ccf4493cf07cc00229795dbe73c5112fe61a1d2604f39079dc846106d708b044\": container with ID starting with ccf4493cf07cc00229795dbe73c5112fe61a1d2604f39079dc846106d708b044 not found: ID does not exist" Jan 28 18:15:38 crc kubenswrapper[4811]: I0128 18:15:38.871151 4811 scope.go:117] "RemoveContainer" containerID="34771992e655f9c1797ed190231c663e3b6dbc67c22d68ede9df90919456a34d" Jan 28 18:15:38 crc kubenswrapper[4811]: E0128 18:15:38.871615 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34771992e655f9c1797ed190231c663e3b6dbc67c22d68ede9df90919456a34d\": container with ID starting with 34771992e655f9c1797ed190231c663e3b6dbc67c22d68ede9df90919456a34d not found: ID does not exist" containerID="34771992e655f9c1797ed190231c663e3b6dbc67c22d68ede9df90919456a34d" Jan 28 18:15:38 crc kubenswrapper[4811]: I0128 18:15:38.871810 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34771992e655f9c1797ed190231c663e3b6dbc67c22d68ede9df90919456a34d"} err="failed to get container status \"34771992e655f9c1797ed190231c663e3b6dbc67c22d68ede9df90919456a34d\": rpc error: code = NotFound desc = could not find container \"34771992e655f9c1797ed190231c663e3b6dbc67c22d68ede9df90919456a34d\": container with ID starting with 34771992e655f9c1797ed190231c663e3b6dbc67c22d68ede9df90919456a34d not found: ID does not exist" Jan 28 18:15:38 crc kubenswrapper[4811]: I0128 18:15:38.871958 4811 scope.go:117] "RemoveContainer" containerID="2e9db1910288aff43e1027ec42a579d47e57eb690c45bd8e90044ac7aa1df985" Jan 28 18:15:38 crc kubenswrapper[4811]: E0128 18:15:38.872665 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e9db1910288aff43e1027ec42a579d47e57eb690c45bd8e90044ac7aa1df985\": container with ID starting with 2e9db1910288aff43e1027ec42a579d47e57eb690c45bd8e90044ac7aa1df985 not found: ID does not exist" containerID="2e9db1910288aff43e1027ec42a579d47e57eb690c45bd8e90044ac7aa1df985" Jan 28 18:15:38 crc kubenswrapper[4811]: I0128 18:15:38.872704 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e9db1910288aff43e1027ec42a579d47e57eb690c45bd8e90044ac7aa1df985"} err="failed to get container status \"2e9db1910288aff43e1027ec42a579d47e57eb690c45bd8e90044ac7aa1df985\": rpc error: code = NotFound desc = could not find container \"2e9db1910288aff43e1027ec42a579d47e57eb690c45bd8e90044ac7aa1df985\": container with ID starting with 2e9db1910288aff43e1027ec42a579d47e57eb690c45bd8e90044ac7aa1df985 not found: ID does not exist" Jan 28 18:15:40 crc kubenswrapper[4811]: I0128 18:15:40.373693 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65a87541-aff8-46ab-ae81-33ba87f41d4c" path="/var/lib/kubelet/pods/65a87541-aff8-46ab-ae81-33ba87f41d4c/volumes" Jan 28 18:16:03 crc kubenswrapper[4811]: I0128 18:16:03.087712 
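The NotFound sequence above ("ContainerStatus from runtime service failed" followed by "DeleteContainer returned error") is benign: the container was already removed, so the follow-up status lookup fails with NotFound, which the kubelet logs and then ignores. Treating NotFound as success is what makes the cleanup idempotent. A sketch of the pattern (illustrative error values shaped like the gRPC messages in the log, not the CRI client itself):

    package main

    import (
        "errors"
        "fmt"
    )

    // Sketch of idempotent container cleanup: a NotFound after a delete is not
    // a failure, it just means the work is already done.
    var errNotFound = errors.New("rpc error: code = NotFound")

    func containerStatus(known map[string]bool, id string) error {
        if !known[id] {
            return fmt.Errorf("%w desc = could not find container %q", errNotFound, id)
        }
        return nil
    }

    func removeContainer(known map[string]bool, id string) {
        delete(known, id) // the actual removal
        if err := containerStatus(known, id); errors.Is(err, errNotFound) {
            fmt.Println("already gone, treating as removed:", err)
        }
    }

    func main() {
        removeContainer(map[string]bool{"ccf4493c": true}, "ccf4493c")
    }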
Jan 28 18:16:03 crc kubenswrapper[4811]: I0128 18:16:03.087712 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 18:16:03 crc kubenswrapper[4811]: I0128 18:16:03.088227 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 18:16:03 crc kubenswrapper[4811]: I0128 18:16:03.088272 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6"
Jan 28 18:16:03 crc kubenswrapper[4811]: I0128 18:16:03.089227 4811 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b4f78bf8172cfb57ab13039da08673144164a42ebc3fd398b0a09a9d5f8fcb12"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 28 18:16:03 crc kubenswrapper[4811]: I0128 18:16:03.089298 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://b4f78bf8172cfb57ab13039da08673144164a42ebc3fd398b0a09a9d5f8fcb12" gracePeriod=600
Jan 28 18:16:03 crc kubenswrapper[4811]: E0128 18:16:03.419782 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 18:16:04 crc kubenswrapper[4811]: I0128 18:16:04.049244 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="b4f78bf8172cfb57ab13039da08673144164a42ebc3fd398b0a09a9d5f8fcb12" exitCode=0
Jan 28 18:16:04 crc kubenswrapper[4811]: I0128 18:16:04.049324 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"b4f78bf8172cfb57ab13039da08673144164a42ebc3fd398b0a09a9d5f8fcb12"}
Jan 28 18:16:04 crc kubenswrapper[4811]: I0128 18:16:04.049645 4811 scope.go:117] "RemoveContainer" containerID="aeca1ea953b265c0f4e346981f2f377031cd40369c372fe999b89d4262909504"
Jan 28 18:16:04 crc kubenswrapper[4811]: I0128 18:16:04.050403 4811 scope.go:117] "RemoveContainer" containerID="b4f78bf8172cfb57ab13039da08673144164a42ebc3fd398b0a09a9d5f8fcb12"
Jan 28 18:16:04 crc kubenswrapper[4811]: E0128 18:16:04.050783 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 18:16:16 crc kubenswrapper[4811]: I0128 18:16:16.343094 4811 scope.go:117] "RemoveContainer" containerID="b4f78bf8172cfb57ab13039da08673144164a42ebc3fd398b0a09a9d5f8fcb12"
Jan 28 18:16:16 crc kubenswrapper[4811]: E0128 18:16:16.344076 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 18:16:31 crc kubenswrapper[4811]: I0128 18:16:31.340206 4811 scope.go:117] "RemoveContainer" containerID="b4f78bf8172cfb57ab13039da08673144164a42ebc3fd398b0a09a9d5f8fcb12"
Jan 28 18:16:31 crc kubenswrapper[4811]: E0128 18:16:31.341153 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 18:16:46 crc kubenswrapper[4811]: I0128 18:16:46.340848 4811 scope.go:117] "RemoveContainer" containerID="b4f78bf8172cfb57ab13039da08673144164a42ebc3fd398b0a09a9d5f8fcb12"
Jan 28 18:16:46 crc kubenswrapper[4811]: E0128 18:16:46.341578 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 18:16:58 crc kubenswrapper[4811]: I0128 18:16:58.348411 4811 scope.go:117] "RemoveContainer" containerID="b4f78bf8172cfb57ab13039da08673144164a42ebc3fd398b0a09a9d5f8fcb12"
Jan 28 18:16:58 crc kubenswrapper[4811]: E0128 18:16:58.350087 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 18:17:09 crc kubenswrapper[4811]: I0128 18:17:09.346373 4811 scope.go:117] "RemoveContainer" containerID="b4f78bf8172cfb57ab13039da08673144164a42ebc3fd398b0a09a9d5f8fcb12"
Jan 28 18:17:09 crc kubenswrapper[4811]: E0128 18:17:09.348261 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
generic.go:334] "Generic (PLEG): container finished" podID="8df61c74-a922-4222-a4d1-64f4d871c1cd" containerID="afdf191d963e4f1af55dbc07bbe757baf7e507e646836215c83e489a9acb14ea" exitCode=0 Jan 28 18:17:09 crc kubenswrapper[4811]: I0128 18:17:09.711331 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" event={"ID":"8df61c74-a922-4222-a4d1-64f4d871c1cd","Type":"ContainerDied","Data":"afdf191d963e4f1af55dbc07bbe757baf7e507e646836215c83e489a9acb14ea"} Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.215957 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.330877 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-ssh-key-openstack-cell1\") pod \"8df61c74-a922-4222-a4d1-64f4d871c1cd\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.330981 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-cell1-compute-config-0\") pod \"8df61c74-a922-4222-a4d1-64f4d871c1cd\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.331043 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4t5w\" (UniqueName: \"kubernetes.io/projected/8df61c74-a922-4222-a4d1-64f4d871c1cd-kube-api-access-s4t5w\") pod \"8df61c74-a922-4222-a4d1-64f4d871c1cd\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.331074 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-cells-global-config-0\") pod \"8df61c74-a922-4222-a4d1-64f4d871c1cd\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.331153 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-ceph\") pod \"8df61c74-a922-4222-a4d1-64f4d871c1cd\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.331196 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-cell1-combined-ca-bundle\") pod \"8df61c74-a922-4222-a4d1-64f4d871c1cd\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.331241 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-cells-global-config-1\") pod \"8df61c74-a922-4222-a4d1-64f4d871c1cd\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.331305 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: 
\"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-migration-ssh-key-0\") pod \"8df61c74-a922-4222-a4d1-64f4d871c1cd\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.331359 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-inventory\") pod \"8df61c74-a922-4222-a4d1-64f4d871c1cd\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.331388 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-cell1-compute-config-1\") pod \"8df61c74-a922-4222-a4d1-64f4d871c1cd\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.331453 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-migration-ssh-key-1\") pod \"8df61c74-a922-4222-a4d1-64f4d871c1cd\" (UID: \"8df61c74-a922-4222-a4d1-64f4d871c1cd\") " Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.338468 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8df61c74-a922-4222-a4d1-64f4d871c1cd-kube-api-access-s4t5w" (OuterVolumeSpecName: "kube-api-access-s4t5w") pod "8df61c74-a922-4222-a4d1-64f4d871c1cd" (UID: "8df61c74-a922-4222-a4d1-64f4d871c1cd"). InnerVolumeSpecName "kube-api-access-s4t5w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.351385 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-ceph" (OuterVolumeSpecName: "ceph") pod "8df61c74-a922-4222-a4d1-64f4d871c1cd" (UID: "8df61c74-a922-4222-a4d1-64f4d871c1cd"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.352211 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-cell1-combined-ca-bundle" (OuterVolumeSpecName: "nova-cell1-combined-ca-bundle") pod "8df61c74-a922-4222-a4d1-64f4d871c1cd" (UID: "8df61c74-a922-4222-a4d1-64f4d871c1cd"). InnerVolumeSpecName "nova-cell1-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.362708 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-cells-global-config-1" (OuterVolumeSpecName: "nova-cells-global-config-1") pod "8df61c74-a922-4222-a4d1-64f4d871c1cd" (UID: "8df61c74-a922-4222-a4d1-64f4d871c1cd"). InnerVolumeSpecName "nova-cells-global-config-1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.365726 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "8df61c74-a922-4222-a4d1-64f4d871c1cd" (UID: "8df61c74-a922-4222-a4d1-64f4d871c1cd"). InnerVolumeSpecName "nova-migration-ssh-key-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.367024 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "8df61c74-a922-4222-a4d1-64f4d871c1cd" (UID: "8df61c74-a922-4222-a4d1-64f4d871c1cd"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.368745 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "8df61c74-a922-4222-a4d1-64f4d871c1cd" (UID: "8df61c74-a922-4222-a4d1-64f4d871c1cd"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.368876 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-cells-global-config-0" (OuterVolumeSpecName: "nova-cells-global-config-0") pod "8df61c74-a922-4222-a4d1-64f4d871c1cd" (UID: "8df61c74-a922-4222-a4d1-64f4d871c1cd"). InnerVolumeSpecName "nova-cells-global-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.369332 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "8df61c74-a922-4222-a4d1-64f4d871c1cd" (UID: "8df61c74-a922-4222-a4d1-64f4d871c1cd"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.376034 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "8df61c74-a922-4222-a4d1-64f4d871c1cd" (UID: "8df61c74-a922-4222-a4d1-64f4d871c1cd"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.383642 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-inventory" (OuterVolumeSpecName: "inventory") pod "8df61c74-a922-4222-a4d1-64f4d871c1cd" (UID: "8df61c74-a922-4222-a4d1-64f4d871c1cd"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.436276 4811 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.436340 4811 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.436370 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4t5w\" (UniqueName: \"kubernetes.io/projected/8df61c74-a922-4222-a4d1-64f4d871c1cd-kube-api-access-s4t5w\") on node \"crc\" DevicePath \"\"" Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.436392 4811 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-cells-global-config-0\") on node \"crc\" DevicePath \"\"" Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.436416 4811 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-ceph\") on node \"crc\" DevicePath \"\"" Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.436479 4811 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-cell1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.436522 4811 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-cells-global-config-1\") on node \"crc\" DevicePath \"\"" Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.436548 4811 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.436573 4811 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-inventory\") on node \"crc\" DevicePath \"\"" Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.436597 4811 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.436620 4811 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/8df61c74-a922-4222-a4d1-64f4d871c1cd-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.734590 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4" event={"ID":"8df61c74-a922-4222-a4d1-64f4d871c1cd","Type":"ContainerDied","Data":"eb70ce05819f8e30b344cc05813740377c12646c7e3d2092e23fa26616389166"} Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 
Jan 28 18:17:11 crc kubenswrapper[4811]: I0128 18:17:11.734652 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4"
Jan 28 18:17:12 crc kubenswrapper[4811]: I0128 18:17:12.622088 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xpsq6"]
Jan 28 18:17:12 crc kubenswrapper[4811]: E0128 18:17:12.622567 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5dfb1906-b61d-4953-9349-ea4ccb7088fd" containerName="registry-server"
Jan 28 18:17:12 crc kubenswrapper[4811]: I0128 18:17:12.622581 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="5dfb1906-b61d-4953-9349-ea4ccb7088fd" containerName="registry-server"
Jan 28 18:17:12 crc kubenswrapper[4811]: E0128 18:17:12.622600 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65a87541-aff8-46ab-ae81-33ba87f41d4c" containerName="extract-content"
Jan 28 18:17:12 crc kubenswrapper[4811]: I0128 18:17:12.622608 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="65a87541-aff8-46ab-ae81-33ba87f41d4c" containerName="extract-content"
Jan 28 18:17:12 crc kubenswrapper[4811]: E0128 18:17:12.622628 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8df61c74-a922-4222-a4d1-64f4d871c1cd" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1"
Jan 28 18:17:12 crc kubenswrapper[4811]: I0128 18:17:12.622636 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="8df61c74-a922-4222-a4d1-64f4d871c1cd" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1"
Jan 28 18:17:12 crc kubenswrapper[4811]: E0128 18:17:12.622648 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5dfb1906-b61d-4953-9349-ea4ccb7088fd" containerName="extract-content"
Jan 28 18:17:12 crc kubenswrapper[4811]: I0128 18:17:12.622653 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="5dfb1906-b61d-4953-9349-ea4ccb7088fd" containerName="extract-content"
Jan 28 18:17:12 crc kubenswrapper[4811]: E0128 18:17:12.622673 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65a87541-aff8-46ab-ae81-33ba87f41d4c" containerName="registry-server"
Jan 28 18:17:12 crc kubenswrapper[4811]: I0128 18:17:12.622680 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="65a87541-aff8-46ab-ae81-33ba87f41d4c" containerName="registry-server"
Jan 28 18:17:12 crc kubenswrapper[4811]: E0128 18:17:12.622689 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5dfb1906-b61d-4953-9349-ea4ccb7088fd" containerName="extract-utilities"
Jan 28 18:17:12 crc kubenswrapper[4811]: I0128 18:17:12.622695 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="5dfb1906-b61d-4953-9349-ea4ccb7088fd" containerName="extract-utilities"
Jan 28 18:17:12 crc kubenswrapper[4811]: E0128 18:17:12.622707 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65a87541-aff8-46ab-ae81-33ba87f41d4c" containerName="extract-utilities"
Jan 28 18:17:12 crc kubenswrapper[4811]: I0128 18:17:12.622712 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="65a87541-aff8-46ab-ae81-33ba87f41d4c" containerName="extract-utilities"
Jan 28 18:17:12 crc kubenswrapper[4811]: I0128 18:17:12.622897 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="5dfb1906-b61d-4953-9349-ea4ccb7088fd" containerName="registry-server"
Jan 28 18:17:12 crc kubenswrapper[4811]: I0128 18:17:12.622919 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="65a87541-aff8-46ab-ae81-33ba87f41d4c" containerName="registry-server"
Jan 28 18:17:12 crc kubenswrapper[4811]: I0128 18:17:12.622930 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="8df61c74-a922-4222-a4d1-64f4d871c1cd" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1"
Jan 28 18:17:12 crc kubenswrapper[4811]: I0128 18:17:12.624374 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xpsq6"
Jan 28 18:17:12 crc kubenswrapper[4811]: I0128 18:17:12.637469 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xpsq6"]
Jan 28 18:17:12 crc kubenswrapper[4811]: I0128 18:17:12.764012 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c93eb071-07f4-40d4-b007-3e3665279f95-catalog-content\") pod \"community-operators-xpsq6\" (UID: \"c93eb071-07f4-40d4-b007-3e3665279f95\") " pod="openshift-marketplace/community-operators-xpsq6"
Jan 28 18:17:12 crc kubenswrapper[4811]: I0128 18:17:12.764115 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c93eb071-07f4-40d4-b007-3e3665279f95-utilities\") pod \"community-operators-xpsq6\" (UID: \"c93eb071-07f4-40d4-b007-3e3665279f95\") " pod="openshift-marketplace/community-operators-xpsq6"
Jan 28 18:17:12 crc kubenswrapper[4811]: I0128 18:17:12.764171 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p87xc\" (UniqueName: \"kubernetes.io/projected/c93eb071-07f4-40d4-b007-3e3665279f95-kube-api-access-p87xc\") pod \"community-operators-xpsq6\" (UID: \"c93eb071-07f4-40d4-b007-3e3665279f95\") " pod="openshift-marketplace/community-operators-xpsq6"
Jan 28 18:17:12 crc kubenswrapper[4811]: I0128 18:17:12.866525 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c93eb071-07f4-40d4-b007-3e3665279f95-utilities\") pod \"community-operators-xpsq6\" (UID: \"c93eb071-07f4-40d4-b007-3e3665279f95\") " pod="openshift-marketplace/community-operators-xpsq6"
Jan 28 18:17:12 crc kubenswrapper[4811]: I0128 18:17:12.866649 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p87xc\" (UniqueName: \"kubernetes.io/projected/c93eb071-07f4-40d4-b007-3e3665279f95-kube-api-access-p87xc\") pod \"community-operators-xpsq6\" (UID: \"c93eb071-07f4-40d4-b007-3e3665279f95\") " pod="openshift-marketplace/community-operators-xpsq6"
Jan 28 18:17:12 crc kubenswrapper[4811]: I0128 18:17:12.866765 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c93eb071-07f4-40d4-b007-3e3665279f95-catalog-content\") pod \"community-operators-xpsq6\" (UID: \"c93eb071-07f4-40d4-b007-3e3665279f95\") " pod="openshift-marketplace/community-operators-xpsq6"
Jan 28 18:17:12 crc kubenswrapper[4811]: I0128 18:17:12.867214 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c93eb071-07f4-40d4-b007-3e3665279f95-catalog-content\") pod \"community-operators-xpsq6\" (UID: \"c93eb071-07f4-40d4-b007-3e3665279f95\") " pod="openshift-marketplace/community-operators-xpsq6"
\"kubernetes.io/empty-dir/c93eb071-07f4-40d4-b007-3e3665279f95-catalog-content\") pod \"community-operators-xpsq6\" (UID: \"c93eb071-07f4-40d4-b007-3e3665279f95\") " pod="openshift-marketplace/community-operators-xpsq6" Jan 28 18:17:12 crc kubenswrapper[4811]: I0128 18:17:12.867500 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c93eb071-07f4-40d4-b007-3e3665279f95-utilities\") pod \"community-operators-xpsq6\" (UID: \"c93eb071-07f4-40d4-b007-3e3665279f95\") " pod="openshift-marketplace/community-operators-xpsq6" Jan 28 18:17:12 crc kubenswrapper[4811]: I0128 18:17:12.889477 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p87xc\" (UniqueName: \"kubernetes.io/projected/c93eb071-07f4-40d4-b007-3e3665279f95-kube-api-access-p87xc\") pod \"community-operators-xpsq6\" (UID: \"c93eb071-07f4-40d4-b007-3e3665279f95\") " pod="openshift-marketplace/community-operators-xpsq6" Jan 28 18:17:12 crc kubenswrapper[4811]: I0128 18:17:12.956014 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xpsq6" Jan 28 18:17:13 crc kubenswrapper[4811]: I0128 18:17:13.521403 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xpsq6"] Jan 28 18:17:13 crc kubenswrapper[4811]: W0128 18:17:13.522298 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc93eb071_07f4_40d4_b007_3e3665279f95.slice/crio-8ee67b02c56dc3f9856038e1d7eb55a91675c4072230da93ac2fda1e089b66a9 WatchSource:0}: Error finding container 8ee67b02c56dc3f9856038e1d7eb55a91675c4072230da93ac2fda1e089b66a9: Status 404 returned error can't find the container with id 8ee67b02c56dc3f9856038e1d7eb55a91675c4072230da93ac2fda1e089b66a9 Jan 28 18:17:13 crc kubenswrapper[4811]: I0128 18:17:13.753350 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xpsq6" event={"ID":"c93eb071-07f4-40d4-b007-3e3665279f95","Type":"ContainerStarted","Data":"a368623fcfdd959e0efd9a821637af118f2d2e3764d4aa2c7713ad8d1415dde2"} Jan 28 18:17:13 crc kubenswrapper[4811]: I0128 18:17:13.753392 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xpsq6" event={"ID":"c93eb071-07f4-40d4-b007-3e3665279f95","Type":"ContainerStarted","Data":"8ee67b02c56dc3f9856038e1d7eb55a91675c4072230da93ac2fda1e089b66a9"} Jan 28 18:17:14 crc kubenswrapper[4811]: I0128 18:17:14.764475 4811 generic.go:334] "Generic (PLEG): container finished" podID="c93eb071-07f4-40d4-b007-3e3665279f95" containerID="a368623fcfdd959e0efd9a821637af118f2d2e3764d4aa2c7713ad8d1415dde2" exitCode=0 Jan 28 18:17:14 crc kubenswrapper[4811]: I0128 18:17:14.764571 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xpsq6" event={"ID":"c93eb071-07f4-40d4-b007-3e3665279f95","Type":"ContainerDied","Data":"a368623fcfdd959e0efd9a821637af118f2d2e3764d4aa2c7713ad8d1415dde2"} Jan 28 18:17:16 crc kubenswrapper[4811]: I0128 18:17:16.786690 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xpsq6" event={"ID":"c93eb071-07f4-40d4-b007-3e3665279f95","Type":"ContainerStarted","Data":"ea76c27824be27d7740676f2ac20d43a67063c33d5ad7fec86a8a50707e1c023"} Jan 28 18:17:18 crc kubenswrapper[4811]: I0128 18:17:18.807286 4811 generic.go:334] "Generic (PLEG): 
container finished" podID="c93eb071-07f4-40d4-b007-3e3665279f95" containerID="ea76c27824be27d7740676f2ac20d43a67063c33d5ad7fec86a8a50707e1c023" exitCode=0 Jan 28 18:17:18 crc kubenswrapper[4811]: I0128 18:17:18.807377 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xpsq6" event={"ID":"c93eb071-07f4-40d4-b007-3e3665279f95","Type":"ContainerDied","Data":"ea76c27824be27d7740676f2ac20d43a67063c33d5ad7fec86a8a50707e1c023"} Jan 28 18:17:20 crc kubenswrapper[4811]: I0128 18:17:20.837386 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xpsq6" event={"ID":"c93eb071-07f4-40d4-b007-3e3665279f95","Type":"ContainerStarted","Data":"a9a7ce84fd4b9bd228d7c86c540d26d9a596f2163a48fef0bb78b4e1da76ec9a"} Jan 28 18:17:22 crc kubenswrapper[4811]: I0128 18:17:22.956818 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xpsq6" Jan 28 18:17:22 crc kubenswrapper[4811]: I0128 18:17:22.957236 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xpsq6" Jan 28 18:17:23 crc kubenswrapper[4811]: I0128 18:17:23.008915 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xpsq6" Jan 28 18:17:23 crc kubenswrapper[4811]: I0128 18:17:23.036363 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xpsq6" podStartSLOduration=6.5746724069999996 podStartE2EDuration="11.036344824s" podCreationTimestamp="2026-01-28 18:17:12 +0000 UTC" firstStartedPulling="2026-01-28 18:17:14.767624568 +0000 UTC m=+9127.521988171" lastFinishedPulling="2026-01-28 18:17:19.229297005 +0000 UTC m=+9131.983660588" observedRunningTime="2026-01-28 18:17:20.874244654 +0000 UTC m=+9133.628608237" watchObservedRunningTime="2026-01-28 18:17:23.036344824 +0000 UTC m=+9135.790708407" Jan 28 18:17:23 crc kubenswrapper[4811]: I0128 18:17:23.341139 4811 scope.go:117] "RemoveContainer" containerID="b4f78bf8172cfb57ab13039da08673144164a42ebc3fd398b0a09a9d5f8fcb12" Jan 28 18:17:23 crc kubenswrapper[4811]: E0128 18:17:23.341810 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:17:33 crc kubenswrapper[4811]: I0128 18:17:33.008913 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xpsq6" Jan 28 18:17:33 crc kubenswrapper[4811]: I0128 18:17:33.057097 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xpsq6"] Jan 28 18:17:33 crc kubenswrapper[4811]: I0128 18:17:33.976768 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xpsq6" podUID="c93eb071-07f4-40d4-b007-3e3665279f95" containerName="registry-server" containerID="cri-o://a9a7ce84fd4b9bd228d7c86c540d26d9a596f2163a48fef0bb78b4e1da76ec9a" gracePeriod=2 Jan 28 18:17:34 crc kubenswrapper[4811]: I0128 18:17:34.524175 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xpsq6" Jan 28 18:17:34 crc kubenswrapper[4811]: I0128 18:17:34.645898 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p87xc\" (UniqueName: \"kubernetes.io/projected/c93eb071-07f4-40d4-b007-3e3665279f95-kube-api-access-p87xc\") pod \"c93eb071-07f4-40d4-b007-3e3665279f95\" (UID: \"c93eb071-07f4-40d4-b007-3e3665279f95\") " Jan 28 18:17:34 crc kubenswrapper[4811]: I0128 18:17:34.646099 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c93eb071-07f4-40d4-b007-3e3665279f95-utilities\") pod \"c93eb071-07f4-40d4-b007-3e3665279f95\" (UID: \"c93eb071-07f4-40d4-b007-3e3665279f95\") " Jan 28 18:17:34 crc kubenswrapper[4811]: I0128 18:17:34.646132 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c93eb071-07f4-40d4-b007-3e3665279f95-catalog-content\") pod \"c93eb071-07f4-40d4-b007-3e3665279f95\" (UID: \"c93eb071-07f4-40d4-b007-3e3665279f95\") " Jan 28 18:17:34 crc kubenswrapper[4811]: I0128 18:17:34.647522 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c93eb071-07f4-40d4-b007-3e3665279f95-utilities" (OuterVolumeSpecName: "utilities") pod "c93eb071-07f4-40d4-b007-3e3665279f95" (UID: "c93eb071-07f4-40d4-b007-3e3665279f95"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 18:17:34 crc kubenswrapper[4811]: I0128 18:17:34.652250 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c93eb071-07f4-40d4-b007-3e3665279f95-kube-api-access-p87xc" (OuterVolumeSpecName: "kube-api-access-p87xc") pod "c93eb071-07f4-40d4-b007-3e3665279f95" (UID: "c93eb071-07f4-40d4-b007-3e3665279f95"). InnerVolumeSpecName "kube-api-access-p87xc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 18:17:34 crc kubenswrapper[4811]: I0128 18:17:34.695821 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c93eb071-07f4-40d4-b007-3e3665279f95-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c93eb071-07f4-40d4-b007-3e3665279f95" (UID: "c93eb071-07f4-40d4-b007-3e3665279f95"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 18:17:34 crc kubenswrapper[4811]: I0128 18:17:34.750849 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p87xc\" (UniqueName: \"kubernetes.io/projected/c93eb071-07f4-40d4-b007-3e3665279f95-kube-api-access-p87xc\") on node \"crc\" DevicePath \"\"" Jan 28 18:17:34 crc kubenswrapper[4811]: I0128 18:17:34.750885 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c93eb071-07f4-40d4-b007-3e3665279f95-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 18:17:34 crc kubenswrapper[4811]: I0128 18:17:34.750896 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c93eb071-07f4-40d4-b007-3e3665279f95-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 18:17:34 crc kubenswrapper[4811]: I0128 18:17:34.987122 4811 generic.go:334] "Generic (PLEG): container finished" podID="c93eb071-07f4-40d4-b007-3e3665279f95" containerID="a9a7ce84fd4b9bd228d7c86c540d26d9a596f2163a48fef0bb78b4e1da76ec9a" exitCode=0 Jan 28 18:17:34 crc kubenswrapper[4811]: I0128 18:17:34.987416 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xpsq6" event={"ID":"c93eb071-07f4-40d4-b007-3e3665279f95","Type":"ContainerDied","Data":"a9a7ce84fd4b9bd228d7c86c540d26d9a596f2163a48fef0bb78b4e1da76ec9a"} Jan 28 18:17:34 crc kubenswrapper[4811]: I0128 18:17:34.987452 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xpsq6" event={"ID":"c93eb071-07f4-40d4-b007-3e3665279f95","Type":"ContainerDied","Data":"8ee67b02c56dc3f9856038e1d7eb55a91675c4072230da93ac2fda1e089b66a9"} Jan 28 18:17:34 crc kubenswrapper[4811]: I0128 18:17:34.987469 4811 scope.go:117] "RemoveContainer" containerID="a9a7ce84fd4b9bd228d7c86c540d26d9a596f2163a48fef0bb78b4e1da76ec9a" Jan 28 18:17:34 crc kubenswrapper[4811]: I0128 18:17:34.987597 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xpsq6" Jan 28 18:17:35 crc kubenswrapper[4811]: I0128 18:17:35.022552 4811 scope.go:117] "RemoveContainer" containerID="ea76c27824be27d7740676f2ac20d43a67063c33d5ad7fec86a8a50707e1c023" Jan 28 18:17:35 crc kubenswrapper[4811]: I0128 18:17:35.034374 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xpsq6"] Jan 28 18:17:35 crc kubenswrapper[4811]: I0128 18:17:35.047048 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xpsq6"] Jan 28 18:17:35 crc kubenswrapper[4811]: I0128 18:17:35.061647 4811 scope.go:117] "RemoveContainer" containerID="a368623fcfdd959e0efd9a821637af118f2d2e3764d4aa2c7713ad8d1415dde2" Jan 28 18:17:35 crc kubenswrapper[4811]: I0128 18:17:35.101267 4811 scope.go:117] "RemoveContainer" containerID="a9a7ce84fd4b9bd228d7c86c540d26d9a596f2163a48fef0bb78b4e1da76ec9a" Jan 28 18:17:35 crc kubenswrapper[4811]: E0128 18:17:35.107238 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9a7ce84fd4b9bd228d7c86c540d26d9a596f2163a48fef0bb78b4e1da76ec9a\": container with ID starting with a9a7ce84fd4b9bd228d7c86c540d26d9a596f2163a48fef0bb78b4e1da76ec9a not found: ID does not exist" containerID="a9a7ce84fd4b9bd228d7c86c540d26d9a596f2163a48fef0bb78b4e1da76ec9a" Jan 28 18:17:35 crc kubenswrapper[4811]: I0128 18:17:35.107315 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9a7ce84fd4b9bd228d7c86c540d26d9a596f2163a48fef0bb78b4e1da76ec9a"} err="failed to get container status \"a9a7ce84fd4b9bd228d7c86c540d26d9a596f2163a48fef0bb78b4e1da76ec9a\": rpc error: code = NotFound desc = could not find container \"a9a7ce84fd4b9bd228d7c86c540d26d9a596f2163a48fef0bb78b4e1da76ec9a\": container with ID starting with a9a7ce84fd4b9bd228d7c86c540d26d9a596f2163a48fef0bb78b4e1da76ec9a not found: ID does not exist" Jan 28 18:17:35 crc kubenswrapper[4811]: I0128 18:17:35.107342 4811 scope.go:117] "RemoveContainer" containerID="ea76c27824be27d7740676f2ac20d43a67063c33d5ad7fec86a8a50707e1c023" Jan 28 18:17:35 crc kubenswrapper[4811]: E0128 18:17:35.107824 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea76c27824be27d7740676f2ac20d43a67063c33d5ad7fec86a8a50707e1c023\": container with ID starting with ea76c27824be27d7740676f2ac20d43a67063c33d5ad7fec86a8a50707e1c023 not found: ID does not exist" containerID="ea76c27824be27d7740676f2ac20d43a67063c33d5ad7fec86a8a50707e1c023" Jan 28 18:17:35 crc kubenswrapper[4811]: I0128 18:17:35.107876 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea76c27824be27d7740676f2ac20d43a67063c33d5ad7fec86a8a50707e1c023"} err="failed to get container status \"ea76c27824be27d7740676f2ac20d43a67063c33d5ad7fec86a8a50707e1c023\": rpc error: code = NotFound desc = could not find container \"ea76c27824be27d7740676f2ac20d43a67063c33d5ad7fec86a8a50707e1c023\": container with ID starting with ea76c27824be27d7740676f2ac20d43a67063c33d5ad7fec86a8a50707e1c023 not found: ID does not exist" Jan 28 18:17:35 crc kubenswrapper[4811]: I0128 18:17:35.107908 4811 scope.go:117] "RemoveContainer" containerID="a368623fcfdd959e0efd9a821637af118f2d2e3764d4aa2c7713ad8d1415dde2" Jan 28 18:17:35 crc kubenswrapper[4811]: E0128 18:17:35.108347 4811 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"a368623fcfdd959e0efd9a821637af118f2d2e3764d4aa2c7713ad8d1415dde2\": container with ID starting with a368623fcfdd959e0efd9a821637af118f2d2e3764d4aa2c7713ad8d1415dde2 not found: ID does not exist" containerID="a368623fcfdd959e0efd9a821637af118f2d2e3764d4aa2c7713ad8d1415dde2" Jan 28 18:17:35 crc kubenswrapper[4811]: I0128 18:17:35.108389 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a368623fcfdd959e0efd9a821637af118f2d2e3764d4aa2c7713ad8d1415dde2"} err="failed to get container status \"a368623fcfdd959e0efd9a821637af118f2d2e3764d4aa2c7713ad8d1415dde2\": rpc error: code = NotFound desc = could not find container \"a368623fcfdd959e0efd9a821637af118f2d2e3764d4aa2c7713ad8d1415dde2\": container with ID starting with a368623fcfdd959e0efd9a821637af118f2d2e3764d4aa2c7713ad8d1415dde2 not found: ID does not exist" Jan 28 18:17:36 crc kubenswrapper[4811]: I0128 18:17:36.352942 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c93eb071-07f4-40d4-b007-3e3665279f95" path="/var/lib/kubelet/pods/c93eb071-07f4-40d4-b007-3e3665279f95/volumes" Jan 28 18:17:37 crc kubenswrapper[4811]: I0128 18:17:37.339057 4811 scope.go:117] "RemoveContainer" containerID="b4f78bf8172cfb57ab13039da08673144164a42ebc3fd398b0a09a9d5f8fcb12" Jan 28 18:17:37 crc kubenswrapper[4811]: E0128 18:17:37.339462 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:17:50 crc kubenswrapper[4811]: I0128 18:17:50.341938 4811 scope.go:117] "RemoveContainer" containerID="b4f78bf8172cfb57ab13039da08673144164a42ebc3fd398b0a09a9d5f8fcb12" Jan 28 18:17:50 crc kubenswrapper[4811]: E0128 18:17:50.344069 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:18:02 crc kubenswrapper[4811]: I0128 18:18:02.340020 4811 scope.go:117] "RemoveContainer" containerID="b4f78bf8172cfb57ab13039da08673144164a42ebc3fd398b0a09a9d5f8fcb12" Jan 28 18:18:02 crc kubenswrapper[4811]: E0128 18:18:02.340926 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:18:15 crc kubenswrapper[4811]: I0128 18:18:15.341524 4811 scope.go:117] "RemoveContainer" containerID="b4f78bf8172cfb57ab13039da08673144164a42ebc3fd398b0a09a9d5f8fcb12" Jan 28 18:18:15 crc kubenswrapper[4811]: E0128 18:18:15.342556 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:18:30 crc kubenswrapper[4811]: I0128 18:18:30.340385 4811 scope.go:117] "RemoveContainer" containerID="b4f78bf8172cfb57ab13039da08673144164a42ebc3fd398b0a09a9d5f8fcb12" Jan 28 18:18:30 crc kubenswrapper[4811]: E0128 18:18:30.341228 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:18:41 crc kubenswrapper[4811]: I0128 18:18:41.340004 4811 scope.go:117] "RemoveContainer" containerID="b4f78bf8172cfb57ab13039da08673144164a42ebc3fd398b0a09a9d5f8fcb12" Jan 28 18:18:41 crc kubenswrapper[4811]: E0128 18:18:41.341048 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:18:56 crc kubenswrapper[4811]: I0128 18:18:56.339779 4811 scope.go:117] "RemoveContainer" containerID="b4f78bf8172cfb57ab13039da08673144164a42ebc3fd398b0a09a9d5f8fcb12" Jan 28 18:18:56 crc kubenswrapper[4811]: E0128 18:18:56.341731 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:19:08 crc kubenswrapper[4811]: I0128 18:19:08.346939 4811 scope.go:117] "RemoveContainer" containerID="b4f78bf8172cfb57ab13039da08673144164a42ebc3fd398b0a09a9d5f8fcb12" Jan 28 18:19:08 crc kubenswrapper[4811]: E0128 18:19:08.347857 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:19:21 crc kubenswrapper[4811]: I0128 18:19:21.341475 4811 scope.go:117] "RemoveContainer" containerID="b4f78bf8172cfb57ab13039da08673144164a42ebc3fd398b0a09a9d5f8fcb12" Jan 28 18:19:21 crc kubenswrapper[4811]: E0128 18:19:21.342329 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:19:30 crc kubenswrapper[4811]: I0128 18:19:30.468127 4811 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-gmjnj container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.13:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 28 18:19:30 crc kubenswrapper[4811]: I0128 18:19:30.468751 4811 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gmjnj" podUID="c2b17da8-0524-4615-a9e5-f1c59a0fde0b" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.13:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 28 18:19:33 crc kubenswrapper[4811]: I0128 18:19:33.338982 4811 scope.go:117] "RemoveContainer" containerID="b4f78bf8172cfb57ab13039da08673144164a42ebc3fd398b0a09a9d5f8fcb12" Jan 28 18:19:33 crc kubenswrapper[4811]: E0128 18:19:33.339805 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:19:46 crc kubenswrapper[4811]: I0128 18:19:46.339629 4811 scope.go:117] "RemoveContainer" containerID="b4f78bf8172cfb57ab13039da08673144164a42ebc3fd398b0a09a9d5f8fcb12" Jan 28 18:19:46 crc kubenswrapper[4811]: E0128 18:19:46.340473 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:19:59 crc kubenswrapper[4811]: I0128 18:19:59.339820 4811 scope.go:117] "RemoveContainer" containerID="b4f78bf8172cfb57ab13039da08673144164a42ebc3fd398b0a09a9d5f8fcb12" Jan 28 18:19:59 crc kubenswrapper[4811]: E0128 18:19:59.340592 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:20:08 crc kubenswrapper[4811]: I0128 18:20:08.291003 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-lqzt5/must-gather-zrjtz"] Jan 28 18:20:08 crc kubenswrapper[4811]: E0128 18:20:08.292081 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c93eb071-07f4-40d4-b007-3e3665279f95" containerName="registry-server" Jan 28 18:20:08 crc kubenswrapper[4811]: I0128 
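
The openshift-config-operator readiness failure above is the classic Go HTTP client timeout: the prober gave up while still waiting for response headers from /healthz. A standalone reproduction, where the 1 s timeout and the skipped TLS verification are assumptions modelled on typical kubelet probe behaviour, not values read from this log:

```go
// Sketch: an HTTPS health check that fails the same way as the probe above
// when the endpoint is slow or unreachable. Timeout and InsecureSkipVerify
// are assumed defaults, not taken from the log.
package main

import (
	"crypto/tls"
	"fmt"
	"net/http"
	"time"
)

func main() {
	client := &http.Client{
		Timeout: 1 * time.Second,
		Transport: &http.Transport{
			TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
		},
	}
	resp, err := client.Get("https://10.217.0.13:8443/healthz")
	if err != nil {
		// e.g. "net/http: request canceled while waiting for connection
		// (Client.Timeout exceeded while awaiting headers)"
		fmt.Println("probe failed:", err)
		return
	}
	resp.Body.Close()
	fmt.Println("probe ok:", resp.Status)
}
```
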
18:20:08.292100 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="c93eb071-07f4-40d4-b007-3e3665279f95" containerName="registry-server" Jan 28 18:20:08 crc kubenswrapper[4811]: E0128 18:20:08.292127 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c93eb071-07f4-40d4-b007-3e3665279f95" containerName="extract-content" Jan 28 18:20:08 crc kubenswrapper[4811]: I0128 18:20:08.292136 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="c93eb071-07f4-40d4-b007-3e3665279f95" containerName="extract-content" Jan 28 18:20:08 crc kubenswrapper[4811]: E0128 18:20:08.292194 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c93eb071-07f4-40d4-b007-3e3665279f95" containerName="extract-utilities" Jan 28 18:20:08 crc kubenswrapper[4811]: I0128 18:20:08.292202 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="c93eb071-07f4-40d4-b007-3e3665279f95" containerName="extract-utilities" Jan 28 18:20:08 crc kubenswrapper[4811]: I0128 18:20:08.292464 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="c93eb071-07f4-40d4-b007-3e3665279f95" containerName="registry-server" Jan 28 18:20:08 crc kubenswrapper[4811]: I0128 18:20:08.293893 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-lqzt5/must-gather-zrjtz" Jan 28 18:20:08 crc kubenswrapper[4811]: I0128 18:20:08.297100 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-lqzt5"/"kube-root-ca.crt" Jan 28 18:20:08 crc kubenswrapper[4811]: I0128 18:20:08.297364 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-lqzt5"/"openshift-service-ca.crt" Jan 28 18:20:08 crc kubenswrapper[4811]: I0128 18:20:08.301645 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-lqzt5"/"default-dockercfg-qbndh" Jan 28 18:20:08 crc kubenswrapper[4811]: I0128 18:20:08.301916 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-lqzt5/must-gather-zrjtz"] Jan 28 18:20:08 crc kubenswrapper[4811]: I0128 18:20:08.396724 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/40ce1205-d2e2-4470-9fda-dfcdaef4b274-must-gather-output\") pod \"must-gather-zrjtz\" (UID: \"40ce1205-d2e2-4470-9fda-dfcdaef4b274\") " pod="openshift-must-gather-lqzt5/must-gather-zrjtz" Jan 28 18:20:08 crc kubenswrapper[4811]: I0128 18:20:08.397124 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtrhm\" (UniqueName: \"kubernetes.io/projected/40ce1205-d2e2-4470-9fda-dfcdaef4b274-kube-api-access-vtrhm\") pod \"must-gather-zrjtz\" (UID: \"40ce1205-d2e2-4470-9fda-dfcdaef4b274\") " pod="openshift-must-gather-lqzt5/must-gather-zrjtz" Jan 28 18:20:08 crc kubenswrapper[4811]: I0128 18:20:08.499459 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vtrhm\" (UniqueName: \"kubernetes.io/projected/40ce1205-d2e2-4470-9fda-dfcdaef4b274-kube-api-access-vtrhm\") pod \"must-gather-zrjtz\" (UID: \"40ce1205-d2e2-4470-9fda-dfcdaef4b274\") " pod="openshift-must-gather-lqzt5/must-gather-zrjtz" Jan 28 18:20:08 crc kubenswrapper[4811]: I0128 18:20:08.499624 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: 
\"kubernetes.io/empty-dir/40ce1205-d2e2-4470-9fda-dfcdaef4b274-must-gather-output\") pod \"must-gather-zrjtz\" (UID: \"40ce1205-d2e2-4470-9fda-dfcdaef4b274\") " pod="openshift-must-gather-lqzt5/must-gather-zrjtz" Jan 28 18:20:08 crc kubenswrapper[4811]: I0128 18:20:08.500116 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/40ce1205-d2e2-4470-9fda-dfcdaef4b274-must-gather-output\") pod \"must-gather-zrjtz\" (UID: \"40ce1205-d2e2-4470-9fda-dfcdaef4b274\") " pod="openshift-must-gather-lqzt5/must-gather-zrjtz" Jan 28 18:20:08 crc kubenswrapper[4811]: I0128 18:20:08.513341 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-lqzt5"/"kube-root-ca.crt" Jan 28 18:20:08 crc kubenswrapper[4811]: I0128 18:20:08.524224 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-lqzt5"/"openshift-service-ca.crt" Jan 28 18:20:08 crc kubenswrapper[4811]: I0128 18:20:08.549267 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vtrhm\" (UniqueName: \"kubernetes.io/projected/40ce1205-d2e2-4470-9fda-dfcdaef4b274-kube-api-access-vtrhm\") pod \"must-gather-zrjtz\" (UID: \"40ce1205-d2e2-4470-9fda-dfcdaef4b274\") " pod="openshift-must-gather-lqzt5/must-gather-zrjtz" Jan 28 18:20:08 crc kubenswrapper[4811]: I0128 18:20:08.616796 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-lqzt5"/"default-dockercfg-qbndh" Jan 28 18:20:08 crc kubenswrapper[4811]: I0128 18:20:08.625848 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-lqzt5/must-gather-zrjtz" Jan 28 18:20:09 crc kubenswrapper[4811]: I0128 18:20:09.112490 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-lqzt5/must-gather-zrjtz"] Jan 28 18:20:09 crc kubenswrapper[4811]: I0128 18:20:09.118018 4811 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 28 18:20:09 crc kubenswrapper[4811]: I0128 18:20:09.555181 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-lqzt5/must-gather-zrjtz" event={"ID":"40ce1205-d2e2-4470-9fda-dfcdaef4b274","Type":"ContainerStarted","Data":"e8b7b7f33d731657c06c28fe49460cfef788a1f286ecaa5c5fd1617f1e75e649"} Jan 28 18:20:12 crc kubenswrapper[4811]: I0128 18:20:12.343677 4811 scope.go:117] "RemoveContainer" containerID="b4f78bf8172cfb57ab13039da08673144164a42ebc3fd398b0a09a9d5f8fcb12" Jan 28 18:20:12 crc kubenswrapper[4811]: E0128 18:20:12.344224 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:20:20 crc kubenswrapper[4811]: I0128 18:20:20.676848 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-lqzt5/must-gather-zrjtz" event={"ID":"40ce1205-d2e2-4470-9fda-dfcdaef4b274","Type":"ContainerStarted","Data":"7d80801892257fe050a9bb26caa1ec12a3ff5e4b1cfc21770972a44ddbc25ec9"} Jan 28 18:20:21 crc kubenswrapper[4811]: I0128 18:20:21.702419 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-must-gather-lqzt5/must-gather-zrjtz" event={"ID":"40ce1205-d2e2-4470-9fda-dfcdaef4b274","Type":"ContainerStarted","Data":"b418a834b6d49707cd038d67e64272bb90d78c5945184acacd7932e212035c3d"} Jan 28 18:20:21 crc kubenswrapper[4811]: I0128 18:20:21.717187 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-lqzt5/must-gather-zrjtz" podStartSLOduration=2.618904333 podStartE2EDuration="13.717162089s" podCreationTimestamp="2026-01-28 18:20:08 +0000 UTC" firstStartedPulling="2026-01-28 18:20:09.117675139 +0000 UTC m=+9301.872038722" lastFinishedPulling="2026-01-28 18:20:20.215932905 +0000 UTC m=+9312.970296478" observedRunningTime="2026-01-28 18:20:21.715497703 +0000 UTC m=+9314.469861286" watchObservedRunningTime="2026-01-28 18:20:21.717162089 +0000 UTC m=+9314.471525672" Jan 28 18:20:24 crc kubenswrapper[4811]: I0128 18:20:24.339187 4811 scope.go:117] "RemoveContainer" containerID="b4f78bf8172cfb57ab13039da08673144164a42ebc3fd398b0a09a9d5f8fcb12" Jan 28 18:20:24 crc kubenswrapper[4811]: E0128 18:20:24.340149 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:20:25 crc kubenswrapper[4811]: I0128 18:20:25.134253 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-lqzt5/crc-debug-7snxx"] Jan 28 18:20:25 crc kubenswrapper[4811]: I0128 18:20:25.135873 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-lqzt5/crc-debug-7snxx" Jan 28 18:20:25 crc kubenswrapper[4811]: I0128 18:20:25.179173 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qz5hj\" (UniqueName: \"kubernetes.io/projected/fa590cd6-02d2-404c-8c3a-b4ad210887fd-kube-api-access-qz5hj\") pod \"crc-debug-7snxx\" (UID: \"fa590cd6-02d2-404c-8c3a-b4ad210887fd\") " pod="openshift-must-gather-lqzt5/crc-debug-7snxx" Jan 28 18:20:25 crc kubenswrapper[4811]: I0128 18:20:25.179349 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fa590cd6-02d2-404c-8c3a-b4ad210887fd-host\") pod \"crc-debug-7snxx\" (UID: \"fa590cd6-02d2-404c-8c3a-b4ad210887fd\") " pod="openshift-must-gather-lqzt5/crc-debug-7snxx" Jan 28 18:20:25 crc kubenswrapper[4811]: I0128 18:20:25.281284 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fa590cd6-02d2-404c-8c3a-b4ad210887fd-host\") pod \"crc-debug-7snxx\" (UID: \"fa590cd6-02d2-404c-8c3a-b4ad210887fd\") " pod="openshift-must-gather-lqzt5/crc-debug-7snxx" Jan 28 18:20:25 crc kubenswrapper[4811]: I0128 18:20:25.281411 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fa590cd6-02d2-404c-8c3a-b4ad210887fd-host\") pod \"crc-debug-7snxx\" (UID: \"fa590cd6-02d2-404c-8c3a-b4ad210887fd\") " pod="openshift-must-gather-lqzt5/crc-debug-7snxx" Jan 28 18:20:25 crc kubenswrapper[4811]: I0128 18:20:25.281525 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qz5hj\" (UniqueName: \"kubernetes.io/projected/fa590cd6-02d2-404c-8c3a-b4ad210887fd-kube-api-access-qz5hj\") pod \"crc-debug-7snxx\" (UID: \"fa590cd6-02d2-404c-8c3a-b4ad210887fd\") " pod="openshift-must-gather-lqzt5/crc-debug-7snxx" Jan 28 18:20:25 crc kubenswrapper[4811]: I0128 18:20:25.698575 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qz5hj\" (UniqueName: \"kubernetes.io/projected/fa590cd6-02d2-404c-8c3a-b4ad210887fd-kube-api-access-qz5hj\") pod \"crc-debug-7snxx\" (UID: \"fa590cd6-02d2-404c-8c3a-b4ad210887fd\") " pod="openshift-must-gather-lqzt5/crc-debug-7snxx" Jan 28 18:20:25 crc kubenswrapper[4811]: I0128 18:20:25.760856 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-lqzt5/crc-debug-7snxx" Jan 28 18:20:25 crc kubenswrapper[4811]: W0128 18:20:25.797096 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfa590cd6_02d2_404c_8c3a_b4ad210887fd.slice/crio-4bd12095ca1e6785675b27b63d1e4ced244e3873fffe584cb6abe4671ab9dda2 WatchSource:0}: Error finding container 4bd12095ca1e6785675b27b63d1e4ced244e3873fffe584cb6abe4671ab9dda2: Status 404 returned error can't find the container with id 4bd12095ca1e6785675b27b63d1e4ced244e3873fffe584cb6abe4671ab9dda2 Jan 28 18:20:26 crc kubenswrapper[4811]: I0128 18:20:26.769196 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-lqzt5/crc-debug-7snxx" event={"ID":"fa590cd6-02d2-404c-8c3a-b4ad210887fd","Type":"ContainerStarted","Data":"4bd12095ca1e6785675b27b63d1e4ced244e3873fffe584cb6abe4671ab9dda2"} Jan 28 18:20:35 crc kubenswrapper[4811]: I0128 18:20:35.339915 4811 scope.go:117] "RemoveContainer" containerID="b4f78bf8172cfb57ab13039da08673144164a42ebc3fd398b0a09a9d5f8fcb12" Jan 28 18:20:35 crc kubenswrapper[4811]: E0128 18:20:35.340682 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:20:46 crc kubenswrapper[4811]: E0128 18:20:46.047246 4811 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:6ab858aed98e4fe57e6b144da8e90ad5d6698bb4cc5521206f5c05809f0f9296" Jan 28 18:20:46 crc kubenswrapper[4811]: E0128 18:20:46.047901 4811 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:container-00,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:6ab858aed98e4fe57e6b144da8e90ad5d6698bb4cc5521206f5c05809f0f9296,Command:[chroot /host bash -c echo 'TOOLBOX_NAME=toolbox-osp' > /root/.toolboxrc ; rm -rf \"/var/tmp/sos-osp\" && mkdir -p \"/var/tmp/sos-osp\" && sudo podman rm --force toolbox-osp; sudo --preserve-env podman pull --authfile /var/lib/kubelet/config.json registry.redhat.io/rhel9/support-tools && toolbox sos report --batch --all-logs --only-plugins block,cifs,crio,devicemapper,devices,firewall_tables,firewalld,iscsi,lvm2,memory,multipath,nfs,nis,nvme,podman,process,processor,selinux,scsi,udev,logs,crypto --tmp-dir=\"/var/tmp/sos-osp\" && if [[ \"$(ls /var/log/pods/*/{*.log.*,*/*.log.*} 2>/dev/null)\" != '' ]]; then tar --ignore-failed-read --warning=no-file-changed -cJf \"/var/tmp/sos-osp/podlogs.tar.xz\" --transform 's,^,podlogs/,' /var/log/pods/*/{*.log.*,*/*.log.*} || true; 
fi],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:TMOUT,Value:900,ValueFrom:nil,},EnvVar{Name:HOST,Value:/host,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:host,ReadOnly:false,MountPath:/host,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qz5hj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:*true,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod crc-debug-7snxx_openshift-must-gather-lqzt5(fa590cd6-02d2-404c-8c3a-b4ad210887fd): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 28 18:20:46 crc kubenswrapper[4811]: E0128 18:20:46.049045 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"container-00\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openshift-must-gather-lqzt5/crc-debug-7snxx" podUID="fa590cd6-02d2-404c-8c3a-b4ad210887fd" Jan 28 18:20:47 crc kubenswrapper[4811]: E0128 18:20:47.022790 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"container-00\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:6ab858aed98e4fe57e6b144da8e90ad5d6698bb4cc5521206f5c05809f0f9296\\\"\"" pod="openshift-must-gather-lqzt5/crc-debug-7snxx" podUID="fa590cd6-02d2-404c-8c3a-b4ad210887fd" Jan 28 18:20:47 crc kubenswrapper[4811]: I0128 18:20:47.340202 4811 scope.go:117] "RemoveContainer" containerID="b4f78bf8172cfb57ab13039da08673144164a42ebc3fd398b0a09a9d5f8fcb12" Jan 28 18:20:47 crc kubenswrapper[4811]: E0128 18:20:47.341228 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:20:59 crc kubenswrapper[4811]: I0128 18:20:59.339973 4811 scope.go:117] "RemoveContainer" containerID="b4f78bf8172cfb57ab13039da08673144164a42ebc3fd398b0a09a9d5f8fcb12" Jan 28 18:20:59 crc kubenswrapper[4811]: E0128 18:20:59.340804 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:21:02 crc 
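
The failed container above is the crc-debug helper that chroots into /host, pulls support-tools, runs a filtered sos report into /var/tmp/sos-osp, and tars up rotated pod logs; its image pull was cancelled mid-copy ("code = Canceled"), so the pod moves to ErrImagePull and then ImagePullBackOff on the next sync. The gRPC status codes seen in this log ("Canceled" here, "NotFound" during the earlier container cleanup) can be told apart mechanically; a hypothetical helper, assuming google.golang.org/grpc is available:

```go
// Sketch: classifying CRI/gRPC errors like those in the log above.
// classify() is a hypothetical helper, not kubelet code.
package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

func classify(err error) string {
	s, ok := status.FromError(err)
	if !ok {
		return "not a gRPC status error"
	}
	switch s.Code() {
	case codes.Canceled:
		return "pull cancelled mid-flight -> ErrImagePull, then ImagePullBackOff"
	case codes.NotFound:
		return "object already gone -> benign during cleanup"
	default:
		return "other: " + s.Code().String()
	}
}

func main() {
	fmt.Println(classify(status.Error(codes.Canceled, "copying config: context canceled")))
	fmt.Println(classify(status.Error(codes.NotFound, "could not find container")))
}
```
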
kubenswrapper[4811]: I0128 18:21:02.168209 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-lqzt5/crc-debug-7snxx" event={"ID":"fa590cd6-02d2-404c-8c3a-b4ad210887fd","Type":"ContainerStarted","Data":"47d125b114e4af1d00b953196ad77a85ef042403b87347ef834cafbc9fe7d9f2"} Jan 28 18:21:02 crc kubenswrapper[4811]: I0128 18:21:02.182282 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-lqzt5/crc-debug-7snxx" podStartSLOduration=2.138280456 podStartE2EDuration="37.182263412s" podCreationTimestamp="2026-01-28 18:20:25 +0000 UTC" firstStartedPulling="2026-01-28 18:20:25.799563977 +0000 UTC m=+9318.553927560" lastFinishedPulling="2026-01-28 18:21:00.843546943 +0000 UTC m=+9353.597910516" observedRunningTime="2026-01-28 18:21:02.18072797 +0000 UTC m=+9354.935091563" watchObservedRunningTime="2026-01-28 18:21:02.182263412 +0000 UTC m=+9354.936626995" Jan 28 18:21:13 crc kubenswrapper[4811]: I0128 18:21:13.339851 4811 scope.go:117] "RemoveContainer" containerID="b4f78bf8172cfb57ab13039da08673144164a42ebc3fd398b0a09a9d5f8fcb12" Jan 28 18:21:17 crc kubenswrapper[4811]: I0128 18:21:17.325484 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"e75a40ca7370f632a074790d1f85cf6e00f4bcbf3c9fa2826b8e2efe04680655"} Jan 28 18:21:27 crc kubenswrapper[4811]: I0128 18:21:27.424284 4811 generic.go:334] "Generic (PLEG): container finished" podID="fa590cd6-02d2-404c-8c3a-b4ad210887fd" containerID="47d125b114e4af1d00b953196ad77a85ef042403b87347ef834cafbc9fe7d9f2" exitCode=0 Jan 28 18:21:27 crc kubenswrapper[4811]: I0128 18:21:27.424348 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-lqzt5/crc-debug-7snxx" event={"ID":"fa590cd6-02d2-404c-8c3a-b4ad210887fd","Type":"ContainerDied","Data":"47d125b114e4af1d00b953196ad77a85ef042403b87347ef834cafbc9fe7d9f2"} Jan 28 18:21:28 crc kubenswrapper[4811]: I0128 18:21:28.579912 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-lqzt5/crc-debug-7snxx" Jan 28 18:21:28 crc kubenswrapper[4811]: I0128 18:21:28.632639 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-lqzt5/crc-debug-7snxx"] Jan 28 18:21:28 crc kubenswrapper[4811]: I0128 18:21:28.642834 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-lqzt5/crc-debug-7snxx"] Jan 28 18:21:28 crc kubenswrapper[4811]: I0128 18:21:28.768120 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fa590cd6-02d2-404c-8c3a-b4ad210887fd-host\") pod \"fa590cd6-02d2-404c-8c3a-b4ad210887fd\" (UID: \"fa590cd6-02d2-404c-8c3a-b4ad210887fd\") " Jan 28 18:21:28 crc kubenswrapper[4811]: I0128 18:21:28.768231 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fa590cd6-02d2-404c-8c3a-b4ad210887fd-host" (OuterVolumeSpecName: "host") pod "fa590cd6-02d2-404c-8c3a-b4ad210887fd" (UID: "fa590cd6-02d2-404c-8c3a-b4ad210887fd"). InnerVolumeSpecName "host". 
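
Note the CrashLoopBackOff thread resolving above: the 18:21:13 RemoveContainer for machine-config-daemon is, for the first time, not followed by a back-off refusal, and at 18:21:17 PLEG reports the replacement container (e75a40ca...) started; the 5 m back-off ceiling had finally elapsed.
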
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 18:21:28 crc kubenswrapper[4811]: I0128 18:21:28.768551 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qz5hj\" (UniqueName: \"kubernetes.io/projected/fa590cd6-02d2-404c-8c3a-b4ad210887fd-kube-api-access-qz5hj\") pod \"fa590cd6-02d2-404c-8c3a-b4ad210887fd\" (UID: \"fa590cd6-02d2-404c-8c3a-b4ad210887fd\") " Jan 28 18:21:28 crc kubenswrapper[4811]: I0128 18:21:28.769309 4811 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fa590cd6-02d2-404c-8c3a-b4ad210887fd-host\") on node \"crc\" DevicePath \"\"" Jan 28 18:21:28 crc kubenswrapper[4811]: I0128 18:21:28.778085 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa590cd6-02d2-404c-8c3a-b4ad210887fd-kube-api-access-qz5hj" (OuterVolumeSpecName: "kube-api-access-qz5hj") pod "fa590cd6-02d2-404c-8c3a-b4ad210887fd" (UID: "fa590cd6-02d2-404c-8c3a-b4ad210887fd"). InnerVolumeSpecName "kube-api-access-qz5hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 18:21:28 crc kubenswrapper[4811]: I0128 18:21:28.871719 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qz5hj\" (UniqueName: \"kubernetes.io/projected/fa590cd6-02d2-404c-8c3a-b4ad210887fd-kube-api-access-qz5hj\") on node \"crc\" DevicePath \"\"" Jan 28 18:21:29 crc kubenswrapper[4811]: I0128 18:21:29.461218 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4bd12095ca1e6785675b27b63d1e4ced244e3873fffe584cb6abe4671ab9dda2" Jan 28 18:21:29 crc kubenswrapper[4811]: I0128 18:21:29.461290 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-lqzt5/crc-debug-7snxx" Jan 28 18:21:29 crc kubenswrapper[4811]: I0128 18:21:29.839196 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-lqzt5/crc-debug-fzdvt"] Jan 28 18:21:29 crc kubenswrapper[4811]: E0128 18:21:29.839654 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa590cd6-02d2-404c-8c3a-b4ad210887fd" containerName="container-00" Jan 28 18:21:29 crc kubenswrapper[4811]: I0128 18:21:29.839667 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa590cd6-02d2-404c-8c3a-b4ad210887fd" containerName="container-00" Jan 28 18:21:29 crc kubenswrapper[4811]: I0128 18:21:29.839870 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa590cd6-02d2-404c-8c3a-b4ad210887fd" containerName="container-00" Jan 28 18:21:29 crc kubenswrapper[4811]: I0128 18:21:29.840626 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-lqzt5/crc-debug-fzdvt" Jan 28 18:21:29 crc kubenswrapper[4811]: I0128 18:21:29.993058 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0e825263-89fe-446a-aed9-48ddd448066e-host\") pod \"crc-debug-fzdvt\" (UID: \"0e825263-89fe-446a-aed9-48ddd448066e\") " pod="openshift-must-gather-lqzt5/crc-debug-fzdvt" Jan 28 18:21:29 crc kubenswrapper[4811]: I0128 18:21:29.993501 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hhgk4\" (UniqueName: \"kubernetes.io/projected/0e825263-89fe-446a-aed9-48ddd448066e-kube-api-access-hhgk4\") pod \"crc-debug-fzdvt\" (UID: \"0e825263-89fe-446a-aed9-48ddd448066e\") " pod="openshift-must-gather-lqzt5/crc-debug-fzdvt" Jan 28 18:21:30 crc kubenswrapper[4811]: I0128 18:21:30.095967 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hhgk4\" (UniqueName: \"kubernetes.io/projected/0e825263-89fe-446a-aed9-48ddd448066e-kube-api-access-hhgk4\") pod \"crc-debug-fzdvt\" (UID: \"0e825263-89fe-446a-aed9-48ddd448066e\") " pod="openshift-must-gather-lqzt5/crc-debug-fzdvt" Jan 28 18:21:30 crc kubenswrapper[4811]: I0128 18:21:30.096230 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0e825263-89fe-446a-aed9-48ddd448066e-host\") pod \"crc-debug-fzdvt\" (UID: \"0e825263-89fe-446a-aed9-48ddd448066e\") " pod="openshift-must-gather-lqzt5/crc-debug-fzdvt" Jan 28 18:21:30 crc kubenswrapper[4811]: I0128 18:21:30.096298 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0e825263-89fe-446a-aed9-48ddd448066e-host\") pod \"crc-debug-fzdvt\" (UID: \"0e825263-89fe-446a-aed9-48ddd448066e\") " pod="openshift-must-gather-lqzt5/crc-debug-fzdvt" Jan 28 18:21:30 crc kubenswrapper[4811]: I0128 18:21:30.122770 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hhgk4\" (UniqueName: \"kubernetes.io/projected/0e825263-89fe-446a-aed9-48ddd448066e-kube-api-access-hhgk4\") pod \"crc-debug-fzdvt\" (UID: \"0e825263-89fe-446a-aed9-48ddd448066e\") " pod="openshift-must-gather-lqzt5/crc-debug-fzdvt" Jan 28 18:21:30 crc kubenswrapper[4811]: I0128 18:21:30.161877 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-lqzt5/crc-debug-fzdvt" Jan 28 18:21:30 crc kubenswrapper[4811]: I0128 18:21:30.355062 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa590cd6-02d2-404c-8c3a-b4ad210887fd" path="/var/lib/kubelet/pods/fa590cd6-02d2-404c-8c3a-b4ad210887fd/volumes" Jan 28 18:21:30 crc kubenswrapper[4811]: I0128 18:21:30.471039 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-lqzt5/crc-debug-fzdvt" event={"ID":"0e825263-89fe-446a-aed9-48ddd448066e","Type":"ContainerStarted","Data":"3c909b88d3273b34fae6526f8c9766938e5d1fc780081d378c5757655a6c6dbf"} Jan 28 18:21:31 crc kubenswrapper[4811]: I0128 18:21:31.481637 4811 generic.go:334] "Generic (PLEG): container finished" podID="0e825263-89fe-446a-aed9-48ddd448066e" containerID="836732fb9ac59d21d968e2ae48741c8ea742e26b384fc9f65db30609e4dcf93c" exitCode=1 Jan 28 18:21:31 crc kubenswrapper[4811]: I0128 18:21:31.481742 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-lqzt5/crc-debug-fzdvt" event={"ID":"0e825263-89fe-446a-aed9-48ddd448066e","Type":"ContainerDied","Data":"836732fb9ac59d21d968e2ae48741c8ea742e26b384fc9f65db30609e4dcf93c"} Jan 28 18:21:31 crc kubenswrapper[4811]: I0128 18:21:31.522862 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-lqzt5/crc-debug-fzdvt"] Jan 28 18:21:31 crc kubenswrapper[4811]: I0128 18:21:31.535845 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-lqzt5/crc-debug-fzdvt"] Jan 28 18:21:32 crc kubenswrapper[4811]: I0128 18:21:32.672695 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-lqzt5/crc-debug-fzdvt" Jan 28 18:21:32 crc kubenswrapper[4811]: I0128 18:21:32.773115 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hhgk4\" (UniqueName: \"kubernetes.io/projected/0e825263-89fe-446a-aed9-48ddd448066e-kube-api-access-hhgk4\") pod \"0e825263-89fe-446a-aed9-48ddd448066e\" (UID: \"0e825263-89fe-446a-aed9-48ddd448066e\") " Jan 28 18:21:32 crc kubenswrapper[4811]: I0128 18:21:32.773285 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0e825263-89fe-446a-aed9-48ddd448066e-host\") pod \"0e825263-89fe-446a-aed9-48ddd448066e\" (UID: \"0e825263-89fe-446a-aed9-48ddd448066e\") " Jan 28 18:21:32 crc kubenswrapper[4811]: I0128 18:21:32.773401 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0e825263-89fe-446a-aed9-48ddd448066e-host" (OuterVolumeSpecName: "host") pod "0e825263-89fe-446a-aed9-48ddd448066e" (UID: "0e825263-89fe-446a-aed9-48ddd448066e"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 28 18:21:32 crc kubenswrapper[4811]: I0128 18:21:32.774552 4811 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0e825263-89fe-446a-aed9-48ddd448066e-host\") on node \"crc\" DevicePath \"\"" Jan 28 18:21:32 crc kubenswrapper[4811]: I0128 18:21:32.793712 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e825263-89fe-446a-aed9-48ddd448066e-kube-api-access-hhgk4" (OuterVolumeSpecName: "kube-api-access-hhgk4") pod "0e825263-89fe-446a-aed9-48ddd448066e" (UID: "0e825263-89fe-446a-aed9-48ddd448066e"). InnerVolumeSpecName "kube-api-access-hhgk4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 18:21:32 crc kubenswrapper[4811]: I0128 18:21:32.876192 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hhgk4\" (UniqueName: \"kubernetes.io/projected/0e825263-89fe-446a-aed9-48ddd448066e-kube-api-access-hhgk4\") on node \"crc\" DevicePath \"\"" Jan 28 18:21:33 crc kubenswrapper[4811]: I0128 18:21:33.558491 4811 scope.go:117] "RemoveContainer" containerID="836732fb9ac59d21d968e2ae48741c8ea742e26b384fc9f65db30609e4dcf93c" Jan 28 18:21:33 crc kubenswrapper[4811]: I0128 18:21:33.558748 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-lqzt5/crc-debug-fzdvt" Jan 28 18:21:34 crc kubenswrapper[4811]: I0128 18:21:34.350067 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e825263-89fe-446a-aed9-48ddd448066e" path="/var/lib/kubelet/pods/0e825263-89fe-446a-aed9-48ddd448066e/volumes" Jan 28 18:23:33 crc kubenswrapper[4811]: I0128 18:23:33.087466 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 18:23:33 crc kubenswrapper[4811]: I0128 18:23:33.088063 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 18:24:03 crc kubenswrapper[4811]: I0128 18:24:03.087501 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 18:24:03 crc kubenswrapper[4811]: I0128 18:24:03.088062 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 18:24:33 crc kubenswrapper[4811]: I0128 18:24:33.087745 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 18:24:33 crc kubenswrapper[4811]: I0128 18:24:33.088487 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 18:24:33 crc kubenswrapper[4811]: I0128 18:24:33.088553 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" Jan 28 18:24:33 crc kubenswrapper[4811]: I0128 18:24:33.089650 4811 kuberuntime_manager.go:1027] "Message for Container of pod" 
containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e75a40ca7370f632a074790d1f85cf6e00f4bcbf3c9fa2826b8e2efe04680655"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 28 18:24:33 crc kubenswrapper[4811]: I0128 18:24:33.089786 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://e75a40ca7370f632a074790d1f85cf6e00f4bcbf3c9fa2826b8e2efe04680655" gracePeriod=600 Jan 28 18:24:33 crc kubenswrapper[4811]: I0128 18:24:33.707924 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="e75a40ca7370f632a074790d1f85cf6e00f4bcbf3c9fa2826b8e2efe04680655" exitCode=0 Jan 28 18:24:33 crc kubenswrapper[4811]: I0128 18:24:33.707998 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"e75a40ca7370f632a074790d1f85cf6e00f4bcbf3c9fa2826b8e2efe04680655"} Jan 28 18:24:33 crc kubenswrapper[4811]: I0128 18:24:33.708792 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"763482efbb1dccdafcfbf0255fb0a75f16af0ca4d1583736aff621b10a3a2f9e"} Jan 28 18:24:33 crc kubenswrapper[4811]: I0128 18:24:33.708824 4811 scope.go:117] "RemoveContainer" containerID="b4f78bf8172cfb57ab13039da08673144164a42ebc3fd398b0a09a9d5f8fcb12" Jan 28 18:25:57 crc kubenswrapper[4811]: I0128 18:25:57.950844 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-v2xhf"] Jan 28 18:25:57 crc kubenswrapper[4811]: E0128 18:25:57.951943 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e825263-89fe-446a-aed9-48ddd448066e" containerName="container-00" Jan 28 18:25:57 crc kubenswrapper[4811]: I0128 18:25:57.951963 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e825263-89fe-446a-aed9-48ddd448066e" containerName="container-00" Jan 28 18:25:57 crc kubenswrapper[4811]: I0128 18:25:57.952235 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e825263-89fe-446a-aed9-48ddd448066e" containerName="container-00" Jan 28 18:25:57 crc kubenswrapper[4811]: I0128 18:25:57.957282 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-v2xhf" Jan 28 18:25:57 crc kubenswrapper[4811]: I0128 18:25:57.975643 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-v2xhf"] Jan 28 18:25:58 crc kubenswrapper[4811]: I0128 18:25:58.051723 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49ced137-984a-4357-ab74-6ae8cf39b432-catalog-content\") pod \"certified-operators-v2xhf\" (UID: \"49ced137-984a-4357-ab74-6ae8cf39b432\") " pod="openshift-marketplace/certified-operators-v2xhf" Jan 28 18:25:58 crc kubenswrapper[4811]: I0128 18:25:58.051832 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4478\" (UniqueName: \"kubernetes.io/projected/49ced137-984a-4357-ab74-6ae8cf39b432-kube-api-access-z4478\") pod \"certified-operators-v2xhf\" (UID: \"49ced137-984a-4357-ab74-6ae8cf39b432\") " pod="openshift-marketplace/certified-operators-v2xhf" Jan 28 18:25:58 crc kubenswrapper[4811]: I0128 18:25:58.052173 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49ced137-984a-4357-ab74-6ae8cf39b432-utilities\") pod \"certified-operators-v2xhf\" (UID: \"49ced137-984a-4357-ab74-6ae8cf39b432\") " pod="openshift-marketplace/certified-operators-v2xhf" Jan 28 18:25:58 crc kubenswrapper[4811]: I0128 18:25:58.154204 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49ced137-984a-4357-ab74-6ae8cf39b432-utilities\") pod \"certified-operators-v2xhf\" (UID: \"49ced137-984a-4357-ab74-6ae8cf39b432\") " pod="openshift-marketplace/certified-operators-v2xhf" Jan 28 18:25:58 crc kubenswrapper[4811]: I0128 18:25:58.154398 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49ced137-984a-4357-ab74-6ae8cf39b432-catalog-content\") pod \"certified-operators-v2xhf\" (UID: \"49ced137-984a-4357-ab74-6ae8cf39b432\") " pod="openshift-marketplace/certified-operators-v2xhf" Jan 28 18:25:58 crc kubenswrapper[4811]: I0128 18:25:58.154461 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4478\" (UniqueName: \"kubernetes.io/projected/49ced137-984a-4357-ab74-6ae8cf39b432-kube-api-access-z4478\") pod \"certified-operators-v2xhf\" (UID: \"49ced137-984a-4357-ab74-6ae8cf39b432\") " pod="openshift-marketplace/certified-operators-v2xhf" Jan 28 18:25:58 crc kubenswrapper[4811]: I0128 18:25:58.154749 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49ced137-984a-4357-ab74-6ae8cf39b432-utilities\") pod \"certified-operators-v2xhf\" (UID: \"49ced137-984a-4357-ab74-6ae8cf39b432\") " pod="openshift-marketplace/certified-operators-v2xhf" Jan 28 18:25:58 crc kubenswrapper[4811]: I0128 18:25:58.155140 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49ced137-984a-4357-ab74-6ae8cf39b432-catalog-content\") pod \"certified-operators-v2xhf\" (UID: \"49ced137-984a-4357-ab74-6ae8cf39b432\") " pod="openshift-marketplace/certified-operators-v2xhf" Jan 28 18:25:58 crc kubenswrapper[4811]: I0128 18:25:58.180540 4811 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-z4478\" (UniqueName: \"kubernetes.io/projected/49ced137-984a-4357-ab74-6ae8cf39b432-kube-api-access-z4478\") pod \"certified-operators-v2xhf\" (UID: \"49ced137-984a-4357-ab74-6ae8cf39b432\") " pod="openshift-marketplace/certified-operators-v2xhf" Jan 28 18:25:58 crc kubenswrapper[4811]: I0128 18:25:58.282474 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-v2xhf" Jan 28 18:25:59 crc kubenswrapper[4811]: I0128 18:25:59.047384 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-v2xhf"] Jan 28 18:25:59 crc kubenswrapper[4811]: I0128 18:25:59.680831 4811 generic.go:334] "Generic (PLEG): container finished" podID="49ced137-984a-4357-ab74-6ae8cf39b432" containerID="684d5518bcdb39274fc4cd4ae1e2bf5d3cfd9fae2d488bcba55067f82a0d77e1" exitCode=0 Jan 28 18:25:59 crc kubenswrapper[4811]: I0128 18:25:59.681048 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v2xhf" event={"ID":"49ced137-984a-4357-ab74-6ae8cf39b432","Type":"ContainerDied","Data":"684d5518bcdb39274fc4cd4ae1e2bf5d3cfd9fae2d488bcba55067f82a0d77e1"} Jan 28 18:25:59 crc kubenswrapper[4811]: I0128 18:25:59.681183 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v2xhf" event={"ID":"49ced137-984a-4357-ab74-6ae8cf39b432","Type":"ContainerStarted","Data":"8517191c7b53f0cd77ee0676082c3b8c00f6e9333c9d96b15b50d7478fa9ad60"} Jan 28 18:25:59 crc kubenswrapper[4811]: I0128 18:25:59.683385 4811 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 28 18:26:01 crc kubenswrapper[4811]: I0128 18:26:01.731607 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-s8gfl"] Jan 28 18:26:01 crc kubenswrapper[4811]: I0128 18:26:01.734380 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s8gfl" Jan 28 18:26:01 crc kubenswrapper[4811]: I0128 18:26:01.746563 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-s8gfl"] Jan 28 18:26:01 crc kubenswrapper[4811]: I0128 18:26:01.839060 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrpls\" (UniqueName: \"kubernetes.io/projected/fbd61373-3577-42b2-a65e-198d57048391-kube-api-access-zrpls\") pod \"redhat-marketplace-s8gfl\" (UID: \"fbd61373-3577-42b2-a65e-198d57048391\") " pod="openshift-marketplace/redhat-marketplace-s8gfl" Jan 28 18:26:01 crc kubenswrapper[4811]: I0128 18:26:01.839159 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbd61373-3577-42b2-a65e-198d57048391-utilities\") pod \"redhat-marketplace-s8gfl\" (UID: \"fbd61373-3577-42b2-a65e-198d57048391\") " pod="openshift-marketplace/redhat-marketplace-s8gfl" Jan 28 18:26:01 crc kubenswrapper[4811]: I0128 18:26:01.839185 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbd61373-3577-42b2-a65e-198d57048391-catalog-content\") pod \"redhat-marketplace-s8gfl\" (UID: \"fbd61373-3577-42b2-a65e-198d57048391\") " pod="openshift-marketplace/redhat-marketplace-s8gfl" Jan 28 18:26:01 crc kubenswrapper[4811]: I0128 18:26:01.940742 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrpls\" (UniqueName: \"kubernetes.io/projected/fbd61373-3577-42b2-a65e-198d57048391-kube-api-access-zrpls\") pod \"redhat-marketplace-s8gfl\" (UID: \"fbd61373-3577-42b2-a65e-198d57048391\") " pod="openshift-marketplace/redhat-marketplace-s8gfl" Jan 28 18:26:01 crc kubenswrapper[4811]: I0128 18:26:01.940870 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbd61373-3577-42b2-a65e-198d57048391-utilities\") pod \"redhat-marketplace-s8gfl\" (UID: \"fbd61373-3577-42b2-a65e-198d57048391\") " pod="openshift-marketplace/redhat-marketplace-s8gfl" Jan 28 18:26:01 crc kubenswrapper[4811]: I0128 18:26:01.940909 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbd61373-3577-42b2-a65e-198d57048391-catalog-content\") pod \"redhat-marketplace-s8gfl\" (UID: \"fbd61373-3577-42b2-a65e-198d57048391\") " pod="openshift-marketplace/redhat-marketplace-s8gfl" Jan 28 18:26:01 crc kubenswrapper[4811]: I0128 18:26:01.941452 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbd61373-3577-42b2-a65e-198d57048391-utilities\") pod \"redhat-marketplace-s8gfl\" (UID: \"fbd61373-3577-42b2-a65e-198d57048391\") " pod="openshift-marketplace/redhat-marketplace-s8gfl" Jan 28 18:26:01 crc kubenswrapper[4811]: I0128 18:26:01.941465 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbd61373-3577-42b2-a65e-198d57048391-catalog-content\") pod \"redhat-marketplace-s8gfl\" (UID: \"fbd61373-3577-42b2-a65e-198d57048391\") " pod="openshift-marketplace/redhat-marketplace-s8gfl" Jan 28 18:26:01 crc kubenswrapper[4811]: I0128 18:26:01.964277 4811 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-zrpls\" (UniqueName: \"kubernetes.io/projected/fbd61373-3577-42b2-a65e-198d57048391-kube-api-access-zrpls\") pod \"redhat-marketplace-s8gfl\" (UID: \"fbd61373-3577-42b2-a65e-198d57048391\") " pod="openshift-marketplace/redhat-marketplace-s8gfl" Jan 28 18:26:02 crc kubenswrapper[4811]: I0128 18:26:02.076482 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s8gfl" Jan 28 18:26:02 crc kubenswrapper[4811]: I0128 18:26:02.609133 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-s8gfl"] Jan 28 18:26:02 crc kubenswrapper[4811]: W0128 18:26:02.611280 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfbd61373_3577_42b2_a65e_198d57048391.slice/crio-42434d0df60ac0a2a6c305277a2c3c725913e163b3ecfb0c1cafab3e252c5bee WatchSource:0}: Error finding container 42434d0df60ac0a2a6c305277a2c3c725913e163b3ecfb0c1cafab3e252c5bee: Status 404 returned error can't find the container with id 42434d0df60ac0a2a6c305277a2c3c725913e163b3ecfb0c1cafab3e252c5bee Jan 28 18:26:02 crc kubenswrapper[4811]: I0128 18:26:02.723692 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v2xhf" event={"ID":"49ced137-984a-4357-ab74-6ae8cf39b432","Type":"ContainerStarted","Data":"ad367ee1ecdedc3521d2241ff89d156ed6e3e19a7fac5d1b5fc013bbae2fdcb1"} Jan 28 18:26:02 crc kubenswrapper[4811]: I0128 18:26:02.727111 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s8gfl" event={"ID":"fbd61373-3577-42b2-a65e-198d57048391","Type":"ContainerStarted","Data":"42434d0df60ac0a2a6c305277a2c3c725913e163b3ecfb0c1cafab3e252c5bee"} Jan 28 18:26:03 crc kubenswrapper[4811]: I0128 18:26:03.738959 4811 generic.go:334] "Generic (PLEG): container finished" podID="fbd61373-3577-42b2-a65e-198d57048391" containerID="bbee6a55c510a006b38ba0b602480e4cd3a964af0d8122d53b2eb71575770b6f" exitCode=0 Jan 28 18:26:03 crc kubenswrapper[4811]: I0128 18:26:03.739116 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s8gfl" event={"ID":"fbd61373-3577-42b2-a65e-198d57048391","Type":"ContainerDied","Data":"bbee6a55c510a006b38ba0b602480e4cd3a964af0d8122d53b2eb71575770b6f"} Jan 28 18:26:05 crc kubenswrapper[4811]: I0128 18:26:05.762889 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s8gfl" event={"ID":"fbd61373-3577-42b2-a65e-198d57048391","Type":"ContainerStarted","Data":"d451c61f041d191bb59210a9a34420d90db2e444c7811b6a6e029d54f4cb500b"} Jan 28 18:26:09 crc kubenswrapper[4811]: I0128 18:26:09.805925 4811 generic.go:334] "Generic (PLEG): container finished" podID="49ced137-984a-4357-ab74-6ae8cf39b432" containerID="ad367ee1ecdedc3521d2241ff89d156ed6e3e19a7fac5d1b5fc013bbae2fdcb1" exitCode=0 Jan 28 18:26:09 crc kubenswrapper[4811]: I0128 18:26:09.805986 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v2xhf" event={"ID":"49ced137-984a-4357-ab74-6ae8cf39b432","Type":"ContainerDied","Data":"ad367ee1ecdedc3521d2241ff89d156ed6e3e19a7fac5d1b5fc013bbae2fdcb1"} Jan 28 18:26:16 crc kubenswrapper[4811]: I0128 18:26:16.884128 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v2xhf" 
event={"ID":"49ced137-984a-4357-ab74-6ae8cf39b432","Type":"ContainerStarted","Data":"522f271028b944cd8656c9d3f76240d90fee5006670aa3550e202b471a6778bb"} Jan 28 18:26:16 crc kubenswrapper[4811]: I0128 18:26:16.886260 4811 generic.go:334] "Generic (PLEG): container finished" podID="fbd61373-3577-42b2-a65e-198d57048391" containerID="d451c61f041d191bb59210a9a34420d90db2e444c7811b6a6e029d54f4cb500b" exitCode=0 Jan 28 18:26:16 crc kubenswrapper[4811]: I0128 18:26:16.886310 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s8gfl" event={"ID":"fbd61373-3577-42b2-a65e-198d57048391","Type":"ContainerDied","Data":"d451c61f041d191bb59210a9a34420d90db2e444c7811b6a6e029d54f4cb500b"} Jan 28 18:26:16 crc kubenswrapper[4811]: I0128 18:26:16.924732 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-v2xhf" podStartSLOduration=3.6394551010000002 podStartE2EDuration="19.924714935s" podCreationTimestamp="2026-01-28 18:25:57 +0000 UTC" firstStartedPulling="2026-01-28 18:25:59.683110863 +0000 UTC m=+9652.437474456" lastFinishedPulling="2026-01-28 18:26:15.968370667 +0000 UTC m=+9668.722734290" observedRunningTime="2026-01-28 18:26:16.914674685 +0000 UTC m=+9669.669038268" watchObservedRunningTime="2026-01-28 18:26:16.924714935 +0000 UTC m=+9669.679078518" Jan 28 18:26:18 crc kubenswrapper[4811]: I0128 18:26:18.283944 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-v2xhf" Jan 28 18:26:18 crc kubenswrapper[4811]: I0128 18:26:18.284271 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-v2xhf" Jan 28 18:26:19 crc kubenswrapper[4811]: I0128 18:26:19.330013 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-v2xhf" podUID="49ced137-984a-4357-ab74-6ae8cf39b432" containerName="registry-server" probeResult="failure" output=< Jan 28 18:26:19 crc kubenswrapper[4811]: timeout: failed to connect service ":50051" within 1s Jan 28 18:26:19 crc kubenswrapper[4811]: > Jan 28 18:26:19 crc kubenswrapper[4811]: I0128 18:26:19.922071 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s8gfl" event={"ID":"fbd61373-3577-42b2-a65e-198d57048391","Type":"ContainerStarted","Data":"ce296170694722b15b831d389d8f7a1d70d138d2d923cdda3277bbf9f0939896"} Jan 28 18:26:19 crc kubenswrapper[4811]: I0128 18:26:19.971864 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-s8gfl" podStartSLOduration=3.516837503 podStartE2EDuration="18.971842412s" podCreationTimestamp="2026-01-28 18:26:01 +0000 UTC" firstStartedPulling="2026-01-28 18:26:03.740667094 +0000 UTC m=+9656.495030677" lastFinishedPulling="2026-01-28 18:26:19.195672003 +0000 UTC m=+9671.950035586" observedRunningTime="2026-01-28 18:26:19.942732219 +0000 UTC m=+9672.697095822" watchObservedRunningTime="2026-01-28 18:26:19.971842412 +0000 UTC m=+9672.726205995" Jan 28 18:26:22 crc kubenswrapper[4811]: I0128 18:26:22.076720 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-s8gfl" Jan 28 18:26:22 crc kubenswrapper[4811]: I0128 18:26:22.077087 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-s8gfl" Jan 28 18:26:23 crc kubenswrapper[4811]: I0128 
18:26:23.130708 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-s8gfl" podUID="fbd61373-3577-42b2-a65e-198d57048391" containerName="registry-server" probeResult="failure" output=< Jan 28 18:26:23 crc kubenswrapper[4811]: timeout: failed to connect service ":50051" within 1s Jan 28 18:26:23 crc kubenswrapper[4811]: > Jan 28 18:26:28 crc kubenswrapper[4811]: I0128 18:26:28.334336 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-v2xhf" Jan 28 18:26:28 crc kubenswrapper[4811]: I0128 18:26:28.388127 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-v2xhf" Jan 28 18:26:29 crc kubenswrapper[4811]: I0128 18:26:29.159490 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-v2xhf"] Jan 28 18:26:30 crc kubenswrapper[4811]: I0128 18:26:30.011457 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-v2xhf" podUID="49ced137-984a-4357-ab74-6ae8cf39b432" containerName="registry-server" containerID="cri-o://522f271028b944cd8656c9d3f76240d90fee5006670aa3550e202b471a6778bb" gracePeriod=2 Jan 28 18:26:30 crc kubenswrapper[4811]: I0128 18:26:30.524532 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-v2xhf" Jan 28 18:26:30 crc kubenswrapper[4811]: I0128 18:26:30.545998 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49ced137-984a-4357-ab74-6ae8cf39b432-catalog-content\") pod \"49ced137-984a-4357-ab74-6ae8cf39b432\" (UID: \"49ced137-984a-4357-ab74-6ae8cf39b432\") " Jan 28 18:26:30 crc kubenswrapper[4811]: I0128 18:26:30.546088 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49ced137-984a-4357-ab74-6ae8cf39b432-utilities\") pod \"49ced137-984a-4357-ab74-6ae8cf39b432\" (UID: \"49ced137-984a-4357-ab74-6ae8cf39b432\") " Jan 28 18:26:30 crc kubenswrapper[4811]: I0128 18:26:30.546139 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z4478\" (UniqueName: \"kubernetes.io/projected/49ced137-984a-4357-ab74-6ae8cf39b432-kube-api-access-z4478\") pod \"49ced137-984a-4357-ab74-6ae8cf39b432\" (UID: \"49ced137-984a-4357-ab74-6ae8cf39b432\") " Jan 28 18:26:30 crc kubenswrapper[4811]: I0128 18:26:30.548946 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/49ced137-984a-4357-ab74-6ae8cf39b432-utilities" (OuterVolumeSpecName: "utilities") pod "49ced137-984a-4357-ab74-6ae8cf39b432" (UID: "49ced137-984a-4357-ab74-6ae8cf39b432"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 18:26:30 crc kubenswrapper[4811]: I0128 18:26:30.555193 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ced137-984a-4357-ab74-6ae8cf39b432-kube-api-access-z4478" (OuterVolumeSpecName: "kube-api-access-z4478") pod "49ced137-984a-4357-ab74-6ae8cf39b432" (UID: "49ced137-984a-4357-ab74-6ae8cf39b432"). InnerVolumeSpecName "kube-api-access-z4478". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 18:26:30 crc kubenswrapper[4811]: I0128 18:26:30.613259 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/49ced137-984a-4357-ab74-6ae8cf39b432-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "49ced137-984a-4357-ab74-6ae8cf39b432" (UID: "49ced137-984a-4357-ab74-6ae8cf39b432"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 18:26:30 crc kubenswrapper[4811]: I0128 18:26:30.648557 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49ced137-984a-4357-ab74-6ae8cf39b432-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 18:26:30 crc kubenswrapper[4811]: I0128 18:26:30.648774 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49ced137-984a-4357-ab74-6ae8cf39b432-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 18:26:30 crc kubenswrapper[4811]: I0128 18:26:30.648855 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z4478\" (UniqueName: \"kubernetes.io/projected/49ced137-984a-4357-ab74-6ae8cf39b432-kube-api-access-z4478\") on node \"crc\" DevicePath \"\"" Jan 28 18:26:31 crc kubenswrapper[4811]: I0128 18:26:31.021548 4811 generic.go:334] "Generic (PLEG): container finished" podID="49ced137-984a-4357-ab74-6ae8cf39b432" containerID="522f271028b944cd8656c9d3f76240d90fee5006670aa3550e202b471a6778bb" exitCode=0 Jan 28 18:26:31 crc kubenswrapper[4811]: I0128 18:26:31.021603 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v2xhf" event={"ID":"49ced137-984a-4357-ab74-6ae8cf39b432","Type":"ContainerDied","Data":"522f271028b944cd8656c9d3f76240d90fee5006670aa3550e202b471a6778bb"} Jan 28 18:26:31 crc kubenswrapper[4811]: I0128 18:26:31.021643 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-v2xhf" Jan 28 18:26:31 crc kubenswrapper[4811]: I0128 18:26:31.022532 4811 scope.go:117] "RemoveContainer" containerID="522f271028b944cd8656c9d3f76240d90fee5006670aa3550e202b471a6778bb" Jan 28 18:26:31 crc kubenswrapper[4811]: I0128 18:26:31.022400 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v2xhf" event={"ID":"49ced137-984a-4357-ab74-6ae8cf39b432","Type":"ContainerDied","Data":"8517191c7b53f0cd77ee0676082c3b8c00f6e9333c9d96b15b50d7478fa9ad60"} Jan 28 18:26:31 crc kubenswrapper[4811]: I0128 18:26:31.040897 4811 scope.go:117] "RemoveContainer" containerID="ad367ee1ecdedc3521d2241ff89d156ed6e3e19a7fac5d1b5fc013bbae2fdcb1" Jan 28 18:26:31 crc kubenswrapper[4811]: I0128 18:26:31.068863 4811 scope.go:117] "RemoveContainer" containerID="684d5518bcdb39274fc4cd4ae1e2bf5d3cfd9fae2d488bcba55067f82a0d77e1" Jan 28 18:26:31 crc kubenswrapper[4811]: I0128 18:26:31.076496 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-v2xhf"] Jan 28 18:26:31 crc kubenswrapper[4811]: I0128 18:26:31.087397 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-v2xhf"] Jan 28 18:26:31 crc kubenswrapper[4811]: I0128 18:26:31.122103 4811 scope.go:117] "RemoveContainer" containerID="522f271028b944cd8656c9d3f76240d90fee5006670aa3550e202b471a6778bb" Jan 28 18:26:31 crc kubenswrapper[4811]: E0128 18:26:31.122657 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"522f271028b944cd8656c9d3f76240d90fee5006670aa3550e202b471a6778bb\": container with ID starting with 522f271028b944cd8656c9d3f76240d90fee5006670aa3550e202b471a6778bb not found: ID does not exist" containerID="522f271028b944cd8656c9d3f76240d90fee5006670aa3550e202b471a6778bb" Jan 28 18:26:31 crc kubenswrapper[4811]: I0128 18:26:31.122705 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"522f271028b944cd8656c9d3f76240d90fee5006670aa3550e202b471a6778bb"} err="failed to get container status \"522f271028b944cd8656c9d3f76240d90fee5006670aa3550e202b471a6778bb\": rpc error: code = NotFound desc = could not find container \"522f271028b944cd8656c9d3f76240d90fee5006670aa3550e202b471a6778bb\": container with ID starting with 522f271028b944cd8656c9d3f76240d90fee5006670aa3550e202b471a6778bb not found: ID does not exist" Jan 28 18:26:31 crc kubenswrapper[4811]: I0128 18:26:31.122735 4811 scope.go:117] "RemoveContainer" containerID="ad367ee1ecdedc3521d2241ff89d156ed6e3e19a7fac5d1b5fc013bbae2fdcb1" Jan 28 18:26:31 crc kubenswrapper[4811]: E0128 18:26:31.123292 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad367ee1ecdedc3521d2241ff89d156ed6e3e19a7fac5d1b5fc013bbae2fdcb1\": container with ID starting with ad367ee1ecdedc3521d2241ff89d156ed6e3e19a7fac5d1b5fc013bbae2fdcb1 not found: ID does not exist" containerID="ad367ee1ecdedc3521d2241ff89d156ed6e3e19a7fac5d1b5fc013bbae2fdcb1" Jan 28 18:26:31 crc kubenswrapper[4811]: I0128 18:26:31.123327 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad367ee1ecdedc3521d2241ff89d156ed6e3e19a7fac5d1b5fc013bbae2fdcb1"} err="failed to get container status \"ad367ee1ecdedc3521d2241ff89d156ed6e3e19a7fac5d1b5fc013bbae2fdcb1\": rpc error: code = NotFound desc = could not find 
container \"ad367ee1ecdedc3521d2241ff89d156ed6e3e19a7fac5d1b5fc013bbae2fdcb1\": container with ID starting with ad367ee1ecdedc3521d2241ff89d156ed6e3e19a7fac5d1b5fc013bbae2fdcb1 not found: ID does not exist" Jan 28 18:26:31 crc kubenswrapper[4811]: I0128 18:26:31.123352 4811 scope.go:117] "RemoveContainer" containerID="684d5518bcdb39274fc4cd4ae1e2bf5d3cfd9fae2d488bcba55067f82a0d77e1" Jan 28 18:26:31 crc kubenswrapper[4811]: E0128 18:26:31.123768 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"684d5518bcdb39274fc4cd4ae1e2bf5d3cfd9fae2d488bcba55067f82a0d77e1\": container with ID starting with 684d5518bcdb39274fc4cd4ae1e2bf5d3cfd9fae2d488bcba55067f82a0d77e1 not found: ID does not exist" containerID="684d5518bcdb39274fc4cd4ae1e2bf5d3cfd9fae2d488bcba55067f82a0d77e1" Jan 28 18:26:31 crc kubenswrapper[4811]: I0128 18:26:31.124001 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"684d5518bcdb39274fc4cd4ae1e2bf5d3cfd9fae2d488bcba55067f82a0d77e1"} err="failed to get container status \"684d5518bcdb39274fc4cd4ae1e2bf5d3cfd9fae2d488bcba55067f82a0d77e1\": rpc error: code = NotFound desc = could not find container \"684d5518bcdb39274fc4cd4ae1e2bf5d3cfd9fae2d488bcba55067f82a0d77e1\": container with ID starting with 684d5518bcdb39274fc4cd4ae1e2bf5d3cfd9fae2d488bcba55067f82a0d77e1 not found: ID does not exist" Jan 28 18:26:32 crc kubenswrapper[4811]: I0128 18:26:32.125892 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-s8gfl" Jan 28 18:26:32 crc kubenswrapper[4811]: I0128 18:26:32.180217 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-s8gfl" Jan 28 18:26:32 crc kubenswrapper[4811]: I0128 18:26:32.353072 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ced137-984a-4357-ab74-6ae8cf39b432" path="/var/lib/kubelet/pods/49ced137-984a-4357-ab74-6ae8cf39b432/volumes" Jan 28 18:26:33 crc kubenswrapper[4811]: I0128 18:26:33.087157 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 18:26:33 crc kubenswrapper[4811]: I0128 18:26:33.087211 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 18:26:33 crc kubenswrapper[4811]: I0128 18:26:33.554706 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-s8gfl"] Jan 28 18:26:34 crc kubenswrapper[4811]: I0128 18:26:34.060492 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-s8gfl" podUID="fbd61373-3577-42b2-a65e-198d57048391" containerName="registry-server" containerID="cri-o://ce296170694722b15b831d389d8f7a1d70d138d2d923cdda3277bbf9f0939896" gracePeriod=2 Jan 28 18:26:34 crc kubenswrapper[4811]: I0128 18:26:34.613058 4811 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s8gfl" Jan 28 18:26:34 crc kubenswrapper[4811]: I0128 18:26:34.735540 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbd61373-3577-42b2-a65e-198d57048391-utilities\") pod \"fbd61373-3577-42b2-a65e-198d57048391\" (UID: \"fbd61373-3577-42b2-a65e-198d57048391\") " Jan 28 18:26:34 crc kubenswrapper[4811]: I0128 18:26:34.736030 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbd61373-3577-42b2-a65e-198d57048391-catalog-content\") pod \"fbd61373-3577-42b2-a65e-198d57048391\" (UID: \"fbd61373-3577-42b2-a65e-198d57048391\") " Jan 28 18:26:34 crc kubenswrapper[4811]: I0128 18:26:34.736108 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zrpls\" (UniqueName: \"kubernetes.io/projected/fbd61373-3577-42b2-a65e-198d57048391-kube-api-access-zrpls\") pod \"fbd61373-3577-42b2-a65e-198d57048391\" (UID: \"fbd61373-3577-42b2-a65e-198d57048391\") " Jan 28 18:26:34 crc kubenswrapper[4811]: I0128 18:26:34.736738 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fbd61373-3577-42b2-a65e-198d57048391-utilities" (OuterVolumeSpecName: "utilities") pod "fbd61373-3577-42b2-a65e-198d57048391" (UID: "fbd61373-3577-42b2-a65e-198d57048391"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 18:26:34 crc kubenswrapper[4811]: I0128 18:26:34.743705 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbd61373-3577-42b2-a65e-198d57048391-kube-api-access-zrpls" (OuterVolumeSpecName: "kube-api-access-zrpls") pod "fbd61373-3577-42b2-a65e-198d57048391" (UID: "fbd61373-3577-42b2-a65e-198d57048391"). InnerVolumeSpecName "kube-api-access-zrpls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 18:26:34 crc kubenswrapper[4811]: I0128 18:26:34.765396 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fbd61373-3577-42b2-a65e-198d57048391-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fbd61373-3577-42b2-a65e-198d57048391" (UID: "fbd61373-3577-42b2-a65e-198d57048391"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 28 18:26:34 crc kubenswrapper[4811]: I0128 18:26:34.838663 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbd61373-3577-42b2-a65e-198d57048391-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 28 18:26:34 crc kubenswrapper[4811]: I0128 18:26:34.838696 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zrpls\" (UniqueName: \"kubernetes.io/projected/fbd61373-3577-42b2-a65e-198d57048391-kube-api-access-zrpls\") on node \"crc\" DevicePath \"\"" Jan 28 18:26:34 crc kubenswrapper[4811]: I0128 18:26:34.838708 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbd61373-3577-42b2-a65e-198d57048391-utilities\") on node \"crc\" DevicePath \"\"" Jan 28 18:26:35 crc kubenswrapper[4811]: I0128 18:26:35.073193 4811 generic.go:334] "Generic (PLEG): container finished" podID="fbd61373-3577-42b2-a65e-198d57048391" containerID="ce296170694722b15b831d389d8f7a1d70d138d2d923cdda3277bbf9f0939896" exitCode=0 Jan 28 18:26:35 crc kubenswrapper[4811]: I0128 18:26:35.073255 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s8gfl" event={"ID":"fbd61373-3577-42b2-a65e-198d57048391","Type":"ContainerDied","Data":"ce296170694722b15b831d389d8f7a1d70d138d2d923cdda3277bbf9f0939896"} Jan 28 18:26:35 crc kubenswrapper[4811]: I0128 18:26:35.073285 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s8gfl" Jan 28 18:26:35 crc kubenswrapper[4811]: I0128 18:26:35.073315 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s8gfl" event={"ID":"fbd61373-3577-42b2-a65e-198d57048391","Type":"ContainerDied","Data":"42434d0df60ac0a2a6c305277a2c3c725913e163b3ecfb0c1cafab3e252c5bee"} Jan 28 18:26:35 crc kubenswrapper[4811]: I0128 18:26:35.073344 4811 scope.go:117] "RemoveContainer" containerID="ce296170694722b15b831d389d8f7a1d70d138d2d923cdda3277bbf9f0939896" Jan 28 18:26:35 crc kubenswrapper[4811]: I0128 18:26:35.095976 4811 scope.go:117] "RemoveContainer" containerID="d451c61f041d191bb59210a9a34420d90db2e444c7811b6a6e029d54f4cb500b" Jan 28 18:26:35 crc kubenswrapper[4811]: I0128 18:26:35.120221 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-s8gfl"] Jan 28 18:26:35 crc kubenswrapper[4811]: I0128 18:26:35.124354 4811 scope.go:117] "RemoveContainer" containerID="bbee6a55c510a006b38ba0b602480e4cd3a964af0d8122d53b2eb71575770b6f" Jan 28 18:26:35 crc kubenswrapper[4811]: I0128 18:26:35.133040 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-s8gfl"] Jan 28 18:26:35 crc kubenswrapper[4811]: I0128 18:26:35.175900 4811 scope.go:117] "RemoveContainer" containerID="ce296170694722b15b831d389d8f7a1d70d138d2d923cdda3277bbf9f0939896" Jan 28 18:26:35 crc kubenswrapper[4811]: E0128 18:26:35.177248 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce296170694722b15b831d389d8f7a1d70d138d2d923cdda3277bbf9f0939896\": container with ID starting with ce296170694722b15b831d389d8f7a1d70d138d2d923cdda3277bbf9f0939896 not found: ID does not exist" containerID="ce296170694722b15b831d389d8f7a1d70d138d2d923cdda3277bbf9f0939896" Jan 28 18:26:35 crc kubenswrapper[4811]: I0128 18:26:35.177291 4811 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce296170694722b15b831d389d8f7a1d70d138d2d923cdda3277bbf9f0939896"} err="failed to get container status \"ce296170694722b15b831d389d8f7a1d70d138d2d923cdda3277bbf9f0939896\": rpc error: code = NotFound desc = could not find container \"ce296170694722b15b831d389d8f7a1d70d138d2d923cdda3277bbf9f0939896\": container with ID starting with ce296170694722b15b831d389d8f7a1d70d138d2d923cdda3277bbf9f0939896 not found: ID does not exist" Jan 28 18:26:35 crc kubenswrapper[4811]: I0128 18:26:35.177314 4811 scope.go:117] "RemoveContainer" containerID="d451c61f041d191bb59210a9a34420d90db2e444c7811b6a6e029d54f4cb500b" Jan 28 18:26:35 crc kubenswrapper[4811]: E0128 18:26:35.177711 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d451c61f041d191bb59210a9a34420d90db2e444c7811b6a6e029d54f4cb500b\": container with ID starting with d451c61f041d191bb59210a9a34420d90db2e444c7811b6a6e029d54f4cb500b not found: ID does not exist" containerID="d451c61f041d191bb59210a9a34420d90db2e444c7811b6a6e029d54f4cb500b" Jan 28 18:26:35 crc kubenswrapper[4811]: I0128 18:26:35.177735 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d451c61f041d191bb59210a9a34420d90db2e444c7811b6a6e029d54f4cb500b"} err="failed to get container status \"d451c61f041d191bb59210a9a34420d90db2e444c7811b6a6e029d54f4cb500b\": rpc error: code = NotFound desc = could not find container \"d451c61f041d191bb59210a9a34420d90db2e444c7811b6a6e029d54f4cb500b\": container with ID starting with d451c61f041d191bb59210a9a34420d90db2e444c7811b6a6e029d54f4cb500b not found: ID does not exist" Jan 28 18:26:35 crc kubenswrapper[4811]: I0128 18:26:35.177782 4811 scope.go:117] "RemoveContainer" containerID="bbee6a55c510a006b38ba0b602480e4cd3a964af0d8122d53b2eb71575770b6f" Jan 28 18:26:35 crc kubenswrapper[4811]: E0128 18:26:35.177996 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bbee6a55c510a006b38ba0b602480e4cd3a964af0d8122d53b2eb71575770b6f\": container with ID starting with bbee6a55c510a006b38ba0b602480e4cd3a964af0d8122d53b2eb71575770b6f not found: ID does not exist" containerID="bbee6a55c510a006b38ba0b602480e4cd3a964af0d8122d53b2eb71575770b6f" Jan 28 18:26:35 crc kubenswrapper[4811]: I0128 18:26:35.178010 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bbee6a55c510a006b38ba0b602480e4cd3a964af0d8122d53b2eb71575770b6f"} err="failed to get container status \"bbee6a55c510a006b38ba0b602480e4cd3a964af0d8122d53b2eb71575770b6f\": rpc error: code = NotFound desc = could not find container \"bbee6a55c510a006b38ba0b602480e4cd3a964af0d8122d53b2eb71575770b6f\": container with ID starting with bbee6a55c510a006b38ba0b602480e4cd3a964af0d8122d53b2eb71575770b6f not found: ID does not exist" Jan 28 18:26:35 crc kubenswrapper[4811]: E0128 18:26:35.190592 4811 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfbd61373_3577_42b2_a65e_198d57048391.slice/crio-42434d0df60ac0a2a6c305277a2c3c725913e163b3ecfb0c1cafab3e252c5bee\": RecentStats: unable to find data in memory cache]" Jan 28 18:26:36 crc kubenswrapper[4811]: I0128 18:26:36.352228 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="fbd61373-3577-42b2-a65e-198d57048391" path="/var/lib/kubelet/pods/fbd61373-3577-42b2-a65e-198d57048391/volumes" Jan 28 18:27:03 crc kubenswrapper[4811]: I0128 18:27:03.087958 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 18:27:03 crc kubenswrapper[4811]: I0128 18:27:03.088692 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 18:27:33 crc kubenswrapper[4811]: I0128 18:27:33.087686 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 18:27:33 crc kubenswrapper[4811]: I0128 18:27:33.088087 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 18:27:33 crc kubenswrapper[4811]: I0128 18:27:33.088664 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" Jan 28 18:27:33 crc kubenswrapper[4811]: I0128 18:27:33.089624 4811 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"763482efbb1dccdafcfbf0255fb0a75f16af0ca4d1583736aff621b10a3a2f9e"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 28 18:27:33 crc kubenswrapper[4811]: I0128 18:27:33.089692 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://763482efbb1dccdafcfbf0255fb0a75f16af0ca4d1583736aff621b10a3a2f9e" gracePeriod=600 Jan 28 18:27:33 crc kubenswrapper[4811]: E0128 18:27:33.224231 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:27:33 crc kubenswrapper[4811]: I0128 18:27:33.705383 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="763482efbb1dccdafcfbf0255fb0a75f16af0ca4d1583736aff621b10a3a2f9e" exitCode=0 Jan 28 18:27:33 crc kubenswrapper[4811]: I0128 18:27:33.705472 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"763482efbb1dccdafcfbf0255fb0a75f16af0ca4d1583736aff621b10a3a2f9e"} Jan 28 18:27:33 crc kubenswrapper[4811]: I0128 18:27:33.705522 4811 scope.go:117] "RemoveContainer" containerID="e75a40ca7370f632a074790d1f85cf6e00f4bcbf3c9fa2826b8e2efe04680655" Jan 28 18:27:33 crc kubenswrapper[4811]: I0128 18:27:33.706270 4811 scope.go:117] "RemoveContainer" containerID="763482efbb1dccdafcfbf0255fb0a75f16af0ca4d1583736aff621b10a3a2f9e" Jan 28 18:27:33 crc kubenswrapper[4811]: E0128 18:27:33.706686 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:27:39 crc kubenswrapper[4811]: I0128 18:27:39.042518 4811 scope.go:117] "RemoveContainer" containerID="47d125b114e4af1d00b953196ad77a85ef042403b87347ef834cafbc9fe7d9f2" Jan 28 18:27:45 crc kubenswrapper[4811]: I0128 18:27:45.339730 4811 scope.go:117] "RemoveContainer" containerID="763482efbb1dccdafcfbf0255fb0a75f16af0ca4d1583736aff621b10a3a2f9e" Jan 28 18:27:45 crc kubenswrapper[4811]: E0128 18:27:45.340525 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:27:59 crc kubenswrapper[4811]: I0128 18:27:59.339905 4811 scope.go:117] "RemoveContainer" containerID="763482efbb1dccdafcfbf0255fb0a75f16af0ca4d1583736aff621b10a3a2f9e" Jan 28 18:27:59 crc kubenswrapper[4811]: E0128 18:27:59.340749 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:28:11 crc kubenswrapper[4811]: I0128 18:28:11.340639 4811 scope.go:117] "RemoveContainer" containerID="763482efbb1dccdafcfbf0255fb0a75f16af0ca4d1583736aff621b10a3a2f9e" Jan 28 18:28:11 crc kubenswrapper[4811]: E0128 18:28:11.341994 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:28:22 crc kubenswrapper[4811]: I0128 18:28:22.340382 4811 scope.go:117] "RemoveContainer" containerID="763482efbb1dccdafcfbf0255fb0a75f16af0ca4d1583736aff621b10a3a2f9e" Jan 28 18:28:22 crc kubenswrapper[4811]: E0128 18:28:22.341145 4811 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:28:37 crc kubenswrapper[4811]: I0128 18:28:37.340118 4811 scope.go:117] "RemoveContainer" containerID="763482efbb1dccdafcfbf0255fb0a75f16af0ca4d1583736aff621b10a3a2f9e" Jan 28 18:28:37 crc kubenswrapper[4811]: E0128 18:28:37.341023 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:28:52 crc kubenswrapper[4811]: I0128 18:28:52.339376 4811 scope.go:117] "RemoveContainer" containerID="763482efbb1dccdafcfbf0255fb0a75f16af0ca4d1583736aff621b10a3a2f9e" Jan 28 18:28:52 crc kubenswrapper[4811]: E0128 18:28:52.340187 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:29:07 crc kubenswrapper[4811]: I0128 18:29:07.341305 4811 scope.go:117] "RemoveContainer" containerID="763482efbb1dccdafcfbf0255fb0a75f16af0ca4d1583736aff621b10a3a2f9e" Jan 28 18:29:07 crc kubenswrapper[4811]: E0128 18:29:07.342055 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.024178 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-knfcf"] Jan 28 18:29:21 crc kubenswrapper[4811]: E0128 18:29:21.025361 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49ced137-984a-4357-ab74-6ae8cf39b432" containerName="extract-utilities" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.025379 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="49ced137-984a-4357-ab74-6ae8cf39b432" containerName="extract-utilities" Jan 28 18:29:21 crc kubenswrapper[4811]: E0128 18:29:21.025416 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbd61373-3577-42b2-a65e-198d57048391" containerName="registry-server" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.025425 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbd61373-3577-42b2-a65e-198d57048391" containerName="registry-server" Jan 28 18:29:21 crc kubenswrapper[4811]: E0128 18:29:21.025478 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49ced137-984a-4357-ab74-6ae8cf39b432" 
containerName="extract-content" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.025488 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="49ced137-984a-4357-ab74-6ae8cf39b432" containerName="extract-content" Jan 28 18:29:21 crc kubenswrapper[4811]: E0128 18:29:21.025506 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49ced137-984a-4357-ab74-6ae8cf39b432" containerName="registry-server" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.025517 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="49ced137-984a-4357-ab74-6ae8cf39b432" containerName="registry-server" Jan 28 18:29:21 crc kubenswrapper[4811]: E0128 18:29:21.025535 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbd61373-3577-42b2-a65e-198d57048391" containerName="extract-utilities" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.025546 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbd61373-3577-42b2-a65e-198d57048391" containerName="extract-utilities" Jan 28 18:29:21 crc kubenswrapper[4811]: E0128 18:29:21.025561 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbd61373-3577-42b2-a65e-198d57048391" containerName="extract-content" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.025569 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbd61373-3577-42b2-a65e-198d57048391" containerName="extract-content" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.025880 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbd61373-3577-42b2-a65e-198d57048391" containerName="registry-server" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.025920 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="49ced137-984a-4357-ab74-6ae8cf39b432" containerName="registry-server" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.028598 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-knfcf" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.066640 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-knfcf"] Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.216464 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-86t7h"] Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.219026 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-86t7h" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.227371 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d28c7e85-6453-41d7-a3e9-25eb14048e03-catalog-content\") pod \"community-operators-knfcf\" (UID: \"d28c7e85-6453-41d7-a3e9-25eb14048e03\") " pod="openshift-marketplace/community-operators-knfcf" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.227457 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d28c7e85-6453-41d7-a3e9-25eb14048e03-utilities\") pod \"community-operators-knfcf\" (UID: \"d28c7e85-6453-41d7-a3e9-25eb14048e03\") " pod="openshift-marketplace/community-operators-knfcf" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.227643 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhhh7\" (UniqueName: \"kubernetes.io/projected/d28c7e85-6453-41d7-a3e9-25eb14048e03-kube-api-access-rhhh7\") pod \"community-operators-knfcf\" (UID: \"d28c7e85-6453-41d7-a3e9-25eb14048e03\") " pod="openshift-marketplace/community-operators-knfcf" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.244310 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-86t7h"] Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.329896 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhhh7\" (UniqueName: \"kubernetes.io/projected/d28c7e85-6453-41d7-a3e9-25eb14048e03-kube-api-access-rhhh7\") pod \"community-operators-knfcf\" (UID: \"d28c7e85-6453-41d7-a3e9-25eb14048e03\") " pod="openshift-marketplace/community-operators-knfcf" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.330058 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d28c7e85-6453-41d7-a3e9-25eb14048e03-catalog-content\") pod \"community-operators-knfcf\" (UID: \"d28c7e85-6453-41d7-a3e9-25eb14048e03\") " pod="openshift-marketplace/community-operators-knfcf" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.330114 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d28c7e85-6453-41d7-a3e9-25eb14048e03-utilities\") pod \"community-operators-knfcf\" (UID: \"d28c7e85-6453-41d7-a3e9-25eb14048e03\") " pod="openshift-marketplace/community-operators-knfcf" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.330137 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad99f0bf-4e34-4b52-a4ce-65565bc153a4-catalog-content\") pod \"redhat-operators-86t7h\" (UID: \"ad99f0bf-4e34-4b52-a4ce-65565bc153a4\") " pod="openshift-marketplace/redhat-operators-86t7h" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.330181 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad99f0bf-4e34-4b52-a4ce-65565bc153a4-utilities\") pod \"redhat-operators-86t7h\" (UID: \"ad99f0bf-4e34-4b52-a4ce-65565bc153a4\") " pod="openshift-marketplace/redhat-operators-86t7h" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 
18:29:21.330256 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkqhd\" (UniqueName: \"kubernetes.io/projected/ad99f0bf-4e34-4b52-a4ce-65565bc153a4-kube-api-access-jkqhd\") pod \"redhat-operators-86t7h\" (UID: \"ad99f0bf-4e34-4b52-a4ce-65565bc153a4\") " pod="openshift-marketplace/redhat-operators-86t7h" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.331267 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d28c7e85-6453-41d7-a3e9-25eb14048e03-utilities\") pod \"community-operators-knfcf\" (UID: \"d28c7e85-6453-41d7-a3e9-25eb14048e03\") " pod="openshift-marketplace/community-operators-knfcf" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.331298 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d28c7e85-6453-41d7-a3e9-25eb14048e03-catalog-content\") pod \"community-operators-knfcf\" (UID: \"d28c7e85-6453-41d7-a3e9-25eb14048e03\") " pod="openshift-marketplace/community-operators-knfcf" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.340417 4811 scope.go:117] "RemoveContainer" containerID="763482efbb1dccdafcfbf0255fb0a75f16af0ca4d1583736aff621b10a3a2f9e" Jan 28 18:29:21 crc kubenswrapper[4811]: E0128 18:29:21.340690 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.354519 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rhhh7\" (UniqueName: \"kubernetes.io/projected/d28c7e85-6453-41d7-a3e9-25eb14048e03-kube-api-access-rhhh7\") pod \"community-operators-knfcf\" (UID: \"d28c7e85-6453-41d7-a3e9-25eb14048e03\") " pod="openshift-marketplace/community-operators-knfcf" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.359240 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-knfcf" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.432032 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkqhd\" (UniqueName: \"kubernetes.io/projected/ad99f0bf-4e34-4b52-a4ce-65565bc153a4-kube-api-access-jkqhd\") pod \"redhat-operators-86t7h\" (UID: \"ad99f0bf-4e34-4b52-a4ce-65565bc153a4\") " pod="openshift-marketplace/redhat-operators-86t7h" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.432284 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad99f0bf-4e34-4b52-a4ce-65565bc153a4-catalog-content\") pod \"redhat-operators-86t7h\" (UID: \"ad99f0bf-4e34-4b52-a4ce-65565bc153a4\") " pod="openshift-marketplace/redhat-operators-86t7h" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.432324 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad99f0bf-4e34-4b52-a4ce-65565bc153a4-utilities\") pod \"redhat-operators-86t7h\" (UID: \"ad99f0bf-4e34-4b52-a4ce-65565bc153a4\") " pod="openshift-marketplace/redhat-operators-86t7h" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.432769 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad99f0bf-4e34-4b52-a4ce-65565bc153a4-catalog-content\") pod \"redhat-operators-86t7h\" (UID: \"ad99f0bf-4e34-4b52-a4ce-65565bc153a4\") " pod="openshift-marketplace/redhat-operators-86t7h" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.432822 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad99f0bf-4e34-4b52-a4ce-65565bc153a4-utilities\") pod \"redhat-operators-86t7h\" (UID: \"ad99f0bf-4e34-4b52-a4ce-65565bc153a4\") " pod="openshift-marketplace/redhat-operators-86t7h" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.450404 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkqhd\" (UniqueName: \"kubernetes.io/projected/ad99f0bf-4e34-4b52-a4ce-65565bc153a4-kube-api-access-jkqhd\") pod \"redhat-operators-86t7h\" (UID: \"ad99f0bf-4e34-4b52-a4ce-65565bc153a4\") " pod="openshift-marketplace/redhat-operators-86t7h" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.541181 4811 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-86t7h" Jan 28 18:29:21 crc kubenswrapper[4811]: I0128 18:29:21.954183 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-knfcf"] Jan 28 18:29:21 crc kubenswrapper[4811]: W0128 18:29:21.955940 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd28c7e85_6453_41d7_a3e9_25eb14048e03.slice/crio-e2427fc890eae544f4c91f775b35ac68585148ceef4649bd212ac62a4fb49a97 WatchSource:0}: Error finding container e2427fc890eae544f4c91f775b35ac68585148ceef4649bd212ac62a4fb49a97: Status 404 returned error can't find the container with id e2427fc890eae544f4c91f775b35ac68585148ceef4649bd212ac62a4fb49a97 Jan 28 18:29:22 crc kubenswrapper[4811]: I0128 18:29:22.129196 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-86t7h"] Jan 28 18:29:22 crc kubenswrapper[4811]: W0128 18:29:22.134923 4811 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podad99f0bf_4e34_4b52_a4ce_65565bc153a4.slice/crio-f48fff3e6663404984dd0d70ea68534c82019ee6db52bed3a8b3b279fcfbc7c6 WatchSource:0}: Error finding container f48fff3e6663404984dd0d70ea68534c82019ee6db52bed3a8b3b279fcfbc7c6: Status 404 returned error can't find the container with id f48fff3e6663404984dd0d70ea68534c82019ee6db52bed3a8b3b279fcfbc7c6 Jan 28 18:29:22 crc kubenswrapper[4811]: I0128 18:29:22.826915 4811 generic.go:334] "Generic (PLEG): container finished" podID="ad99f0bf-4e34-4b52-a4ce-65565bc153a4" containerID="40f9f35ad35afdc98b5bde8532eca1e3dae91d789f0a64dc798772f4a532d66d" exitCode=0 Jan 28 18:29:22 crc kubenswrapper[4811]: I0128 18:29:22.826962 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-86t7h" event={"ID":"ad99f0bf-4e34-4b52-a4ce-65565bc153a4","Type":"ContainerDied","Data":"40f9f35ad35afdc98b5bde8532eca1e3dae91d789f0a64dc798772f4a532d66d"} Jan 28 18:29:22 crc kubenswrapper[4811]: I0128 18:29:22.827200 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-86t7h" event={"ID":"ad99f0bf-4e34-4b52-a4ce-65565bc153a4","Type":"ContainerStarted","Data":"f48fff3e6663404984dd0d70ea68534c82019ee6db52bed3a8b3b279fcfbc7c6"} Jan 28 18:29:22 crc kubenswrapper[4811]: I0128 18:29:22.828502 4811 generic.go:334] "Generic (PLEG): container finished" podID="d28c7e85-6453-41d7-a3e9-25eb14048e03" containerID="bfa211dd5a52286565622f207ee58eb995d124b27cd96352b2d2d4f10103a308" exitCode=0 Jan 28 18:29:22 crc kubenswrapper[4811]: I0128 18:29:22.828524 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-knfcf" event={"ID":"d28c7e85-6453-41d7-a3e9-25eb14048e03","Type":"ContainerDied","Data":"bfa211dd5a52286565622f207ee58eb995d124b27cd96352b2d2d4f10103a308"} Jan 28 18:29:22 crc kubenswrapper[4811]: I0128 18:29:22.828540 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-knfcf" event={"ID":"d28c7e85-6453-41d7-a3e9-25eb14048e03","Type":"ContainerStarted","Data":"e2427fc890eae544f4c91f775b35ac68585148ceef4649bd212ac62a4fb49a97"} Jan 28 18:29:24 crc kubenswrapper[4811]: I0128 18:29:24.871818 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-86t7h" 
Jan 28 18:29:24 crc kubenswrapper[4811]: I0128 18:29:24.879984 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-knfcf" event={"ID":"d28c7e85-6453-41d7-a3e9-25eb14048e03","Type":"ContainerStarted","Data":"001c44a0d34bb5bc059ba3bde4e8b79ed98b14776cd68c59b478e617de788461"}
Jan 28 18:29:26 crc kubenswrapper[4811]: I0128 18:29:26.909879 4811 generic.go:334] "Generic (PLEG): container finished" podID="d28c7e85-6453-41d7-a3e9-25eb14048e03" containerID="001c44a0d34bb5bc059ba3bde4e8b79ed98b14776cd68c59b478e617de788461" exitCode=0
Jan 28 18:29:26 crc kubenswrapper[4811]: I0128 18:29:26.910101 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-knfcf" event={"ID":"d28c7e85-6453-41d7-a3e9-25eb14048e03","Type":"ContainerDied","Data":"001c44a0d34bb5bc059ba3bde4e8b79ed98b14776cd68c59b478e617de788461"}
Jan 28 18:29:30 crc kubenswrapper[4811]: I0128 18:29:30.970216 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-knfcf" event={"ID":"d28c7e85-6453-41d7-a3e9-25eb14048e03","Type":"ContainerStarted","Data":"56713acd2b4136cc6a550f9a908b8a7e3abab95cd74aa431f498c413f5684f90"}
Jan 28 18:29:31 crc kubenswrapper[4811]: I0128 18:29:31.001522 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-knfcf" podStartSLOduration=4.085240428 podStartE2EDuration="11.001502014s" podCreationTimestamp="2026-01-28 18:29:20 +0000 UTC" firstStartedPulling="2026-01-28 18:29:22.831123085 +0000 UTC m=+9855.585486668" lastFinishedPulling="2026-01-28 18:29:29.747384651 +0000 UTC m=+9862.501748254" observedRunningTime="2026-01-28 18:29:30.999970282 +0000 UTC m=+9863.754333875" watchObservedRunningTime="2026-01-28 18:29:31.001502014 +0000 UTC m=+9863.755865607"
Jan 28 18:29:31 crc kubenswrapper[4811]: I0128 18:29:31.359616 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-knfcf"
Jan 28 18:29:31 crc kubenswrapper[4811]: I0128 18:29:31.360030 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-knfcf"
Jan 28 18:29:32 crc kubenswrapper[4811]: I0128 18:29:32.340411 4811 scope.go:117] "RemoveContainer" containerID="763482efbb1dccdafcfbf0255fb0a75f16af0ca4d1583736aff621b10a3a2f9e"
Jan 28 18:29:32 crc kubenswrapper[4811]: E0128 18:29:32.340795 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 18:29:32 crc kubenswrapper[4811]: I0128 18:29:32.410287 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-knfcf" podUID="d28c7e85-6453-41d7-a3e9-25eb14048e03" containerName="registry-server" probeResult="failure" output=<
Jan 28 18:29:32 crc kubenswrapper[4811]: timeout: failed to connect service ":50051" within 1s
Jan 28 18:29:32 crc kubenswrapper[4811]: >
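The startup probe failure above is a gRPC health check against the registry-server port; the output format matches the grpc_health_probe helper that OLM catalog pods commonly run (an assumption here, since the pod spec itself is not in this log). The equivalent check with the standard gRPC health API, sketched in Go; the probe then succeeds at 18:29:41 below, once the registry has finished loading:

    package main

    import (
        "context"
        "fmt"
        "time"

        "google.golang.org/grpc"
        "google.golang.org/grpc/credentials/insecure"
        healthpb "google.golang.org/grpc/health/grpc_health_v1"
    )

    func main() {
        // Same 1s budget as the "within 1s" timeout in the probe output above.
        ctx, cancel := context.WithTimeout(context.Background(), time.Second)
        defer cancel()

        conn, err := grpc.Dial("localhost:50051", grpc.WithTransportCredentials(insecure.NewCredentials()))
        if err != nil {
            fmt.Println("dial:", err)
            return
        }
        defer conn.Close()

        resp, err := healthpb.NewHealthClient(conn).Check(ctx, &healthpb.HealthCheckRequest{})
        if err != nil {
            fmt.Println("probe failed:", err) // kubelet records this as probeResult="failure"
            return
        }
        fmt.Println("status:", resp.GetStatus()) // SERVING once the catalog is loaded
    }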
Jan 28 18:29:41 crc kubenswrapper[4811]: I0128 18:29:41.442740 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-knfcf"
Jan 28 18:29:41 crc kubenswrapper[4811]: I0128 18:29:41.500145 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-knfcf"
Jan 28 18:29:41 crc kubenswrapper[4811]: I0128 18:29:41.691414 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-knfcf"]
Jan 28 18:29:42 crc kubenswrapper[4811]: I0128 18:29:42.081774 4811 generic.go:334] "Generic (PLEG): container finished" podID="ad99f0bf-4e34-4b52-a4ce-65565bc153a4" containerID="131e99d4502ae47b40bf44ce2a29c0e4c6cc5714bf853372cbccf957289992e5" exitCode=0
Jan 28 18:29:42 crc kubenswrapper[4811]: I0128 18:29:42.081872 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-86t7h" event={"ID":"ad99f0bf-4e34-4b52-a4ce-65565bc153a4","Type":"ContainerDied","Data":"131e99d4502ae47b40bf44ce2a29c0e4c6cc5714bf853372cbccf957289992e5"}
Jan 28 18:29:43 crc kubenswrapper[4811]: I0128 18:29:43.094373 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-knfcf" podUID="d28c7e85-6453-41d7-a3e9-25eb14048e03" containerName="registry-server" containerID="cri-o://56713acd2b4136cc6a550f9a908b8a7e3abab95cd74aa431f498c413f5684f90" gracePeriod=2
Jan 28 18:29:44 crc kubenswrapper[4811]: I0128 18:29:44.109202 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-86t7h" event={"ID":"ad99f0bf-4e34-4b52-a4ce-65565bc153a4","Type":"ContainerStarted","Data":"dc65f9790b4c4e0e22dac423ece8f63fd12b27ccf7c8e2aee1c22b34e554af04"}
Jan 28 18:29:44 crc kubenswrapper[4811]: I0128 18:29:44.113608 4811 generic.go:334] "Generic (PLEG): container finished" podID="d28c7e85-6453-41d7-a3e9-25eb14048e03" containerID="56713acd2b4136cc6a550f9a908b8a7e3abab95cd74aa431f498c413f5684f90" exitCode=0
Jan 28 18:29:44 crc kubenswrapper[4811]: I0128 18:29:44.113636 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-knfcf" event={"ID":"d28c7e85-6453-41d7-a3e9-25eb14048e03","Type":"ContainerDied","Data":"56713acd2b4136cc6a550f9a908b8a7e3abab95cd74aa431f498c413f5684f90"}
Jan 28 18:29:44 crc kubenswrapper[4811]: I0128 18:29:44.147461 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-86t7h" podStartSLOduration=2.853445888 podStartE2EDuration="23.147439957s" podCreationTimestamp="2026-01-28 18:29:21 +0000 UTC" firstStartedPulling="2026-01-28 18:29:22.828917255 +0000 UTC m=+9855.583280838" lastFinishedPulling="2026-01-28 18:29:43.122911324 +0000 UTC m=+9875.877274907" observedRunningTime="2026-01-28 18:29:44.137592862 +0000 UTC m=+9876.891956445" watchObservedRunningTime="2026-01-28 18:29:44.147439957 +0000 UTC m=+9876.901803540"
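The two pod_startup_latency_tracker records are internally consistent: podStartE2EDuration is observedRunningTime minus podCreationTimestamp, and podStartSLOduration is the same interval with image-pull time excluded. For redhat-operators-86t7h above: 18:29:44.147 - 18:29:21 ≈ 23.147s end to end; pulling ran from 18:29:22.829 to 18:29:43.123 ≈ 20.294s; 23.147 - 20.294 ≈ 2.853s, the reported SLO duration. The community-operators-knfcf record works out the same way: 11.002s - 6.916s ≈ 4.085s.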
Jan 28 18:29:44 crc kubenswrapper[4811]: I0128 18:29:44.409384 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-knfcf"
Jan 28 18:29:44 crc kubenswrapper[4811]: I0128 18:29:44.458712 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d28c7e85-6453-41d7-a3e9-25eb14048e03-catalog-content\") pod \"d28c7e85-6453-41d7-a3e9-25eb14048e03\" (UID: \"d28c7e85-6453-41d7-a3e9-25eb14048e03\") "
Jan 28 18:29:44 crc kubenswrapper[4811]: I0128 18:29:44.458855 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rhhh7\" (UniqueName: \"kubernetes.io/projected/d28c7e85-6453-41d7-a3e9-25eb14048e03-kube-api-access-rhhh7\") pod \"d28c7e85-6453-41d7-a3e9-25eb14048e03\" (UID: \"d28c7e85-6453-41d7-a3e9-25eb14048e03\") "
Jan 28 18:29:44 crc kubenswrapper[4811]: I0128 18:29:44.458916 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d28c7e85-6453-41d7-a3e9-25eb14048e03-utilities\") pod \"d28c7e85-6453-41d7-a3e9-25eb14048e03\" (UID: \"d28c7e85-6453-41d7-a3e9-25eb14048e03\") "
Jan 28 18:29:44 crc kubenswrapper[4811]: I0128 18:29:44.461730 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d28c7e85-6453-41d7-a3e9-25eb14048e03-utilities" (OuterVolumeSpecName: "utilities") pod "d28c7e85-6453-41d7-a3e9-25eb14048e03" (UID: "d28c7e85-6453-41d7-a3e9-25eb14048e03"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 18:29:44 crc kubenswrapper[4811]: I0128 18:29:44.486388 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d28c7e85-6453-41d7-a3e9-25eb14048e03-kube-api-access-rhhh7" (OuterVolumeSpecName: "kube-api-access-rhhh7") pod "d28c7e85-6453-41d7-a3e9-25eb14048e03" (UID: "d28c7e85-6453-41d7-a3e9-25eb14048e03"). InnerVolumeSpecName "kube-api-access-rhhh7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 18:29:44 crc kubenswrapper[4811]: I0128 18:29:44.510913 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d28c7e85-6453-41d7-a3e9-25eb14048e03-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d28c7e85-6453-41d7-a3e9-25eb14048e03" (UID: "d28c7e85-6453-41d7-a3e9-25eb14048e03"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 18:29:44 crc kubenswrapper[4811]: I0128 18:29:44.562344 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d28c7e85-6453-41d7-a3e9-25eb14048e03-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 28 18:29:44 crc kubenswrapper[4811]: I0128 18:29:44.562396 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rhhh7\" (UniqueName: \"kubernetes.io/projected/d28c7e85-6453-41d7-a3e9-25eb14048e03-kube-api-access-rhhh7\") on node \"crc\" DevicePath \"\""
Jan 28 18:29:44 crc kubenswrapper[4811]: I0128 18:29:44.562414 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d28c7e85-6453-41d7-a3e9-25eb14048e03-utilities\") on node \"crc\" DevicePath \"\""
Jan 28 18:29:45 crc kubenswrapper[4811]: I0128 18:29:45.131553 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-knfcf" event={"ID":"d28c7e85-6453-41d7-a3e9-25eb14048e03","Type":"ContainerDied","Data":"e2427fc890eae544f4c91f775b35ac68585148ceef4649bd212ac62a4fb49a97"}
Jan 28 18:29:45 crc kubenswrapper[4811]: I0128 18:29:45.132019 4811 scope.go:117] "RemoveContainer" containerID="56713acd2b4136cc6a550f9a908b8a7e3abab95cd74aa431f498c413f5684f90"
Jan 28 18:29:45 crc kubenswrapper[4811]: I0128 18:29:45.131641 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-knfcf"
Jan 28 18:29:45 crc kubenswrapper[4811]: I0128 18:29:45.154658 4811 scope.go:117] "RemoveContainer" containerID="001c44a0d34bb5bc059ba3bde4e8b79ed98b14776cd68c59b478e617de788461"
Jan 28 18:29:45 crc kubenswrapper[4811]: I0128 18:29:45.199508 4811 scope.go:117] "RemoveContainer" containerID="bfa211dd5a52286565622f207ee58eb995d124b27cd96352b2d2d4f10103a308"
Jan 28 18:29:45 crc kubenswrapper[4811]: I0128 18:29:45.200660 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-knfcf"]
Jan 28 18:29:45 crc kubenswrapper[4811]: I0128 18:29:45.215756 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-knfcf"]
Jan 28 18:29:46 crc kubenswrapper[4811]: I0128 18:29:46.340532 4811 scope.go:117] "RemoveContainer" containerID="763482efbb1dccdafcfbf0255fb0a75f16af0ca4d1583736aff621b10a3a2f9e"
Jan 28 18:29:46 crc kubenswrapper[4811]: E0128 18:29:46.341146 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 18:29:46 crc kubenswrapper[4811]: I0128 18:29:46.360087 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d28c7e85-6453-41d7-a3e9-25eb14048e03" path="/var/lib/kubelet/pods/d28c7e85-6453-41d7-a3e9-25eb14048e03/volumes"
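The UnmountVolume / TearDown / "Volume detached" sequence above is the kubelet volume manager's reconciler converging its actual state of the world (what is mounted) toward the desired state (volumes of pods that still exist): once community-operators-knfcf is deleted, its three volumes become unmount candidates. A minimal sketch of that convergence step (hypothetical types, not the kubelet's):

    package main

    import "fmt"

    // Volumes are keyed "podUID/volumeName", mirroring the records above.
    // mount/unmount correspond to "MountVolume started" and
    // "UnmountVolume started ... TearDown" operations respectively.
    func reconcile(desired, actual map[string]bool) (mount, unmount []string) {
        for v := range desired {
            if !actual[v] {
                mount = append(mount, v)
            }
        }
        for v := range actual {
            if !desired[v] {
                unmount = append(unmount, v)
            }
        }
        return
    }

    func main() {
        actual := map[string]bool{
            "d28c7e85/catalog-content":       true,
            "d28c7e85/utilities":             true,
            "d28c7e85/kube-api-access-rhhh7": true,
        }
        desired := map[string]bool{} // pod deleted: nothing is desired any more
        _, unmount := reconcile(desired, actual)
        fmt.Println("to unmount:", unmount)
    }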
Jan 28 18:29:51 crc kubenswrapper[4811]: I0128 18:29:51.542564 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-86t7h"
Jan 28 18:29:51 crc kubenswrapper[4811]: I0128 18:29:51.543154 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-86t7h"
Jan 28 18:29:51 crc kubenswrapper[4811]: I0128 18:29:51.585413 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-86t7h"
Jan 28 18:29:52 crc kubenswrapper[4811]: I0128 18:29:52.270745 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-86t7h"
Jan 28 18:29:53 crc kubenswrapper[4811]: I0128 18:29:53.218944 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-86t7h"]
Jan 28 18:29:54 crc kubenswrapper[4811]: I0128 18:29:54.226479 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-86t7h" podUID="ad99f0bf-4e34-4b52-a4ce-65565bc153a4" containerName="registry-server" containerID="cri-o://dc65f9790b4c4e0e22dac423ece8f63fd12b27ccf7c8e2aee1c22b34e554af04" gracePeriod=2
Jan 28 18:29:54 crc kubenswrapper[4811]: I0128 18:29:54.778556 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-86t7h"
Jan 28 18:29:54 crc kubenswrapper[4811]: I0128 18:29:54.915651 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad99f0bf-4e34-4b52-a4ce-65565bc153a4-utilities\") pod \"ad99f0bf-4e34-4b52-a4ce-65565bc153a4\" (UID: \"ad99f0bf-4e34-4b52-a4ce-65565bc153a4\") "
Jan 28 18:29:54 crc kubenswrapper[4811]: I0128 18:29:54.915993 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkqhd\" (UniqueName: \"kubernetes.io/projected/ad99f0bf-4e34-4b52-a4ce-65565bc153a4-kube-api-access-jkqhd\") pod \"ad99f0bf-4e34-4b52-a4ce-65565bc153a4\" (UID: \"ad99f0bf-4e34-4b52-a4ce-65565bc153a4\") "
Jan 28 18:29:54 crc kubenswrapper[4811]: I0128 18:29:54.916171 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad99f0bf-4e34-4b52-a4ce-65565bc153a4-catalog-content\") pod \"ad99f0bf-4e34-4b52-a4ce-65565bc153a4\" (UID: \"ad99f0bf-4e34-4b52-a4ce-65565bc153a4\") "
Jan 28 18:29:54 crc kubenswrapper[4811]: I0128 18:29:54.916663 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad99f0bf-4e34-4b52-a4ce-65565bc153a4-utilities" (OuterVolumeSpecName: "utilities") pod "ad99f0bf-4e34-4b52-a4ce-65565bc153a4" (UID: "ad99f0bf-4e34-4b52-a4ce-65565bc153a4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 18:29:54 crc kubenswrapper[4811]: I0128 18:29:54.917168 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad99f0bf-4e34-4b52-a4ce-65565bc153a4-utilities\") on node \"crc\" DevicePath \"\""
Jan 28 18:29:54 crc kubenswrapper[4811]: I0128 18:29:54.927879 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad99f0bf-4e34-4b52-a4ce-65565bc153a4-kube-api-access-jkqhd" (OuterVolumeSpecName: "kube-api-access-jkqhd") pod "ad99f0bf-4e34-4b52-a4ce-65565bc153a4" (UID: "ad99f0bf-4e34-4b52-a4ce-65565bc153a4"). InnerVolumeSpecName "kube-api-access-jkqhd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 18:29:55 crc kubenswrapper[4811]: I0128 18:29:55.019907 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkqhd\" (UniqueName: \"kubernetes.io/projected/ad99f0bf-4e34-4b52-a4ce-65565bc153a4-kube-api-access-jkqhd\") on node \"crc\" DevicePath \"\""
Jan 28 18:29:55 crc kubenswrapper[4811]: I0128 18:29:55.039688 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad99f0bf-4e34-4b52-a4ce-65565bc153a4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ad99f0bf-4e34-4b52-a4ce-65565bc153a4" (UID: "ad99f0bf-4e34-4b52-a4ce-65565bc153a4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 18:29:55 crc kubenswrapper[4811]: I0128 18:29:55.122082 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad99f0bf-4e34-4b52-a4ce-65565bc153a4-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 28 18:29:55 crc kubenswrapper[4811]: I0128 18:29:55.236906 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-86t7h" event={"ID":"ad99f0bf-4e34-4b52-a4ce-65565bc153a4","Type":"ContainerDied","Data":"dc65f9790b4c4e0e22dac423ece8f63fd12b27ccf7c8e2aee1c22b34e554af04"}
Jan 28 18:29:55 crc kubenswrapper[4811]: I0128 18:29:55.236977 4811 scope.go:117] "RemoveContainer" containerID="dc65f9790b4c4e0e22dac423ece8f63fd12b27ccf7c8e2aee1c22b34e554af04"
Jan 28 18:29:55 crc kubenswrapper[4811]: I0128 18:29:55.236954 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-86t7h"
Jan 28 18:29:55 crc kubenswrapper[4811]: I0128 18:29:55.237117 4811 generic.go:334] "Generic (PLEG): container finished" podID="ad99f0bf-4e34-4b52-a4ce-65565bc153a4" containerID="dc65f9790b4c4e0e22dac423ece8f63fd12b27ccf7c8e2aee1c22b34e554af04" exitCode=0
Jan 28 18:29:55 crc kubenswrapper[4811]: I0128 18:29:55.237201 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-86t7h" event={"ID":"ad99f0bf-4e34-4b52-a4ce-65565bc153a4","Type":"ContainerDied","Data":"f48fff3e6663404984dd0d70ea68534c82019ee6db52bed3a8b3b279fcfbc7c6"}
Jan 28 18:29:55 crc kubenswrapper[4811]: I0128 18:29:55.268883 4811 scope.go:117] "RemoveContainer" containerID="131e99d4502ae47b40bf44ce2a29c0e4c6cc5714bf853372cbccf957289992e5"
Jan 28 18:29:55 crc kubenswrapper[4811]: I0128 18:29:55.273993 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-86t7h"]
Jan 28 18:29:55 crc kubenswrapper[4811]: I0128 18:29:55.283151 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-86t7h"]
Jan 28 18:29:55 crc kubenswrapper[4811]: I0128 18:29:55.294116 4811 scope.go:117] "RemoveContainer" containerID="40f9f35ad35afdc98b5bde8532eca1e3dae91d789f0a64dc798772f4a532d66d"
Jan 28 18:29:55 crc kubenswrapper[4811]: I0128 18:29:55.338710 4811 scope.go:117] "RemoveContainer" containerID="dc65f9790b4c4e0e22dac423ece8f63fd12b27ccf7c8e2aee1c22b34e554af04"
Jan 28 18:29:55 crc kubenswrapper[4811]: E0128 18:29:55.339159 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc65f9790b4c4e0e22dac423ece8f63fd12b27ccf7c8e2aee1c22b34e554af04\": container with ID starting with dc65f9790b4c4e0e22dac423ece8f63fd12b27ccf7c8e2aee1c22b34e554af04 not found: ID does not exist" containerID="dc65f9790b4c4e0e22dac423ece8f63fd12b27ccf7c8e2aee1c22b34e554af04"
Jan 28 18:29:55 crc kubenswrapper[4811]: I0128 18:29:55.339191 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc65f9790b4c4e0e22dac423ece8f63fd12b27ccf7c8e2aee1c22b34e554af04"} err="failed to get container status \"dc65f9790b4c4e0e22dac423ece8f63fd12b27ccf7c8e2aee1c22b34e554af04\": rpc error: code = NotFound desc = could not find container \"dc65f9790b4c4e0e22dac423ece8f63fd12b27ccf7c8e2aee1c22b34e554af04\": container with ID starting with dc65f9790b4c4e0e22dac423ece8f63fd12b27ccf7c8e2aee1c22b34e554af04 not found: ID does not exist"
Jan 28 18:29:55 crc kubenswrapper[4811]: I0128 18:29:55.339216 4811 scope.go:117] "RemoveContainer" containerID="131e99d4502ae47b40bf44ce2a29c0e4c6cc5714bf853372cbccf957289992e5"
Jan 28 18:29:55 crc kubenswrapper[4811]: E0128 18:29:55.339599 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"131e99d4502ae47b40bf44ce2a29c0e4c6cc5714bf853372cbccf957289992e5\": container with ID starting with 131e99d4502ae47b40bf44ce2a29c0e4c6cc5714bf853372cbccf957289992e5 not found: ID does not exist" containerID="131e99d4502ae47b40bf44ce2a29c0e4c6cc5714bf853372cbccf957289992e5"
Jan 28 18:29:55 crc kubenswrapper[4811]: I0128 18:29:55.339637 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"131e99d4502ae47b40bf44ce2a29c0e4c6cc5714bf853372cbccf957289992e5"} err="failed to get container status \"131e99d4502ae47b40bf44ce2a29c0e4c6cc5714bf853372cbccf957289992e5\": rpc error: code = NotFound desc = could not find container \"131e99d4502ae47b40bf44ce2a29c0e4c6cc5714bf853372cbccf957289992e5\": container with ID starting with 131e99d4502ae47b40bf44ce2a29c0e4c6cc5714bf853372cbccf957289992e5 not found: ID does not exist"
Jan 28 18:29:55 crc kubenswrapper[4811]: I0128 18:29:55.339664 4811 scope.go:117] "RemoveContainer" containerID="40f9f35ad35afdc98b5bde8532eca1e3dae91d789f0a64dc798772f4a532d66d"
Jan 28 18:29:55 crc kubenswrapper[4811]: E0128 18:29:55.339919 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"40f9f35ad35afdc98b5bde8532eca1e3dae91d789f0a64dc798772f4a532d66d\": container with ID starting with 40f9f35ad35afdc98b5bde8532eca1e3dae91d789f0a64dc798772f4a532d66d not found: ID does not exist" containerID="40f9f35ad35afdc98b5bde8532eca1e3dae91d789f0a64dc798772f4a532d66d"
Jan 28 18:29:55 crc kubenswrapper[4811]: I0128 18:29:55.339949 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40f9f35ad35afdc98b5bde8532eca1e3dae91d789f0a64dc798772f4a532d66d"} err="failed to get container status \"40f9f35ad35afdc98b5bde8532eca1e3dae91d789f0a64dc798772f4a532d66d\": rpc error: code = NotFound desc = could not find container \"40f9f35ad35afdc98b5bde8532eca1e3dae91d789f0a64dc798772f4a532d66d\": container with ID starting with 40f9f35ad35afdc98b5bde8532eca1e3dae91d789f0a64dc798772f4a532d66d not found: ID does not exist"
Jan 28 18:29:56 crc kubenswrapper[4811]: I0128 18:29:56.358331 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad99f0bf-4e34-4b52-a4ce-65565bc153a4" path="/var/lib/kubelet/pods/ad99f0bf-4e34-4b52-a4ce-65565bc153a4/volumes"
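The NotFound errors above are benign: the delete raced with cleanup that had already removed the containers, so the follow-up ContainerStatus lookup finds nothing. Callers of a CRI-style runtime API typically treat gRPC NotFound as success when deleting; a sketch of that pattern:

    package main

    import (
        "fmt"

        "google.golang.org/grpc/codes"
        "google.golang.org/grpc/status"
    )

    // removeContainer treats "already gone" as success, which is why the
    // errors above are logged once and then not retried.
    func removeContainer(remove func(id string) error, id string) error {
        err := remove(id)
        if err != nil && status.Code(err) != codes.NotFound {
            return err
        }
        return nil // removed, or already absent
    }

    func main() {
        gone := func(id string) error {
            return status.Errorf(codes.NotFound, "could not find container %q", id)
        }
        fmt.Println(removeContainer(gone, "dc65f979")) // <nil>
    }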
Jan 28 18:29:59 crc kubenswrapper[4811]: I0128 18:29:59.339771 4811 scope.go:117] "RemoveContainer" containerID="763482efbb1dccdafcfbf0255fb0a75f16af0ca4d1583736aff621b10a3a2f9e"
Jan 28 18:29:59 crc kubenswrapper[4811]: E0128 18:29:59.340566 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 18:30:00 crc kubenswrapper[4811]: I0128 18:30:00.153065 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493750-6xtp6"]
Jan 28 18:30:00 crc kubenswrapper[4811]: E0128 18:30:00.153762 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad99f0bf-4e34-4b52-a4ce-65565bc153a4" containerName="extract-utilities"
Jan 28 18:30:00 crc kubenswrapper[4811]: I0128 18:30:00.153779 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad99f0bf-4e34-4b52-a4ce-65565bc153a4" containerName="extract-utilities"
Jan 28 18:30:00 crc kubenswrapper[4811]: E0128 18:30:00.153811 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d28c7e85-6453-41d7-a3e9-25eb14048e03" containerName="extract-utilities"
Jan 28 18:30:00 crc kubenswrapper[4811]: I0128 18:30:00.153817 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d28c7e85-6453-41d7-a3e9-25eb14048e03" containerName="extract-utilities"
Jan 28 18:30:00 crc kubenswrapper[4811]: E0128 18:30:00.153829 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d28c7e85-6453-41d7-a3e9-25eb14048e03" containerName="extract-content"
Jan 28 18:30:00 crc kubenswrapper[4811]: I0128 18:30:00.153835 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d28c7e85-6453-41d7-a3e9-25eb14048e03" containerName="extract-content"
Jan 28 18:30:00 crc kubenswrapper[4811]: E0128 18:30:00.153841 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d28c7e85-6453-41d7-a3e9-25eb14048e03" containerName="registry-server"
Jan 28 18:30:00 crc kubenswrapper[4811]: I0128 18:30:00.153847 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d28c7e85-6453-41d7-a3e9-25eb14048e03" containerName="registry-server"
Jan 28 18:30:00 crc kubenswrapper[4811]: E0128 18:30:00.153855 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad99f0bf-4e34-4b52-a4ce-65565bc153a4" containerName="extract-content"
Jan 28 18:30:00 crc kubenswrapper[4811]: I0128 18:30:00.153862 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad99f0bf-4e34-4b52-a4ce-65565bc153a4" containerName="extract-content"
Jan 28 18:30:00 crc kubenswrapper[4811]: E0128 18:30:00.153868 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad99f0bf-4e34-4b52-a4ce-65565bc153a4" containerName="registry-server"
Jan 28 18:30:00 crc kubenswrapper[4811]: I0128 18:30:00.153874 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad99f0bf-4e34-4b52-a4ce-65565bc153a4" containerName="registry-server"
Jan 28 18:30:00 crc kubenswrapper[4811]: I0128 18:30:00.154064 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad99f0bf-4e34-4b52-a4ce-65565bc153a4" containerName="registry-server"
Jan 28 18:30:00 crc kubenswrapper[4811]: I0128 18:30:00.154082 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d28c7e85-6453-41d7-a3e9-25eb14048e03" containerName="registry-server"
Jan 28 18:30:00 crc kubenswrapper[4811]: I0128 18:30:00.155094 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493750-6xtp6"
Jan 28 18:30:00 crc kubenswrapper[4811]: I0128 18:30:00.157602 4811 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Jan 28 18:30:00 crc kubenswrapper[4811]: I0128 18:30:00.157620 4811 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Jan 28 18:30:00 crc kubenswrapper[4811]: I0128 18:30:00.173248 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493750-6xtp6"]
Jan 28 18:30:00 crc kubenswrapper[4811]: I0128 18:30:00.231130 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4b7acfb9-e881-46c2-a571-b89664a5e7e2-secret-volume\") pod \"collect-profiles-29493750-6xtp6\" (UID: \"4b7acfb9-e881-46c2-a571-b89664a5e7e2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493750-6xtp6"
Jan 28 18:30:00 crc kubenswrapper[4811]: I0128 18:30:00.231328 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjngl\" (UniqueName: \"kubernetes.io/projected/4b7acfb9-e881-46c2-a571-b89664a5e7e2-kube-api-access-rjngl\") pod \"collect-profiles-29493750-6xtp6\" (UID: \"4b7acfb9-e881-46c2-a571-b89664a5e7e2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493750-6xtp6"
Jan 28 18:30:00 crc kubenswrapper[4811]: I0128 18:30:00.231405 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4b7acfb9-e881-46c2-a571-b89664a5e7e2-config-volume\") pod \"collect-profiles-29493750-6xtp6\" (UID: \"4b7acfb9-e881-46c2-a571-b89664a5e7e2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493750-6xtp6"
Jan 28 18:30:00 crc kubenswrapper[4811]: I0128 18:30:00.333884 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4b7acfb9-e881-46c2-a571-b89664a5e7e2-secret-volume\") pod \"collect-profiles-29493750-6xtp6\" (UID: \"4b7acfb9-e881-46c2-a571-b89664a5e7e2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493750-6xtp6"
Jan 28 18:30:00 crc kubenswrapper[4811]: I0128 18:30:00.334945 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjngl\" (UniqueName: \"kubernetes.io/projected/4b7acfb9-e881-46c2-a571-b89664a5e7e2-kube-api-access-rjngl\") pod \"collect-profiles-29493750-6xtp6\" (UID: \"4b7acfb9-e881-46c2-a571-b89664a5e7e2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493750-6xtp6"
Jan 28 18:30:00 crc kubenswrapper[4811]: I0128 18:30:00.335150 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4b7acfb9-e881-46c2-a571-b89664a5e7e2-config-volume\") pod \"collect-profiles-29493750-6xtp6\" (UID: \"4b7acfb9-e881-46c2-a571-b89664a5e7e2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493750-6xtp6"
Jan 28 18:30:00 crc kubenswrapper[4811]: I0128 18:30:00.336322 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4b7acfb9-e881-46c2-a571-b89664a5e7e2-config-volume\") pod \"collect-profiles-29493750-6xtp6\" (UID: \"4b7acfb9-e881-46c2-a571-b89664a5e7e2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493750-6xtp6"
Jan 28 18:30:00 crc kubenswrapper[4811]: I0128 18:30:00.344127 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4b7acfb9-e881-46c2-a571-b89664a5e7e2-secret-volume\") pod \"collect-profiles-29493750-6xtp6\" (UID: \"4b7acfb9-e881-46c2-a571-b89664a5e7e2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493750-6xtp6"
Jan 28 18:30:00 crc kubenswrapper[4811]: I0128 18:30:00.363809 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjngl\" (UniqueName: \"kubernetes.io/projected/4b7acfb9-e881-46c2-a571-b89664a5e7e2-kube-api-access-rjngl\") pod \"collect-profiles-29493750-6xtp6\" (UID: \"4b7acfb9-e881-46c2-a571-b89664a5e7e2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29493750-6xtp6"
Jan 28 18:30:00 crc kubenswrapper[4811]: I0128 18:30:00.472926 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493750-6xtp6"
Jan 28 18:30:01 crc kubenswrapper[4811]: I0128 18:30:01.010743 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493750-6xtp6"]
Jan 28 18:30:01 crc kubenswrapper[4811]: I0128 18:30:01.303942 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493750-6xtp6" event={"ID":"4b7acfb9-e881-46c2-a571-b89664a5e7e2","Type":"ContainerStarted","Data":"866562b50ccd2e9203d7f05c0f19b5548d5faaf0a86f6d93056e3b5d6d496757"}
Jan 28 18:30:01 crc kubenswrapper[4811]: I0128 18:30:01.304382 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493750-6xtp6" event={"ID":"4b7acfb9-e881-46c2-a571-b89664a5e7e2","Type":"ContainerStarted","Data":"38e37308b65bb79eb851cfc052c6956d958d10274bcfebc75e32c6d4cf336005"}
Jan 28 18:30:01 crc kubenswrapper[4811]: I0128 18:30:01.328937 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29493750-6xtp6" podStartSLOduration=1.3289155209999999 podStartE2EDuration="1.328915521s" podCreationTimestamp="2026-01-28 18:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-28 18:30:01.322963651 +0000 UTC m=+9894.077327244" watchObservedRunningTime="2026-01-28 18:30:01.328915521 +0000 UTC m=+9894.083279104"
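The suffix in collect-profiles-29493750 is the CronJob controller's scheduled run time expressed in minutes since the Unix epoch, which is why the job lands exactly on the 18:30:00 SyncLoop ADD above, and why the collect-profiles-29493705-lm598 pod deleted below is the run from 45 minutes earlier:

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        // 29493750 minutes since the epoch is the job's scheduled run time.
        t := time.Unix(29493750*60, 0).UTC()
        fmt.Println(t) // 2026-01-28 18:30:00 +0000 UTC
    }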
Jan 28 18:30:02 crc kubenswrapper[4811]: I0128 18:30:02.322553 4811 generic.go:334] "Generic (PLEG): container finished" podID="4b7acfb9-e881-46c2-a571-b89664a5e7e2" containerID="866562b50ccd2e9203d7f05c0f19b5548d5faaf0a86f6d93056e3b5d6d496757" exitCode=0
Jan 28 18:30:02 crc kubenswrapper[4811]: I0128 18:30:02.322875 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493750-6xtp6" event={"ID":"4b7acfb9-e881-46c2-a571-b89664a5e7e2","Type":"ContainerDied","Data":"866562b50ccd2e9203d7f05c0f19b5548d5faaf0a86f6d93056e3b5d6d496757"}
Jan 28 18:30:03 crc kubenswrapper[4811]: I0128 18:30:03.792163 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493750-6xtp6"
Jan 28 18:30:03 crc kubenswrapper[4811]: I0128 18:30:03.924122 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rjngl\" (UniqueName: \"kubernetes.io/projected/4b7acfb9-e881-46c2-a571-b89664a5e7e2-kube-api-access-rjngl\") pod \"4b7acfb9-e881-46c2-a571-b89664a5e7e2\" (UID: \"4b7acfb9-e881-46c2-a571-b89664a5e7e2\") "
Jan 28 18:30:03 crc kubenswrapper[4811]: I0128 18:30:03.924210 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4b7acfb9-e881-46c2-a571-b89664a5e7e2-secret-volume\") pod \"4b7acfb9-e881-46c2-a571-b89664a5e7e2\" (UID: \"4b7acfb9-e881-46c2-a571-b89664a5e7e2\") "
Jan 28 18:30:03 crc kubenswrapper[4811]: I0128 18:30:03.924335 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4b7acfb9-e881-46c2-a571-b89664a5e7e2-config-volume\") pod \"4b7acfb9-e881-46c2-a571-b89664a5e7e2\" (UID: \"4b7acfb9-e881-46c2-a571-b89664a5e7e2\") "
Jan 28 18:30:03 crc kubenswrapper[4811]: I0128 18:30:03.925310 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b7acfb9-e881-46c2-a571-b89664a5e7e2-config-volume" (OuterVolumeSpecName: "config-volume") pod "4b7acfb9-e881-46c2-a571-b89664a5e7e2" (UID: "4b7acfb9-e881-46c2-a571-b89664a5e7e2"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 28 18:30:03 crc kubenswrapper[4811]: I0128 18:30:03.944133 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b7acfb9-e881-46c2-a571-b89664a5e7e2-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4b7acfb9-e881-46c2-a571-b89664a5e7e2" (UID: "4b7acfb9-e881-46c2-a571-b89664a5e7e2"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 28 18:30:03 crc kubenswrapper[4811]: I0128 18:30:03.946358 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b7acfb9-e881-46c2-a571-b89664a5e7e2-kube-api-access-rjngl" (OuterVolumeSpecName: "kube-api-access-rjngl") pod "4b7acfb9-e881-46c2-a571-b89664a5e7e2" (UID: "4b7acfb9-e881-46c2-a571-b89664a5e7e2"). InnerVolumeSpecName "kube-api-access-rjngl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 18:30:04 crc kubenswrapper[4811]: I0128 18:30:04.027466 4811 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4b7acfb9-e881-46c2-a571-b89664a5e7e2-config-volume\") on node \"crc\" DevicePath \"\""
Jan 28 18:30:04 crc kubenswrapper[4811]: I0128 18:30:04.027507 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rjngl\" (UniqueName: \"kubernetes.io/projected/4b7acfb9-e881-46c2-a571-b89664a5e7e2-kube-api-access-rjngl\") on node \"crc\" DevicePath \"\""
Jan 28 18:30:04 crc kubenswrapper[4811]: I0128 18:30:04.027521 4811 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4b7acfb9-e881-46c2-a571-b89664a5e7e2-secret-volume\") on node \"crc\" DevicePath \"\""
Jan 28 18:30:04 crc kubenswrapper[4811]: I0128 18:30:04.356405 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29493750-6xtp6" event={"ID":"4b7acfb9-e881-46c2-a571-b89664a5e7e2","Type":"ContainerDied","Data":"38e37308b65bb79eb851cfc052c6956d958d10274bcfebc75e32c6d4cf336005"}
Jan 28 18:30:04 crc kubenswrapper[4811]: I0128 18:30:04.356764 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="38e37308b65bb79eb851cfc052c6956d958d10274bcfebc75e32c6d4cf336005"
Jan 28 18:30:04 crc kubenswrapper[4811]: I0128 18:30:04.356521 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29493750-6xtp6"
Jan 28 18:30:04 crc kubenswrapper[4811]: I0128 18:30:04.418306 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493705-lm598"]
Jan 28 18:30:04 crc kubenswrapper[4811]: I0128 18:30:04.431088 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29493705-lm598"]
Jan 28 18:30:06 crc kubenswrapper[4811]: I0128 18:30:06.365166 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be69614c-cb2d-44c4-9ff1-bec81201f8b2" path="/var/lib/kubelet/pods/be69614c-cb2d-44c4-9ff1-bec81201f8b2/volumes"
Jan 28 18:30:11 crc kubenswrapper[4811]: I0128 18:30:11.339727 4811 scope.go:117] "RemoveContainer" containerID="763482efbb1dccdafcfbf0255fb0a75f16af0ca4d1583736aff621b10a3a2f9e"
Jan 28 18:30:11 crc kubenswrapper[4811]: E0128 18:30:11.340514 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 18:30:22 crc kubenswrapper[4811]: I0128 18:30:22.341417 4811 scope.go:117] "RemoveContainer" containerID="763482efbb1dccdafcfbf0255fb0a75f16af0ca4d1583736aff621b10a3a2f9e"
Jan 28 18:30:22 crc kubenswrapper[4811]: E0128 18:30:22.342506 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
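machine-config-daemon is pinned in the kubelet's crash back-off: every sync attempt above is skipped with the same "back-off 5m0s" error, and the attempts are spaced further apart over time. The 5m0s is the cap of an exponential schedule; the initial 10s delay and doubling below are the commonly cited kubelet defaults, assumed here rather than visible in this log:

    package main

    import (
        "fmt"
        "time"
    )

    // backoff returns the crash-loop delay before restart attempt n,
    // doubling from an initial delay up to a limit (the "back-off 5m0s" above).
    func backoff(n int, initial, limit time.Duration) time.Duration {
        d := initial
        for i := 1; i < n; i++ {
            d *= 2
            if d >= limit {
                return limit
            }
        }
        return d
    }

    func main() {
        for n := 1; n <= 7; n++ {
            fmt.Println(n, backoff(n, 10*time.Second, 5*time.Minute))
        }
        // 1 10s, 2 20s, 3 40s, 4 1m20s, 5 2m40s, 6 5m0s, 7 5m0s
    }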
Jan 28 18:30:28 crc kubenswrapper[4811]: I0128 18:30:28.456363 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_6f828c92-3711-4acd-a17b-e7f3a6fd75e8/init-config-reloader/0.log"
Jan 28 18:30:28 crc kubenswrapper[4811]: I0128 18:30:28.701755 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_6f828c92-3711-4acd-a17b-e7f3a6fd75e8/alertmanager/0.log"
Jan 28 18:30:28 crc kubenswrapper[4811]: I0128 18:30:28.761783 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_6f828c92-3711-4acd-a17b-e7f3a6fd75e8/config-reloader/0.log"
Jan 28 18:30:28 crc kubenswrapper[4811]: I0128 18:30:28.775346 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_6f828c92-3711-4acd-a17b-e7f3a6fd75e8/init-config-reloader/0.log"
Jan 28 18:30:29 crc kubenswrapper[4811]: I0128 18:30:29.663508 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_0fd0af80-eaab-4937-b501-6b8f67cdce10/aodh-api/0.log"
Jan 28 18:30:29 crc kubenswrapper[4811]: I0128 18:30:29.687308 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_0fd0af80-eaab-4937-b501-6b8f67cdce10/aodh-listener/0.log"
Jan 28 18:30:29 crc kubenswrapper[4811]: I0128 18:30:29.744136 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_0fd0af80-eaab-4937-b501-6b8f67cdce10/aodh-evaluator/0.log"
Jan 28 18:30:29 crc kubenswrapper[4811]: I0128 18:30:29.888622 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_0fd0af80-eaab-4937-b501-6b8f67cdce10/aodh-notifier/0.log"
Jan 28 18:30:29 crc kubenswrapper[4811]: I0128 18:30:29.929154 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-57f9b55cfb-vwvv2_b0d2592a-6e6a-4415-a1ef-5b026f8306ef/barbican-api/0.log"
Jan 28 18:30:30 crc kubenswrapper[4811]: I0128 18:30:30.026919 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-57f9b55cfb-vwvv2_b0d2592a-6e6a-4415-a1ef-5b026f8306ef/barbican-api-log/0.log"
Jan 28 18:30:30 crc kubenswrapper[4811]: I0128 18:30:30.143623 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-854cc966bb-g5fl8_8ed15b45-fa16-49de-b10c-3a363f820d2e/barbican-keystone-listener/0.log"
Jan 28 18:30:30 crc kubenswrapper[4811]: I0128 18:30:30.173216 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-854cc966bb-g5fl8_8ed15b45-fa16-49de-b10c-3a363f820d2e/barbican-keystone-listener-log/0.log"
Jan 28 18:30:30 crc kubenswrapper[4811]: I0128 18:30:30.378028 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6fb878c849-8qtsg_061ac65f-6ccc-4efc-bc6c-75cf3b355e8d/barbican-worker-log/0.log"
Jan 28 18:30:30 crc kubenswrapper[4811]: I0128 18:30:30.404409 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6fb878c849-8qtsg_061ac65f-6ccc-4efc-bc6c-75cf3b355e8d/barbican-worker/0.log"
Jan 28 18:30:30 crc kubenswrapper[4811]: I0128 18:30:30.525947 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-openstack-openstack-cell1-n5sjm_0f2767ed-1bc7-4828-8084-29e418f02fbb/bootstrap-openstack-openstack-cell1/0.log"
Jan 28 18:30:30 crc kubenswrapper[4811]: I0128 18:30:30.606569 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_840f9dd7-e272-4eca-8c71-8b8b27a34ef3/ceilometer-central-agent/0.log"
Jan 28 18:30:30 crc kubenswrapper[4811]: I0128 18:30:30.684817 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_840f9dd7-e272-4eca-8c71-8b8b27a34ef3/ceilometer-notification-agent/0.log"
Jan 28 18:30:30 crc kubenswrapper[4811]: I0128 18:30:30.756279 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_840f9dd7-e272-4eca-8c71-8b8b27a34ef3/proxy-httpd/0.log"
Jan 28 18:30:30 crc kubenswrapper[4811]: I0128 18:30:30.779764 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_840f9dd7-e272-4eca-8c71-8b8b27a34ef3/sg-core/0.log"
Jan 28 18:30:30 crc kubenswrapper[4811]: I0128 18:30:30.941565 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-client-openstack-openstack-cell1-xfg22_f31d8eab-f8a5-415c-a3c5-a092ab9d99e0/ceph-client-openstack-openstack-cell1/0.log"
Jan 28 18:30:31 crc kubenswrapper[4811]: I0128 18:30:31.047173 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_4e7123fa-834f-4624-9261-e36297a71ce0/cinder-api/0.log"
Jan 28 18:30:31 crc kubenswrapper[4811]: I0128 18:30:31.137305 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_4e7123fa-834f-4624-9261-e36297a71ce0/cinder-api-log/0.log"
Jan 28 18:30:31 crc kubenswrapper[4811]: I0128 18:30:31.395370 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_8df06644-e477-431f-893c-ebabbc754a77/probe/0.log"
Jan 28 18:30:31 crc kubenswrapper[4811]: I0128 18:30:31.398822 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_8df06644-e477-431f-893c-ebabbc754a77/cinder-backup/0.log"
Jan 28 18:30:31 crc kubenswrapper[4811]: I0128 18:30:31.529216 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_e0e61975-afaa-4739-8174-344e50e4c21a/cinder-scheduler/0.log"
Jan 28 18:30:31 crc kubenswrapper[4811]: I0128 18:30:31.635649 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_e0e61975-afaa-4739-8174-344e50e4c21a/probe/0.log"
Jan 28 18:30:31 crc kubenswrapper[4811]: I0128 18:30:31.678611 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4/cinder-volume/0.log"
Jan 28 18:30:31 crc kubenswrapper[4811]: I0128 18:30:31.758933 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_c4f66ea0-a4fb-44c3-8390-4bbdedeb61b4/probe/0.log"
Jan 28 18:30:31 crc kubenswrapper[4811]: I0128 18:30:31.857065 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-openstack-openstack-cell1-sdml6_15a95376-2b63-44f6-aca3-aef9692a3fdd/configure-network-openstack-openstack-cell1/0.log"
Jan 28 18:30:31 crc kubenswrapper[4811]: I0128 18:30:31.935213 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-openstack-openstack-cell1-jwm6n_04a4df18-f769-4741-91e3-734245e3f14f/configure-os-openstack-openstack-cell1/0.log"
Jan 28 18:30:32 crc kubenswrapper[4811]: I0128 18:30:32.076144 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-858dc4dc9f-qwrkt_53564505-e54d-4d5f-84ba-fae0c13c89cb/init/0.log"
Jan 28 18:30:32 crc kubenswrapper[4811]: I0128 18:30:32.251013 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-858dc4dc9f-qwrkt_53564505-e54d-4d5f-84ba-fae0c13c89cb/init/0.log"
Jan 28 18:30:32 crc kubenswrapper[4811]: I0128 18:30:32.286263 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-858dc4dc9f-qwrkt_53564505-e54d-4d5f-84ba-fae0c13c89cb/dnsmasq-dns/0.log"
Jan 28 18:30:32 crc kubenswrapper[4811]: I0128 18:30:32.307459 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-openstack-openstack-cell1-v5bff_2758f1a1-4474-4964-98eb-5046d0474346/download-cache-openstack-openstack-cell1/0.log"
Jan 28 18:30:32 crc kubenswrapper[4811]: I0128 18:30:32.469007 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_6b46e8e8-aeb5-49f1-a79a-9dce013bddf0/glance-log/0.log"
Jan 28 18:30:32 crc kubenswrapper[4811]: I0128 18:30:32.518999 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_6b46e8e8-aeb5-49f1-a79a-9dce013bddf0/glance-httpd/0.log"
Jan 28 18:30:32 crc kubenswrapper[4811]: I0128 18:30:32.589100 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_62295afa-2362-41cd-95f4-8b1519e053d4/glance-log/0.log"
Jan 28 18:30:32 crc kubenswrapper[4811]: I0128 18:30:32.610186 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_62295afa-2362-41cd-95f4-8b1519e053d4/glance-httpd/0.log"
Jan 28 18:30:32 crc kubenswrapper[4811]: I0128 18:30:32.848007 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-api-c69bf9c77-tj5lc_5b54a3b7-5ff0-4b4c-ae89-a052aa412ebe/heat-api/0.log"
Jan 28 18:30:32 crc kubenswrapper[4811]: I0128 18:30:32.897300 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-cfnapi-6f666d6f4f-6sf5f_5108be06-d041-4e4d-9655-1de093f3b10a/heat-cfnapi/0.log"
Jan 28 18:30:33 crc kubenswrapper[4811]: I0128 18:30:33.022330 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-engine-57b6c595c4-c879c_7efd6d3f-0f9f-4096-a40f-37610326c0a4/heat-engine/0.log"
Jan 28 18:30:33 crc kubenswrapper[4811]: I0128 18:30:33.282356 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-openstack-openstack-cell1-rcl46_fbb46d38-4318-4b82-a99a-a165a0b78e00/install-certs-openstack-openstack-cell1/0.log"
Jan 28 18:30:33 crc kubenswrapper[4811]: I0128 18:30:33.282961 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7877575757-66k4b_54589627-ab35-452b-8c83-75608ebfe0f4/horizon-log/0.log"
Jan 28 18:30:33 crc kubenswrapper[4811]: I0128 18:30:33.298084 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7877575757-66k4b_54589627-ab35-452b-8c83-75608ebfe0f4/horizon/0.log"
Jan 28 18:30:33 crc kubenswrapper[4811]: I0128 18:30:33.493051 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-openstack-openstack-cell1-q85zb_93b3e1e5-a2ca-4f8d-b0b9-c66f8783ad7a/install-os-openstack-openstack-cell1/0.log"
Jan 28 18:30:33 crc kubenswrapper[4811]: I0128 18:30:33.686318 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29493721-rc9sk_fbd629a4-a54d-4748-99f1-db9f8ed0a197/keystone-cron/0.log"
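Every path in these "Finished parsing log file" records follows the kubelet's on-disk layout, /var/log/pods/<namespace>_<pod-name>_<pod-uid>/<container>/<restart>.log, where the trailing number is the container's restart count. A small parser for that convention:

    package main

    import (
        "fmt"
        "strings"
    )

    // parsePodLogPath splits a /var/log/pods path into its components.
    // The pod directory is "<namespace>_<name>_<uid>"; the file is "<restart>.log".
    func parsePodLogPath(p string) (ns, pod, uid, container, restart string, ok bool) {
        parts := strings.Split(strings.TrimPrefix(p, "/var/log/pods/"), "/")
        if len(parts) != 3 {
            return
        }
        meta := strings.SplitN(parts[0], "_", 3) // names cannot contain "_", so 3 fields
        if len(meta) != 3 {
            return
        }
        return meta[0], meta[1], meta[2], parts[1], strings.TrimSuffix(parts[2], ".log"), true
    }

    func main() {
        fmt.Println(parsePodLogPath(
            "/var/log/pods/openstack_kube-state-metrics-0_4860d3b1-9c47-4438-9a23-1fb475ac7853/kube-state-metrics/0.log"))
    }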
Jan 28 18:30:33 crc kubenswrapper[4811]: I0128 18:30:33.686582 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-56f69fc7cc-s5j8w_44877e6c-40f0-4f95-93f3-18bfd195a9bb/keystone-api/0.log"
Jan 28 18:30:33 crc kubenswrapper[4811]: I0128 18:30:33.785131 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_4860d3b1-9c47-4438-9a23-1fb475ac7853/kube-state-metrics/0.log"
Jan 28 18:30:33 crc kubenswrapper[4811]: I0128 18:30:33.951523 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-openstack-openstack-cell1-rszk6_b6ea447f-728c-4c66-b2d3-04009c00e284/libvirt-openstack-openstack-cell1/0.log"
Jan 28 18:30:34 crc kubenswrapper[4811]: I0128 18:30:34.023968 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_be0c37b7-c1d7-415e-9dc2-2351411340ca/manila-api-log/0.log"
Jan 28 18:30:34 crc kubenswrapper[4811]: I0128 18:30:34.151509 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_be0c37b7-c1d7-415e-9dc2-2351411340ca/manila-api/0.log"
Jan 28 18:30:34 crc kubenswrapper[4811]: I0128 18:30:34.210259 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_c60281fa-f2b2-4062-a046-61f47a004598/probe/0.log"
Jan 28 18:30:34 crc kubenswrapper[4811]: I0128 18:30:34.261717 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_c60281fa-f2b2-4062-a046-61f47a004598/manila-scheduler/0.log"
Jan 28 18:30:34 crc kubenswrapper[4811]: I0128 18:30:34.409813 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_e98f0d31-d277-4641-af76-90f345e51dba/manila-share/0.log"
Jan 28 18:30:34 crc kubenswrapper[4811]: I0128 18:30:34.453578 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_e98f0d31-d277-4641-af76-90f345e51dba/probe/0.log"
Jan 28 18:30:34 crc kubenswrapper[4811]: I0128 18:30:34.498145 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-copy-data_b41b9853-60d7-4111-86b2-5f6e4f9d242f/adoption/0.log"
Jan 28 18:30:34 crc kubenswrapper[4811]: I0128 18:30:34.899061 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-7f987646bf-7rgxf_9a726314-a1bb-4a46-8cee-03d7cbe8e9fb/neutron-api/0.log"
Jan 28 18:30:34 crc kubenswrapper[4811]: I0128 18:30:34.928033 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-7f987646bf-7rgxf_9a726314-a1bb-4a46-8cee-03d7cbe8e9fb/neutron-httpd/0.log"
Jan 28 18:30:34 crc kubenswrapper[4811]: I0128 18:30:34.989728 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-dhcp-openstack-openstack-cell1-d2jdx_b42afac6-403d-45e8-9d45-23c58fbe0dfa/neutron-dhcp-openstack-openstack-cell1/0.log"
Jan 28 18:30:35 crc kubenswrapper[4811]: I0128 18:30:35.849894 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-sriov-openstack-openstack-cell1-7gc2q_5e48a373-7bb4-4d87-9c9d-7ad8dd784e1b/neutron-sriov-openstack-openstack-cell1/0.log"
Jan 28 18:30:35 crc kubenswrapper[4811]: I0128 18:30:35.853726 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-openstack-openstack-cell1-mcwdt_9217c9b6-59ad-41ca-afa6-b11cb6a1611a/neutron-metadata-openstack-openstack-cell1/0.log"
Jan 28 18:30:36 crc kubenswrapper[4811]: I0128 18:30:36.412681 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_17ab8273-ae23-4456-a6db-803f2bb4cbd1/nova-api-log/0.log"
Jan 28 18:30:36 crc kubenswrapper[4811]: I0128 18:30:36.422767 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_17ab8273-ae23-4456-a6db-803f2bb4cbd1/nova-api-api/0.log"
Jan 28 18:30:36 crc kubenswrapper[4811]: I0128 18:30:36.561862 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_83d34ccd-1f43-44ad-962d-f9644e97dc18/nova-cell0-conductor-conductor/0.log"
Jan 28 18:30:36 crc kubenswrapper[4811]: I0128 18:30:36.693018 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_0a87ce7b-6797-434f-95c9-b5840e035560/nova-cell1-conductor-conductor/0.log"
Jan 28 18:30:36 crc kubenswrapper[4811]: I0128 18:30:36.905080 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_915b65f1-d386-4113-b209-767e3d1d1f07/nova-cell1-novncproxy-novncproxy/0.log"
Jan 28 18:30:37 crc kubenswrapper[4811]: I0128 18:30:37.339876 4811 scope.go:117] "RemoveContainer" containerID="763482efbb1dccdafcfbf0255fb0a75f16af0ca4d1583736aff621b10a3a2f9e"
Jan 28 18:30:37 crc kubenswrapper[4811]: E0128 18:30:37.340148 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 18:30:37 crc kubenswrapper[4811]: I0128 18:30:37.462051 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellsnjr4_8df61c74-a922-4222-a4d1-64f4d871c1cd/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1/0.log"
Jan 28 18:30:37 crc kubenswrapper[4811]: I0128 18:30:37.468664 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-openstack-openstack-cell1-k5j98_f7f2d4fc-f0c1-43a5-adfe-2a91386f5170/nova-cell1-openstack-openstack-cell1/0.log"
Jan 28 18:30:37 crc kubenswrapper[4811]: I0128 18:30:37.672932 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_cd902fd7-3d4d-4f73-844e-f1cdb8f59645/nova-metadata-log/0.log"
Jan 28 18:30:37 crc kubenswrapper[4811]: I0128 18:30:37.842640 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_cd902fd7-3d4d-4f73-844e-f1cdb8f59645/nova-metadata-metadata/0.log"
Jan 28 18:30:38 crc kubenswrapper[4811]: I0128 18:30:38.057405 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-98d866f5b-p8btt_4634aca9-e4c6-4fbb-aba2-ab011d320ca0/init/0.log"
Jan 28 18:30:38 crc kubenswrapper[4811]: I0128 18:30:38.057675 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_f7810437-dc40-47ba-b13c-02cdc7ca85ad/nova-scheduler-scheduler/0.log"
Jan 28 18:30:38 crc kubenswrapper[4811]: I0128 18:30:38.290008 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-98d866f5b-p8btt_4634aca9-e4c6-4fbb-aba2-ab011d320ca0/init/0.log"
Jan 28 18:30:38 crc kubenswrapper[4811]: I0128 18:30:38.305247 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-98d866f5b-p8btt_4634aca9-e4c6-4fbb-aba2-ab011d320ca0/octavia-api-provider-agent/0.log"
Jan 28 18:30:38 crc kubenswrapper[4811]: I0128 18:30:38.578228 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-98d866f5b-p8btt_4634aca9-e4c6-4fbb-aba2-ab011d320ca0/octavia-api/0.log"
Jan 28 18:30:39 crc kubenswrapper[4811]: I0128 18:30:39.104523 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-g7hvz_78c1e707-1148-4a30-b900-7bf0ffc947d7/init/0.log"
Jan 28 18:30:39 crc kubenswrapper[4811]: I0128 18:30:39.175776 4811 scope.go:117] "RemoveContainer" containerID="5877777ba5409a8778414ab059aeb96fa5c0038bdb8c37c733fb75ea9a9624c8"
Jan 28 18:30:39 crc kubenswrapper[4811]: I0128 18:30:39.414451 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-g7hvz_78c1e707-1148-4a30-b900-7bf0ffc947d7/init/0.log"
Jan 28 18:30:39 crc kubenswrapper[4811]: I0128 18:30:39.482990 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-housekeeping-9lwmc_77459561-f220-4376-b309-8cb4e7b06f43/init/0.log"
Jan 28 18:30:39 crc kubenswrapper[4811]: I0128 18:30:39.728738 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-g7hvz_78c1e707-1148-4a30-b900-7bf0ffc947d7/octavia-healthmanager/0.log"
Jan 28 18:30:39 crc kubenswrapper[4811]: I0128 18:30:39.743529 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-housekeeping-9lwmc_77459561-f220-4376-b309-8cb4e7b06f43/init/0.log"
Jan 28 18:30:39 crc kubenswrapper[4811]: I0128 18:30:39.774991 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-housekeeping-9lwmc_77459561-f220-4376-b309-8cb4e7b06f43/octavia-housekeeping/0.log"
Jan 28 18:30:39 crc kubenswrapper[4811]: I0128 18:30:39.986314 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-image-upload-65dd99cb46-4zxm4_19dc43fa-6aa7-4c8b-a693-9ea0c00cb5eb/init/0.log"
Jan 28 18:30:40 crc kubenswrapper[4811]: I0128 18:30:40.116382 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-image-upload-65dd99cb46-4zxm4_19dc43fa-6aa7-4c8b-a693-9ea0c00cb5eb/octavia-amphora-httpd/0.log"
Jan 28 18:30:40 crc kubenswrapper[4811]: I0128 18:30:40.150412 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-image-upload-65dd99cb46-4zxm4_19dc43fa-6aa7-4c8b-a693-9ea0c00cb5eb/init/0.log"
Jan 28 18:30:40 crc kubenswrapper[4811]: I0128 18:30:40.236491 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-rsyslog-nc627_88cbdf9d-4e65-4269-bf33-ad4a6842812a/init/0.log"
Jan 28 18:30:40 crc kubenswrapper[4811]: I0128 18:30:40.415275 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-rsyslog-nc627_88cbdf9d-4e65-4269-bf33-ad4a6842812a/init/0.log"
Jan 28 18:30:40 crc kubenswrapper[4811]: I0128 18:30:40.441496 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-rsyslog-nc627_88cbdf9d-4e65-4269-bf33-ad4a6842812a/octavia-rsyslog/0.log"
Jan 28 18:30:40 crc kubenswrapper[4811]: I0128 18:30:40.460468 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-worker-sbqvf_3d120144-e100-4514-9e1f-7e6fb95c26dc/init/0.log"
Jan 28 18:30:40 crc kubenswrapper[4811]: I0128 18:30:40.733046 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-worker-sbqvf_3d120144-e100-4514-9e1f-7e6fb95c26dc/init/0.log"
Jan 28 18:30:40 crc kubenswrapper[4811]: I0128 18:30:40.849875 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_36dc7233-7d69-4de6-ac18-3c7e87a7b21c/mysql-bootstrap/0.log"
Jan 28 18:30:40 crc kubenswrapper[4811]: I0128 18:30:40.983466 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-worker-sbqvf_3d120144-e100-4514-9e1f-7e6fb95c26dc/octavia-worker/0.log"
Jan 28 18:30:41 crc kubenswrapper[4811]: I0128 18:30:41.038281 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_36dc7233-7d69-4de6-ac18-3c7e87a7b21c/galera/0.log"
Jan 28 18:30:41 crc kubenswrapper[4811]: I0128 18:30:41.043450 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_36dc7233-7d69-4de6-ac18-3c7e87a7b21c/mysql-bootstrap/0.log"
Jan 28 18:30:41 crc kubenswrapper[4811]: I0128 18:30:41.196155 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_53fb2b01-0aa4-4d3a-8395-e1f0399802d4/mysql-bootstrap/0.log"
Jan 28 18:30:41 crc kubenswrapper[4811]: I0128 18:30:41.379957 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_00e32ab2-c148-4f2b-b965-ea60e4d2abc4/openstackclient/0.log"
Jan 28 18:30:41 crc kubenswrapper[4811]: I0128 18:30:41.439374 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_53fb2b01-0aa4-4d3a-8395-e1f0399802d4/mysql-bootstrap/0.log"
Jan 28 18:30:41 crc kubenswrapper[4811]: I0128 18:30:41.491297 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_53fb2b01-0aa4-4d3a-8395-e1f0399802d4/galera/0.log"
Jan 28 18:30:41 crc kubenswrapper[4811]: I0128 18:30:41.635698 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-6zndg_7bf5f5e6-489a-4e4a-9974-e0d4c029534e/ovn-controller/0.log"
Jan 28 18:30:41 crc kubenswrapper[4811]: I0128 18:30:41.876027 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-99r2m_d595e34c-15d9-4b48-b9d9-06d8baa95833/openstack-network-exporter/0.log"
Jan 28 18:30:41 crc kubenswrapper[4811]: I0128 18:30:41.992620 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-4w45h_bfb5a500-7066-4649-9116-571b80da5d8d/ovsdb-server-init/0.log"
Jan 28 18:30:42 crc kubenswrapper[4811]: I0128 18:30:42.167780 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-4w45h_bfb5a500-7066-4649-9116-571b80da5d8d/ovsdb-server-init/0.log"
Jan 28 18:30:42 crc kubenswrapper[4811]: I0128 18:30:42.171361 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-4w45h_bfb5a500-7066-4649-9116-571b80da5d8d/ovsdb-server/0.log"
Jan 28 18:30:42 crc kubenswrapper[4811]: I0128 18:30:42.197398 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-4w45h_bfb5a500-7066-4649-9116-571b80da5d8d/ovs-vswitchd/0.log"
Jan 28 18:30:42 crc kubenswrapper[4811]: I0128 18:30:42.390500 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-copy-data_3a0ba99c-11cb-4141-b009-c28e97e90203/adoption/0.log"
Jan 28 18:30:42 crc kubenswrapper[4811]: I0128 18:30:42.489863 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_5c66a38a-138b-4a1a-a44e-fe517ad7ae99/openstack-network-exporter/0.log"
Jan 28 18:30:42 crc kubenswrapper[4811]: I0128 18:30:42.539962 4811 log.go:25] "Finished parsing log file"
path="/var/log/pods/openstack_ovn-northd-0_5c66a38a-138b-4a1a-a44e-fe517ad7ae99/ovn-northd/0.log" Jan 28 18:30:42 crc kubenswrapper[4811]: I0128 18:30:42.766296 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-openstack-openstack-cell1-vhk7d_ce47cd85-cbaa-476c-befa-5cbcc696525d/ovn-openstack-openstack-cell1/0.log" Jan 28 18:30:42 crc kubenswrapper[4811]: I0128 18:30:42.833549 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_5096fd9b-16e4-4922-a9a3-d3341a651a6b/openstack-network-exporter/0.log" Jan 28 18:30:42 crc kubenswrapper[4811]: I0128 18:30:42.982183 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_5096fd9b-16e4-4922-a9a3-d3341a651a6b/ovsdbserver-nb/0.log" Jan 28 18:30:43 crc kubenswrapper[4811]: I0128 18:30:43.047752 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_03cac693-8c06-4b2f-b025-6f10db9593cf/ovsdbserver-nb/0.log" Jan 28 18:30:43 crc kubenswrapper[4811]: I0128 18:30:43.063576 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_03cac693-8c06-4b2f-b025-6f10db9593cf/openstack-network-exporter/0.log" Jan 28 18:30:43 crc kubenswrapper[4811]: I0128 18:30:43.252017 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_0628dfb9-d653-4e10-922b-7f2633499758/openstack-network-exporter/0.log" Jan 28 18:30:43 crc kubenswrapper[4811]: I0128 18:30:43.331552 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_0628dfb9-d653-4e10-922b-7f2633499758/ovsdbserver-nb/0.log" Jan 28 18:30:43 crc kubenswrapper[4811]: I0128 18:30:43.451314 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_e8130062-77f2-441c-b3a7-ee6da0fba334/openstack-network-exporter/0.log" Jan 28 18:30:43 crc kubenswrapper[4811]: I0128 18:30:43.458940 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_e8130062-77f2-441c-b3a7-ee6da0fba334/ovsdbserver-sb/0.log" Jan 28 18:30:43 crc kubenswrapper[4811]: I0128 18:30:43.566013 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_9eed1297-7bf1-4b5a-a55d-36219d222d5e/openstack-network-exporter/0.log" Jan 28 18:30:43 crc kubenswrapper[4811]: I0128 18:30:43.738722 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_a106cdcc-25f5-4c62-aac6-1d9a4cfec240/openstack-network-exporter/0.log" Jan 28 18:30:43 crc kubenswrapper[4811]: I0128 18:30:43.748456 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_9eed1297-7bf1-4b5a-a55d-36219d222d5e/ovsdbserver-sb/0.log" Jan 28 18:30:43 crc kubenswrapper[4811]: I0128 18:30:43.879639 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_a106cdcc-25f5-4c62-aac6-1d9a4cfec240/ovsdbserver-sb/0.log" Jan 28 18:30:44 crc kubenswrapper[4811]: I0128 18:30:44.086691 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-7f44878d9d-49bh8_2fede79a-e5e1-4a4d-93d0-de61e60c2e43/placement-api/0.log" Jan 28 18:30:44 crc kubenswrapper[4811]: I0128 18:30:44.106619 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-7f44878d9d-49bh8_2fede79a-e5e1-4a4d-93d0-de61e60c2e43/placement-log/0.log" Jan 28 18:30:44 crc kubenswrapper[4811]: I0128 18:30:44.242646 4811 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_pre-adoption-validation-openstack-pre-adoption-openstack-cqr6j6_b4d54df3-1c51-49c0-af4a-0f02bd87d0e0/pre-adoption-validation-openstack-pre-adoption-openstack-cell1/0.log" Jan 28 18:30:44 crc kubenswrapper[4811]: I0128 18:30:44.373468 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_02f75eea-3b22-4f21-8f2c-c3f93423f33f/init-config-reloader/0.log" Jan 28 18:30:44 crc kubenswrapper[4811]: I0128 18:30:44.554825 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_02f75eea-3b22-4f21-8f2c-c3f93423f33f/config-reloader/0.log" Jan 28 18:30:44 crc kubenswrapper[4811]: I0128 18:30:44.603406 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_02f75eea-3b22-4f21-8f2c-c3f93423f33f/prometheus/0.log" Jan 28 18:30:44 crc kubenswrapper[4811]: I0128 18:30:44.611020 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_02f75eea-3b22-4f21-8f2c-c3f93423f33f/init-config-reloader/0.log" Jan 28 18:30:44 crc kubenswrapper[4811]: I0128 18:30:44.644941 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_02f75eea-3b22-4f21-8f2c-c3f93423f33f/thanos-sidecar/0.log" Jan 28 18:30:44 crc kubenswrapper[4811]: I0128 18:30:44.819965 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e7cd6fbc-98b4-42ff-aa55-b9608806b823/setup-container/0.log" Jan 28 18:30:45 crc kubenswrapper[4811]: I0128 18:30:45.068848 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e7cd6fbc-98b4-42ff-aa55-b9608806b823/setup-container/0.log" Jan 28 18:30:45 crc kubenswrapper[4811]: I0128 18:30:45.131758 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_22b21d12-b525-43e8-a649-a65f4be978ca/setup-container/0.log" Jan 28 18:30:45 crc kubenswrapper[4811]: I0128 18:30:45.165687 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e7cd6fbc-98b4-42ff-aa55-b9608806b823/rabbitmq/0.log" Jan 28 18:30:45 crc kubenswrapper[4811]: I0128 18:30:45.309840 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_22b21d12-b525-43e8-a649-a65f4be978ca/setup-container/0.log" Jan 28 18:30:45 crc kubenswrapper[4811]: I0128 18:30:45.392857 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-openstack-openstack-cell1-qhlxf_2ac47199-b6b7-4461-b5f9-d5901cbad638/reboot-os-openstack-openstack-cell1/0.log" Jan 28 18:30:45 crc kubenswrapper[4811]: I0128 18:30:45.465962 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_22b21d12-b525-43e8-a649-a65f4be978ca/rabbitmq/0.log" Jan 28 18:30:45 crc kubenswrapper[4811]: I0128 18:30:45.610005 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-openstack-openstack-cell1-wsjt7_cdc346e5-3330-426d-83e8-1ba1c3ff709b/run-os-openstack-openstack-cell1/0.log" Jan 28 18:30:45 crc kubenswrapper[4811]: I0128 18:30:45.645642 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_891a977b-9794-4c64-aabc-90555d95f8b3/memcached/0.log" Jan 28 18:30:45 crc kubenswrapper[4811]: I0128 18:30:45.738258 4811 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ssh-known-hosts-openstack-nxrrf_124756d7-a566-4c29-a30a-cb96ddf1dd6b/ssh-known-hosts-openstack/0.log" Jan 28 18:30:45 crc kubenswrapper[4811]: I0128 18:30:45.852079 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-openstack-openstack-cell1-pgzw9_b594ffd2-2b1d-40c4-9d18-9831dd6c9816/telemetry-openstack-openstack-cell1/0.log" Jan 28 18:30:46 crc kubenswrapper[4811]: I0128 18:30:46.197073 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tripleo-cleanup-tripleo-cleanup-openstack-cell1-wtkwb_69e817eb-c2f6-4173-be32-04233e446173/tripleo-cleanup-tripleo-cleanup-openstack-cell1/0.log" Jan 28 18:30:46 crc kubenswrapper[4811]: I0128 18:30:46.270717 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-openstack-openstack-cell1-4mwzh_d86a0acd-0347-442a-b9dc-ccbd1ef895e7/validate-network-openstack-openstack-cell1/0.log" Jan 28 18:30:50 crc kubenswrapper[4811]: I0128 18:30:50.339754 4811 scope.go:117] "RemoveContainer" containerID="763482efbb1dccdafcfbf0255fb0a75f16af0ca4d1583736aff621b10a3a2f9e" Jan 28 18:30:50 crc kubenswrapper[4811]: E0128 18:30:50.340854 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:31:04 crc kubenswrapper[4811]: I0128 18:31:04.340069 4811 scope.go:117] "RemoveContainer" containerID="763482efbb1dccdafcfbf0255fb0a75f16af0ca4d1583736aff621b10a3a2f9e" Jan 28 18:31:04 crc kubenswrapper[4811]: E0128 18:31:04.340831 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:31:11 crc kubenswrapper[4811]: I0128 18:31:11.228030 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7f86f8796f-flj9q_0cf7363c-9c82-4ab6-bd1c-d8cff679a3b3/manager/0.log" Jan 28 18:31:11 crc kubenswrapper[4811]: I0128 18:31:11.433989 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc_93b3aa9c-9717-4f9d-b535-eceb93b7eb9b/util/0.log" Jan 28 18:31:11 crc kubenswrapper[4811]: I0128 18:31:11.599814 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc_93b3aa9c-9717-4f9d-b535-eceb93b7eb9b/util/0.log" Jan 28 18:31:11 crc kubenswrapper[4811]: I0128 18:31:11.671724 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc_93b3aa9c-9717-4f9d-b535-eceb93b7eb9b/pull/0.log" Jan 28 18:31:11 crc kubenswrapper[4811]: I0128 18:31:11.671984 4811 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc_93b3aa9c-9717-4f9d-b535-eceb93b7eb9b/pull/0.log" Jan 28 18:31:11 crc kubenswrapper[4811]: I0128 18:31:11.874581 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc_93b3aa9c-9717-4f9d-b535-eceb93b7eb9b/pull/0.log" Jan 28 18:31:11 crc kubenswrapper[4811]: I0128 18:31:11.879113 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc_93b3aa9c-9717-4f9d-b535-eceb93b7eb9b/util/0.log" Jan 28 18:31:11 crc kubenswrapper[4811]: I0128 18:31:11.941234 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_c63724877025f1d18ba1fa29f8076dfd209b6bb3b67e44a6aa3755fab2vclvc_93b3aa9c-9717-4f9d-b535-eceb93b7eb9b/extract/0.log" Jan 28 18:31:12 crc kubenswrapper[4811]: I0128 18:31:12.141983 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-7478f7dbf9-zqk5g_5d2909d9-5499-4119-886c-d389257e85fa/manager/0.log" Jan 28 18:31:12 crc kubenswrapper[4811]: I0128 18:31:12.157952 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-b45d7bf98-9k7l8_f8bca6a7-6aca-4dd1-8c9d-a358f932eab3/manager/0.log" Jan 28 18:31:12 crc kubenswrapper[4811]: I0128 18:31:12.475778 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-594c8c9d5d-t4nkf_4fe875df-573b-4f98-bcaa-2f72a2a409cb/manager/0.log" Jan 28 18:31:12 crc kubenswrapper[4811]: I0128 18:31:12.538424 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-78fdd796fd-drtqb_4621fc20-e68f-4c29-a01b-e62b2eda190c/manager/0.log" Jan 28 18:31:12 crc kubenswrapper[4811]: I0128 18:31:12.708206 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-77d5c5b54f-q5dc6_f6539037-4781-4a9d-b13a-80e9c7f178ac/manager/0.log" Jan 28 18:31:13 crc kubenswrapper[4811]: I0128 18:31:13.024964 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-598f7747c9-v4znb_da7d2484-74a9-4915-b825-9c35586331e4/manager/0.log" Jan 28 18:31:13 crc kubenswrapper[4811]: I0128 18:31:13.296879 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-b8b6d4659-jxcrp_5e5c9061-59c7-4984-a93f-a61ef1b582ba/manager/0.log" Jan 28 18:31:13 crc kubenswrapper[4811]: I0128 18:31:13.389541 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-78c6999f6f-dr9ff_66f46681-a51b-4c9e-9cc0-0a604a8ce3c0/manager/0.log" Jan 28 18:31:13 crc kubenswrapper[4811]: I0128 18:31:13.451593 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-694cf4f878-92chv_e9614b09-655d-4426-8b74-772aa777cf4d/manager/0.log" Jan 28 18:31:13 crc kubenswrapper[4811]: I0128 18:31:13.663945 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-6b9fb5fdcb-trvzh_787f4d88-cda8-4515-a1b8-db763a44554e/manager/0.log" Jan 28 18:31:13 crc kubenswrapper[4811]: I0128 18:31:13.740329 4811 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-78d58447c5-9x8xg_fc9fd651-c991-4202-911d-26e9b037c636/manager/0.log" Jan 28 18:31:14 crc kubenswrapper[4811]: I0128 18:31:14.304008 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-5f4cd88d46-5kczk_12256989-9520-4379-9e55-3c11ab97993c/manager/0.log" Jan 28 18:31:14 crc kubenswrapper[4811]: I0128 18:31:14.452222 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-7bdb645866-mv6x7_7b0b495e-25ab-48c9-9ee7-494295ed7316/manager/0.log" Jan 28 18:31:14 crc kubenswrapper[4811]: I0128 18:31:14.521026 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-5b5d4999dcbp9gd_59c4af9d-14e6-4d10-aa40-5f300ca20242/manager/0.log" Jan 28 18:31:15 crc kubenswrapper[4811]: I0128 18:31:15.016774 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-init-554f878768-n49vx_aa18955f-899e-417a-8bd5-19a0b7e809e3/operator/0.log" Jan 28 18:31:15 crc kubenswrapper[4811]: I0128 18:31:15.232726 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-v8ftk_249e1e57-08ef-4961-b1a0-f95207b38667/registry-server/0.log" Jan 28 18:31:15 crc kubenswrapper[4811]: I0128 18:31:15.427202 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-6f75f45d54-jxjn5_ec09ccdf-ab1a-46ef-9a34-0d25f21e5205/manager/0.log" Jan 28 18:31:15 crc kubenswrapper[4811]: I0128 18:31:15.666484 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-79d5ccc684-znx6t_c2dcc00d-c6c6-4546-8aed-b1ccceb9534f/manager/0.log" Jan 28 18:31:15 crc kubenswrapper[4811]: I0128 18:31:15.698733 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-8vvwj_7808e709-5b75-45bb-bf62-5756261a4279/operator/0.log" Jan 28 18:31:16 crc kubenswrapper[4811]: I0128 18:31:16.063160 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-547cbdb99f-r7j2f_6fe8a24f-c7b1-4b27-908e-996e66803e0a/manager/0.log" Jan 28 18:31:16 crc kubenswrapper[4811]: I0128 18:31:16.233564 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-69797bbcbd-22cf4_548c9a9e-14a4-44a0-a7d0-f4ccca0bcdd0/manager/0.log" Jan 28 18:31:16 crc kubenswrapper[4811]: I0128 18:31:16.540234 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-564965969-lxmgs_003629ab-e0b0-49b2-a4ab-f9cf7b67b588/manager/0.log" Jan 28 18:31:16 crc kubenswrapper[4811]: I0128 18:31:16.606169 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-85cd9769bb-gkhtq_def156ed-ad39-4f23-aa51-bd1d36b35543/manager/0.log" Jan 28 18:31:17 crc kubenswrapper[4811]: I0128 18:31:17.604321 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-9f67d7-pqwwm_1218f34b-3146-4128-a086-4848855a5c92/manager/0.log" Jan 28 18:31:19 crc kubenswrapper[4811]: I0128 18:31:19.341564 4811 scope.go:117] "RemoveContainer" 
containerID="763482efbb1dccdafcfbf0255fb0a75f16af0ca4d1583736aff621b10a3a2f9e" Jan 28 18:31:19 crc kubenswrapper[4811]: E0128 18:31:19.342525 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:31:34 crc kubenswrapper[4811]: I0128 18:31:34.340468 4811 scope.go:117] "RemoveContainer" containerID="763482efbb1dccdafcfbf0255fb0a75f16af0ca4d1583736aff621b10a3a2f9e" Jan 28 18:31:34 crc kubenswrapper[4811]: E0128 18:31:34.341142 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:31:40 crc kubenswrapper[4811]: I0128 18:31:40.648635 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-shsbw_c6d392c6-2fcb-4d68-81da-6cb52a6a5e8f/control-plane-machine-set-operator/0.log" Jan 28 18:31:40 crc kubenswrapper[4811]: I0128 18:31:40.833189 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-c4zgz_0439276f-d9ad-40f1-aa18-d05a1ee2b768/kube-rbac-proxy/0.log" Jan 28 18:31:40 crc kubenswrapper[4811]: I0128 18:31:40.840115 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-c4zgz_0439276f-d9ad-40f1-aa18-d05a1ee2b768/machine-api-operator/0.log" Jan 28 18:31:47 crc kubenswrapper[4811]: I0128 18:31:47.341133 4811 scope.go:117] "RemoveContainer" containerID="763482efbb1dccdafcfbf0255fb0a75f16af0ca4d1583736aff621b10a3a2f9e" Jan 28 18:31:47 crc kubenswrapper[4811]: E0128 18:31:47.341931 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:31:55 crc kubenswrapper[4811]: I0128 18:31:55.781956 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-ldbdm_d01d6ff4-be2c-42c2-92a6-fe8550d989b8/cert-manager-controller/0.log" Jan 28 18:31:56 crc kubenswrapper[4811]: I0128 18:31:56.000581 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-hz2ds_3cbb2167-726d-4f37-a7bf-865d4e49c4f8/cert-manager-cainjector/0.log" Jan 28 18:31:56 crc kubenswrapper[4811]: I0128 18:31:56.125741 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-thcwr_68ad0985-ed35-40bc-8606-6ea7ad59be62/cert-manager-webhook/0.log" Jan 28 18:31:58 crc kubenswrapper[4811]: I0128 18:31:58.346986 4811 scope.go:117] "RemoveContainer" 
containerID="763482efbb1dccdafcfbf0255fb0a75f16af0ca4d1583736aff621b10a3a2f9e" Jan 28 18:31:58 crc kubenswrapper[4811]: E0128 18:31:58.347584 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:32:09 crc kubenswrapper[4811]: I0128 18:32:09.342498 4811 scope.go:117] "RemoveContainer" containerID="763482efbb1dccdafcfbf0255fb0a75f16af0ca4d1583736aff621b10a3a2f9e" Jan 28 18:32:09 crc kubenswrapper[4811]: E0128 18:32:09.343366 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:32:11 crc kubenswrapper[4811]: I0128 18:32:11.737638 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7754f76f8b-ncskp_742554e2-f5ea-4701-b8cf-c81f6dd0ae1b/nmstate-console-plugin/0.log" Jan 28 18:32:11 crc kubenswrapper[4811]: I0128 18:32:11.907846 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-p8pwn_51327a90-d59d-4d64-899c-5ba26ad301c3/nmstate-handler/0.log" Jan 28 18:32:11 crc kubenswrapper[4811]: I0128 18:32:11.986759 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-959qf_a1a9218c-ccf7-4ded-812b-6d3fe2db137c/kube-rbac-proxy/0.log" Jan 28 18:32:12 crc kubenswrapper[4811]: I0128 18:32:12.059135 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-959qf_a1a9218c-ccf7-4ded-812b-6d3fe2db137c/nmstate-metrics/0.log" Jan 28 18:32:12 crc kubenswrapper[4811]: I0128 18:32:12.273474 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-8474b5b9d8-fqkmg_0f288174-12e8-4627-9106-3f96f5368b58/nmstate-webhook/0.log" Jan 28 18:32:12 crc kubenswrapper[4811]: I0128 18:32:12.275940 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-646758c888-wt5f7_130754b8-3e75-4b70-b0a9-92f912c250f0/nmstate-operator/0.log" Jan 28 18:32:20 crc kubenswrapper[4811]: I0128 18:32:20.340113 4811 scope.go:117] "RemoveContainer" containerID="763482efbb1dccdafcfbf0255fb0a75f16af0ca4d1583736aff621b10a3a2f9e" Jan 28 18:32:20 crc kubenswrapper[4811]: E0128 18:32:20.340946 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" Jan 28 18:32:29 crc kubenswrapper[4811]: I0128 18:32:29.040617 4811 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-wpqgl_05eff175-d1b2-4fde-b7ff-c94da0a5af2a/prometheus-operator/0.log" Jan 28 18:32:29 crc kubenswrapper[4811]: I0128 18:32:29.222991 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-64974df957-c8hws_ad3a640d-e3e1-4a4f-ad76-e728f8b73214/prometheus-operator-admission-webhook/0.log" Jan 28 18:32:29 crc kubenswrapper[4811]: I0128 18:32:29.316570 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-64974df957-pnkd5_c4f3ab08-1768-4114-98e0-646b6657c0ec/prometheus-operator-admission-webhook/0.log" Jan 28 18:32:29 crc kubenswrapper[4811]: I0128 18:32:29.437169 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-zfnch_0f9309ad-f7c3-4e57-a1e4-fba21275d588/operator/0.log" Jan 28 18:32:30 crc kubenswrapper[4811]: I0128 18:32:30.338507 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-8fslw_7c526f6a-a27b-4d38-b4a1-c70fed01753f/perses-operator/0.log" Jan 28 18:32:35 crc kubenswrapper[4811]: I0128 18:32:35.340022 4811 scope.go:117] "RemoveContainer" containerID="763482efbb1dccdafcfbf0255fb0a75f16af0ca4d1583736aff621b10a3a2f9e" Jan 28 18:32:36 crc kubenswrapper[4811]: I0128 18:32:36.203324 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"5de93d3b64bb728d8830015da463664bfdb31100434662da7cfd020c44890fc2"} Jan 28 18:32:46 crc kubenswrapper[4811]: I0128 18:32:46.493787 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-br9ll_0039a23f-73c0-40c3-9c79-3d747d7d9896/kube-rbac-proxy/0.log" Jan 28 18:32:46 crc kubenswrapper[4811]: I0128 18:32:46.768270 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jn9r9_65b6bd46-8c0d-449a-a7b6-42c8c7933f64/cp-frr-files/0.log" Jan 28 18:32:46 crc kubenswrapper[4811]: I0128 18:32:46.782584 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-br9ll_0039a23f-73c0-40c3-9c79-3d747d7d9896/controller/0.log" Jan 28 18:32:47 crc kubenswrapper[4811]: I0128 18:32:47.000664 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jn9r9_65b6bd46-8c0d-449a-a7b6-42c8c7933f64/cp-reloader/0.log" Jan 28 18:32:47 crc kubenswrapper[4811]: I0128 18:32:47.010950 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jn9r9_65b6bd46-8c0d-449a-a7b6-42c8c7933f64/cp-metrics/0.log" Jan 28 18:32:47 crc kubenswrapper[4811]: I0128 18:32:47.026655 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jn9r9_65b6bd46-8c0d-449a-a7b6-42c8c7933f64/cp-frr-files/0.log" Jan 28 18:32:47 crc kubenswrapper[4811]: I0128 18:32:47.038336 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jn9r9_65b6bd46-8c0d-449a-a7b6-42c8c7933f64/cp-reloader/0.log" Jan 28 18:32:47 crc kubenswrapper[4811]: I0128 18:32:47.216397 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jn9r9_65b6bd46-8c0d-449a-a7b6-42c8c7933f64/cp-frr-files/0.log" Jan 28 18:32:47 crc kubenswrapper[4811]: I0128 18:32:47.262023 4811 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-jn9r9_65b6bd46-8c0d-449a-a7b6-42c8c7933f64/cp-reloader/0.log" Jan 28 18:32:47 crc kubenswrapper[4811]: I0128 18:32:47.297278 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jn9r9_65b6bd46-8c0d-449a-a7b6-42c8c7933f64/cp-metrics/0.log" Jan 28 18:32:47 crc kubenswrapper[4811]: I0128 18:32:47.297286 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jn9r9_65b6bd46-8c0d-449a-a7b6-42c8c7933f64/cp-metrics/0.log" Jan 28 18:32:47 crc kubenswrapper[4811]: I0128 18:32:47.515321 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jn9r9_65b6bd46-8c0d-449a-a7b6-42c8c7933f64/controller/0.log" Jan 28 18:32:47 crc kubenswrapper[4811]: I0128 18:32:47.517841 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jn9r9_65b6bd46-8c0d-449a-a7b6-42c8c7933f64/cp-reloader/0.log" Jan 28 18:32:47 crc kubenswrapper[4811]: I0128 18:32:47.535637 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jn9r9_65b6bd46-8c0d-449a-a7b6-42c8c7933f64/cp-frr-files/0.log" Jan 28 18:32:47 crc kubenswrapper[4811]: I0128 18:32:47.535775 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jn9r9_65b6bd46-8c0d-449a-a7b6-42c8c7933f64/cp-metrics/0.log" Jan 28 18:32:47 crc kubenswrapper[4811]: I0128 18:32:47.713649 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jn9r9_65b6bd46-8c0d-449a-a7b6-42c8c7933f64/kube-rbac-proxy-frr/0.log" Jan 28 18:32:47 crc kubenswrapper[4811]: I0128 18:32:47.736882 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jn9r9_65b6bd46-8c0d-449a-a7b6-42c8c7933f64/kube-rbac-proxy/0.log" Jan 28 18:32:47 crc kubenswrapper[4811]: I0128 18:32:47.772013 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jn9r9_65b6bd46-8c0d-449a-a7b6-42c8c7933f64/frr-metrics/0.log" Jan 28 18:32:47 crc kubenswrapper[4811]: I0128 18:32:47.919413 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jn9r9_65b6bd46-8c0d-449a-a7b6-42c8c7933f64/reloader/0.log" Jan 28 18:32:48 crc kubenswrapper[4811]: I0128 18:32:48.065581 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-kxvh5_f1bfcb26-2369-429c-bf4d-1c4234d9c28c/frr-k8s-webhook-server/0.log" Jan 28 18:32:48 crc kubenswrapper[4811]: I0128 18:32:48.268646 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-6fffdf7c57-wb2qh_a123b5e8-6e64-4200-8a29-e49d7402cf8f/manager/0.log" Jan 28 18:32:48 crc kubenswrapper[4811]: I0128 18:32:48.480801 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-75fc857c96-mkr4b_78d59be8-2ce7-49b2-80ed-42ec32101487/webhook-server/0.log" Jan 28 18:32:48 crc kubenswrapper[4811]: I0128 18:32:48.556888 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-642th_28b5a61d-bf80-4508-bb79-a6791d1c6d57/kube-rbac-proxy/0.log" Jan 28 18:32:49 crc kubenswrapper[4811]: I0128 18:32:49.580089 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-642th_28b5a61d-bf80-4508-bb79-a6791d1c6d57/speaker/0.log" Jan 28 18:32:51 crc kubenswrapper[4811]: I0128 18:32:51.005609 4811 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-jn9r9_65b6bd46-8c0d-449a-a7b6-42c8c7933f64/frr/0.log" Jan 28 18:33:03 crc kubenswrapper[4811]: I0128 18:33:03.830620 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb_720ab524-c1a3-4203-8533-1432535163fc/util/0.log" Jan 28 18:33:04 crc kubenswrapper[4811]: I0128 18:33:04.030424 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb_720ab524-c1a3-4203-8533-1432535163fc/util/0.log" Jan 28 18:33:04 crc kubenswrapper[4811]: I0128 18:33:04.044493 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb_720ab524-c1a3-4203-8533-1432535163fc/pull/0.log" Jan 28 18:33:04 crc kubenswrapper[4811]: I0128 18:33:04.150246 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb_720ab524-c1a3-4203-8533-1432535163fc/pull/0.log" Jan 28 18:33:04 crc kubenswrapper[4811]: I0128 18:33:04.340016 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb_720ab524-c1a3-4203-8533-1432535163fc/util/0.log" Jan 28 18:33:04 crc kubenswrapper[4811]: I0128 18:33:04.363076 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb_720ab524-c1a3-4203-8533-1432535163fc/pull/0.log" Jan 28 18:33:04 crc kubenswrapper[4811]: I0128 18:33:04.395347 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a466jb_720ab524-c1a3-4203-8533-1432535163fc/extract/0.log" Jan 28 18:33:04 crc kubenswrapper[4811]: I0128 18:33:04.552802 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m_97d2fe47-1c05-4300-a476-1bf42f0f6c5a/util/0.log" Jan 28 18:33:04 crc kubenswrapper[4811]: I0128 18:33:04.713946 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m_97d2fe47-1c05-4300-a476-1bf42f0f6c5a/util/0.log" Jan 28 18:33:04 crc kubenswrapper[4811]: I0128 18:33:04.721078 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m_97d2fe47-1c05-4300-a476-1bf42f0f6c5a/pull/0.log" Jan 28 18:33:04 crc kubenswrapper[4811]: I0128 18:33:04.759147 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m_97d2fe47-1c05-4300-a476-1bf42f0f6c5a/pull/0.log" Jan 28 18:33:04 crc kubenswrapper[4811]: I0128 18:33:04.885992 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m_97d2fe47-1c05-4300-a476-1bf42f0f6c5a/util/0.log" Jan 28 18:33:04 crc kubenswrapper[4811]: I0128 18:33:04.920137 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m_97d2fe47-1c05-4300-a476-1bf42f0f6c5a/extract/0.log" Jan 28 18:33:04 crc kubenswrapper[4811]: 
I0128 18:33:04.930731 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccpp6m_97d2fe47-1c05-4300-a476-1bf42f0f6c5a/pull/0.log" Jan 28 18:33:05 crc kubenswrapper[4811]: I0128 18:33:05.099081 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c_4a2037fd-b35c-4712-9b1f-8cc7586fcabe/util/0.log" Jan 28 18:33:05 crc kubenswrapper[4811]: I0128 18:33:05.286963 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c_4a2037fd-b35c-4712-9b1f-8cc7586fcabe/pull/0.log" Jan 28 18:33:05 crc kubenswrapper[4811]: I0128 18:33:05.293649 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c_4a2037fd-b35c-4712-9b1f-8cc7586fcabe/pull/0.log" Jan 28 18:33:05 crc kubenswrapper[4811]: I0128 18:33:05.310260 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c_4a2037fd-b35c-4712-9b1f-8cc7586fcabe/util/0.log" Jan 28 18:33:05 crc kubenswrapper[4811]: I0128 18:33:05.511194 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c_4a2037fd-b35c-4712-9b1f-8cc7586fcabe/pull/0.log" Jan 28 18:33:05 crc kubenswrapper[4811]: I0128 18:33:05.549329 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c_4a2037fd-b35c-4712-9b1f-8cc7586fcabe/extract/0.log" Jan 28 18:33:05 crc kubenswrapper[4811]: I0128 18:33:05.564862 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713nvd5c_4a2037fd-b35c-4712-9b1f-8cc7586fcabe/util/0.log" Jan 28 18:33:05 crc kubenswrapper[4811]: I0128 18:33:05.689633 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws_5a01ed82-4f73-4fc5-8f81-b795ed4877c0/util/0.log" Jan 28 18:33:05 crc kubenswrapper[4811]: I0128 18:33:05.857135 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws_5a01ed82-4f73-4fc5-8f81-b795ed4877c0/pull/0.log" Jan 28 18:33:05 crc kubenswrapper[4811]: I0128 18:33:05.902145 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws_5a01ed82-4f73-4fc5-8f81-b795ed4877c0/util/0.log" Jan 28 18:33:05 crc kubenswrapper[4811]: I0128 18:33:05.909347 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws_5a01ed82-4f73-4fc5-8f81-b795ed4877c0/pull/0.log" Jan 28 18:33:06 crc kubenswrapper[4811]: I0128 18:33:06.042751 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws_5a01ed82-4f73-4fc5-8f81-b795ed4877c0/pull/0.log" Jan 28 18:33:06 crc kubenswrapper[4811]: I0128 18:33:06.045940 4811 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws_5a01ed82-4f73-4fc5-8f81-b795ed4877c0/util/0.log" Jan 28 18:33:06 crc kubenswrapper[4811]: I0128 18:33:06.083313 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08mg8ws_5a01ed82-4f73-4fc5-8f81-b795ed4877c0/extract/0.log" Jan 28 18:33:06 crc kubenswrapper[4811]: I0128 18:33:06.241659 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-spdrn_70389e06-5bb1-421b-a403-7f1c5147bedb/extract-utilities/0.log" Jan 28 18:33:06 crc kubenswrapper[4811]: I0128 18:33:06.467906 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-spdrn_70389e06-5bb1-421b-a403-7f1c5147bedb/extract-content/0.log" Jan 28 18:33:06 crc kubenswrapper[4811]: I0128 18:33:06.482314 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-spdrn_70389e06-5bb1-421b-a403-7f1c5147bedb/extract-content/0.log" Jan 28 18:33:06 crc kubenswrapper[4811]: I0128 18:33:06.483052 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-spdrn_70389e06-5bb1-421b-a403-7f1c5147bedb/extract-utilities/0.log" Jan 28 18:33:06 crc kubenswrapper[4811]: I0128 18:33:06.652804 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-spdrn_70389e06-5bb1-421b-a403-7f1c5147bedb/extract-content/0.log" Jan 28 18:33:06 crc kubenswrapper[4811]: I0128 18:33:06.699417 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-spdrn_70389e06-5bb1-421b-a403-7f1c5147bedb/extract-utilities/0.log" Jan 28 18:33:06 crc kubenswrapper[4811]: I0128 18:33:06.844972 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-grxks_8f5fbe9d-0807-4528-a821-6b3ecd64236f/extract-utilities/0.log" Jan 28 18:33:07 crc kubenswrapper[4811]: I0128 18:33:07.211314 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-grxks_8f5fbe9d-0807-4528-a821-6b3ecd64236f/extract-content/0.log" Jan 28 18:33:07 crc kubenswrapper[4811]: I0128 18:33:07.238350 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-grxks_8f5fbe9d-0807-4528-a821-6b3ecd64236f/extract-content/0.log" Jan 28 18:33:07 crc kubenswrapper[4811]: I0128 18:33:07.278255 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-grxks_8f5fbe9d-0807-4528-a821-6b3ecd64236f/extract-utilities/0.log" Jan 28 18:33:07 crc kubenswrapper[4811]: I0128 18:33:07.470356 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-grxks_8f5fbe9d-0807-4528-a821-6b3ecd64236f/extract-utilities/0.log" Jan 28 18:33:07 crc kubenswrapper[4811]: I0128 18:33:07.476611 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-grxks_8f5fbe9d-0807-4528-a821-6b3ecd64236f/extract-content/0.log" Jan 28 18:33:07 crc kubenswrapper[4811]: I0128 18:33:07.717670 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-zcd7b_35fbe6f3-3900-4d40-9525-b171b73ea6b1/marketplace-operator/0.log" Jan 28 18:33:08 crc kubenswrapper[4811]: I0128 
18:33:08.014755 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tkqqj_38348412-bdb3-4e83-aa59-cb46112a648e/extract-utilities/0.log" Jan 28 18:33:08 crc kubenswrapper[4811]: I0128 18:33:08.121850 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tkqqj_38348412-bdb3-4e83-aa59-cb46112a648e/extract-utilities/0.log" Jan 28 18:33:08 crc kubenswrapper[4811]: I0128 18:33:08.154334 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tkqqj_38348412-bdb3-4e83-aa59-cb46112a648e/extract-content/0.log" Jan 28 18:33:08 crc kubenswrapper[4811]: I0128 18:33:08.243837 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tkqqj_38348412-bdb3-4e83-aa59-cb46112a648e/extract-content/0.log" Jan 28 18:33:08 crc kubenswrapper[4811]: I0128 18:33:08.369021 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tkqqj_38348412-bdb3-4e83-aa59-cb46112a648e/extract-utilities/0.log" Jan 28 18:33:08 crc kubenswrapper[4811]: I0128 18:33:08.463307 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tkqqj_38348412-bdb3-4e83-aa59-cb46112a648e/extract-content/0.log" Jan 28 18:33:08 crc kubenswrapper[4811]: I0128 18:33:08.692265 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-mnp8r_7a12e131-89ad-4b37-b79e-2ba9e40122a3/extract-utilities/0.log" Jan 28 18:33:08 crc kubenswrapper[4811]: I0128 18:33:08.925157 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-mnp8r_7a12e131-89ad-4b37-b79e-2ba9e40122a3/extract-utilities/0.log" Jan 28 18:33:08 crc kubenswrapper[4811]: I0128 18:33:08.938511 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-grxks_8f5fbe9d-0807-4528-a821-6b3ecd64236f/registry-server/0.log" Jan 28 18:33:08 crc kubenswrapper[4811]: I0128 18:33:08.941744 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-mnp8r_7a12e131-89ad-4b37-b79e-2ba9e40122a3/extract-content/0.log" Jan 28 18:33:09 crc kubenswrapper[4811]: I0128 18:33:09.027347 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tkqqj_38348412-bdb3-4e83-aa59-cb46112a648e/registry-server/0.log" Jan 28 18:33:09 crc kubenswrapper[4811]: I0128 18:33:09.123772 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-mnp8r_7a12e131-89ad-4b37-b79e-2ba9e40122a3/extract-content/0.log" Jan 28 18:33:09 crc kubenswrapper[4811]: I0128 18:33:09.130307 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-spdrn_70389e06-5bb1-421b-a403-7f1c5147bedb/registry-server/0.log" Jan 28 18:33:09 crc kubenswrapper[4811]: I0128 18:33:09.327512 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-mnp8r_7a12e131-89ad-4b37-b79e-2ba9e40122a3/extract-utilities/0.log" Jan 28 18:33:09 crc kubenswrapper[4811]: I0128 18:33:09.374321 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-mnp8r_7a12e131-89ad-4b37-b79e-2ba9e40122a3/extract-content/0.log" Jan 28 18:33:10 crc kubenswrapper[4811]: I0128 18:33:10.656079 4811 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-mnp8r_7a12e131-89ad-4b37-b79e-2ba9e40122a3/registry-server/0.log" Jan 28 18:33:21 crc kubenswrapper[4811]: I0128 18:33:21.865394 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-wpqgl_05eff175-d1b2-4fde-b7ff-c94da0a5af2a/prometheus-operator/0.log" Jan 28 18:33:21 crc kubenswrapper[4811]: I0128 18:33:21.925061 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-64974df957-pnkd5_c4f3ab08-1768-4114-98e0-646b6657c0ec/prometheus-operator-admission-webhook/0.log" Jan 28 18:33:21 crc kubenswrapper[4811]: I0128 18:33:21.945445 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-64974df957-c8hws_ad3a640d-e3e1-4a4f-ad76-e728f8b73214/prometheus-operator-admission-webhook/0.log" Jan 28 18:33:22 crc kubenswrapper[4811]: I0128 18:33:22.126940 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-zfnch_0f9309ad-f7c3-4e57-a1e4-fba21275d588/operator/0.log" Jan 28 18:33:22 crc kubenswrapper[4811]: I0128 18:33:22.130808 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-8fslw_7c526f6a-a27b-4d38-b4a1-c70fed01753f/perses-operator/0.log" Jan 28 18:35:03 crc kubenswrapper[4811]: I0128 18:35:03.086790 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 18:35:03 crc kubenswrapper[4811]: I0128 18:35:03.087352 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 18:35:33 crc kubenswrapper[4811]: I0128 18:35:33.087243 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 28 18:35:33 crc kubenswrapper[4811]: I0128 18:35:33.087791 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 28 18:35:42 crc kubenswrapper[4811]: I0128 18:35:42.172552 4811 generic.go:334] "Generic (PLEG): container finished" podID="40ce1205-d2e2-4470-9fda-dfcdaef4b274" containerID="7d80801892257fe050a9bb26caa1ec12a3ff5e4b1cfc21770972a44ddbc25ec9" exitCode=0 Jan 28 18:35:42 crc kubenswrapper[4811]: I0128 18:35:42.172630 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-lqzt5/must-gather-zrjtz" event={"ID":"40ce1205-d2e2-4470-9fda-dfcdaef4b274","Type":"ContainerDied","Data":"7d80801892257fe050a9bb26caa1ec12a3ff5e4b1cfc21770972a44ddbc25ec9"} Jan 28 18:35:42 crc 
kubenswrapper[4811]: I0128 18:35:42.173832 4811 scope.go:117] "RemoveContainer" containerID="7d80801892257fe050a9bb26caa1ec12a3ff5e4b1cfc21770972a44ddbc25ec9" Jan 28 18:35:43 crc kubenswrapper[4811]: I0128 18:35:43.180569 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-lqzt5_must-gather-zrjtz_40ce1205-d2e2-4470-9fda-dfcdaef4b274/gather/0.log" Jan 28 18:35:52 crc kubenswrapper[4811]: I0128 18:35:52.382704 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-lqzt5/must-gather-zrjtz"] Jan 28 18:35:52 crc kubenswrapper[4811]: I0128 18:35:52.383672 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-lqzt5/must-gather-zrjtz" podUID="40ce1205-d2e2-4470-9fda-dfcdaef4b274" containerName="copy" containerID="cri-o://b418a834b6d49707cd038d67e64272bb90d78c5945184acacd7932e212035c3d" gracePeriod=2 Jan 28 18:35:52 crc kubenswrapper[4811]: I0128 18:35:52.405194 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-lqzt5/must-gather-zrjtz"] Jan 28 18:35:52 crc kubenswrapper[4811]: I0128 18:35:52.888130 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-lqzt5_must-gather-zrjtz_40ce1205-d2e2-4470-9fda-dfcdaef4b274/copy/0.log" Jan 28 18:35:52 crc kubenswrapper[4811]: I0128 18:35:52.889832 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-lqzt5/must-gather-zrjtz" Jan 28 18:35:52 crc kubenswrapper[4811]: I0128 18:35:52.978315 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vtrhm\" (UniqueName: \"kubernetes.io/projected/40ce1205-d2e2-4470-9fda-dfcdaef4b274-kube-api-access-vtrhm\") pod \"40ce1205-d2e2-4470-9fda-dfcdaef4b274\" (UID: \"40ce1205-d2e2-4470-9fda-dfcdaef4b274\") " Jan 28 18:35:52 crc kubenswrapper[4811]: I0128 18:35:52.978384 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/40ce1205-d2e2-4470-9fda-dfcdaef4b274-must-gather-output\") pod \"40ce1205-d2e2-4470-9fda-dfcdaef4b274\" (UID: \"40ce1205-d2e2-4470-9fda-dfcdaef4b274\") " Jan 28 18:35:52 crc kubenswrapper[4811]: I0128 18:35:52.983841 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40ce1205-d2e2-4470-9fda-dfcdaef4b274-kube-api-access-vtrhm" (OuterVolumeSpecName: "kube-api-access-vtrhm") pod "40ce1205-d2e2-4470-9fda-dfcdaef4b274" (UID: "40ce1205-d2e2-4470-9fda-dfcdaef4b274"). InnerVolumeSpecName "kube-api-access-vtrhm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 28 18:35:53 crc kubenswrapper[4811]: I0128 18:35:53.081761 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vtrhm\" (UniqueName: \"kubernetes.io/projected/40ce1205-d2e2-4470-9fda-dfcdaef4b274-kube-api-access-vtrhm\") on node \"crc\" DevicePath \"\"" Jan 28 18:35:53 crc kubenswrapper[4811]: I0128 18:35:53.178211 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40ce1205-d2e2-4470-9fda-dfcdaef4b274-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "40ce1205-d2e2-4470-9fda-dfcdaef4b274" (UID: "40ce1205-d2e2-4470-9fda-dfcdaef4b274"). InnerVolumeSpecName "must-gather-output". 
Jan 28 18:35:53 crc kubenswrapper[4811]: I0128 18:35:53.183413 4811 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/40ce1205-d2e2-4470-9fda-dfcdaef4b274-must-gather-output\") on node \"crc\" DevicePath \"\""
Jan 28 18:35:53 crc kubenswrapper[4811]: I0128 18:35:53.297013 4811 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-lqzt5_must-gather-zrjtz_40ce1205-d2e2-4470-9fda-dfcdaef4b274/copy/0.log"
Jan 28 18:35:53 crc kubenswrapper[4811]: I0128 18:35:53.297507 4811 generic.go:334] "Generic (PLEG): container finished" podID="40ce1205-d2e2-4470-9fda-dfcdaef4b274" containerID="b418a834b6d49707cd038d67e64272bb90d78c5945184acacd7932e212035c3d" exitCode=143
Jan 28 18:35:53 crc kubenswrapper[4811]: I0128 18:35:53.297562 4811 scope.go:117] "RemoveContainer" containerID="b418a834b6d49707cd038d67e64272bb90d78c5945184acacd7932e212035c3d"
Jan 28 18:35:53 crc kubenswrapper[4811]: I0128 18:35:53.297727 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-lqzt5/must-gather-zrjtz"
Jan 28 18:35:53 crc kubenswrapper[4811]: I0128 18:35:53.353538 4811 scope.go:117] "RemoveContainer" containerID="7d80801892257fe050a9bb26caa1ec12a3ff5e4b1cfc21770972a44ddbc25ec9"
Jan 28 18:35:53 crc kubenswrapper[4811]: I0128 18:35:53.533961 4811 scope.go:117] "RemoveContainer" containerID="b418a834b6d49707cd038d67e64272bb90d78c5945184acacd7932e212035c3d"
Jan 28 18:35:53 crc kubenswrapper[4811]: E0128 18:35:53.534347 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b418a834b6d49707cd038d67e64272bb90d78c5945184acacd7932e212035c3d\": container with ID starting with b418a834b6d49707cd038d67e64272bb90d78c5945184acacd7932e212035c3d not found: ID does not exist" containerID="b418a834b6d49707cd038d67e64272bb90d78c5945184acacd7932e212035c3d"
Jan 28 18:35:53 crc kubenswrapper[4811]: I0128 18:35:53.534377 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b418a834b6d49707cd038d67e64272bb90d78c5945184acacd7932e212035c3d"} err="failed to get container status \"b418a834b6d49707cd038d67e64272bb90d78c5945184acacd7932e212035c3d\": rpc error: code = NotFound desc = could not find container \"b418a834b6d49707cd038d67e64272bb90d78c5945184acacd7932e212035c3d\": container with ID starting with b418a834b6d49707cd038d67e64272bb90d78c5945184acacd7932e212035c3d not found: ID does not exist"
Jan 28 18:35:53 crc kubenswrapper[4811]: I0128 18:35:53.534513 4811 scope.go:117] "RemoveContainer" containerID="7d80801892257fe050a9bb26caa1ec12a3ff5e4b1cfc21770972a44ddbc25ec9"
Jan 28 18:35:53 crc kubenswrapper[4811]: E0128 18:35:53.534720 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d80801892257fe050a9bb26caa1ec12a3ff5e4b1cfc21770972a44ddbc25ec9\": container with ID starting with 7d80801892257fe050a9bb26caa1ec12a3ff5e4b1cfc21770972a44ddbc25ec9 not found: ID does not exist" containerID="7d80801892257fe050a9bb26caa1ec12a3ff5e4b1cfc21770972a44ddbc25ec9"
Jan 28 18:35:53 crc kubenswrapper[4811]: I0128 18:35:53.534742 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d80801892257fe050a9bb26caa1ec12a3ff5e4b1cfc21770972a44ddbc25ec9"} err="failed to get container status \"7d80801892257fe050a9bb26caa1ec12a3ff5e4b1cfc21770972a44ddbc25ec9\": rpc error: code = NotFound desc = could not find container \"7d80801892257fe050a9bb26caa1ec12a3ff5e4b1cfc21770972a44ddbc25ec9\": container with ID starting with 7d80801892257fe050a9bb26caa1ec12a3ff5e4b1cfc21770972a44ddbc25ec9 not found: ID does not exist"
Jan 28 18:35:54 crc kubenswrapper[4811]: I0128 18:35:54.357328 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40ce1205-d2e2-4470-9fda-dfcdaef4b274" path="/var/lib/kubelet/pods/40ce1205-d2e2-4470-9fda-dfcdaef4b274/volumes"
Jan 28 18:36:03 crc kubenswrapper[4811]: I0128 18:36:03.087802 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 18:36:03 crc kubenswrapper[4811]: I0128 18:36:03.088485 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 18:36:03 crc kubenswrapper[4811]: I0128 18:36:03.088536 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6"
Jan 28 18:36:03 crc kubenswrapper[4811]: I0128 18:36:03.090480 4811 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5de93d3b64bb728d8830015da463664bfdb31100434662da7cfd020c44890fc2"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 28 18:36:03 crc kubenswrapper[4811]: I0128 18:36:03.090567 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://5de93d3b64bb728d8830015da463664bfdb31100434662da7cfd020c44890fc2" gracePeriod=600
Jan 28 18:36:03 crc kubenswrapper[4811]: I0128 18:36:03.409036 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="5de93d3b64bb728d8830015da463664bfdb31100434662da7cfd020c44890fc2" exitCode=0
Jan 28 18:36:03 crc kubenswrapper[4811]: I0128 18:36:03.409114 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"5de93d3b64bb728d8830015da463664bfdb31100434662da7cfd020c44890fc2"}
Jan 28 18:36:03 crc kubenswrapper[4811]: I0128 18:36:03.409598 4811 scope.go:117] "RemoveContainer" containerID="763482efbb1dccdafcfbf0255fb0a75f16af0ca4d1583736aff621b10a3a2f9e"
Jan 28 18:36:04 crc kubenswrapper[4811]: I0128 18:36:04.419322 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerStarted","Data":"2614fd2dc0751f3b25df88580d36f9c7c35346fa9a91a1ea8a893c234a1b228e"}
Jan 28 18:37:40 crc kubenswrapper[4811]: I0128 18:37:40.470846 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-s7j98"]
Jan 28 18:37:40 crc kubenswrapper[4811]: E0128 18:37:40.472967 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40ce1205-d2e2-4470-9fda-dfcdaef4b274" containerName="copy"
Jan 28 18:37:40 crc kubenswrapper[4811]: I0128 18:37:40.473064 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="40ce1205-d2e2-4470-9fda-dfcdaef4b274" containerName="copy"
Jan 28 18:37:40 crc kubenswrapper[4811]: E0128 18:37:40.473162 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b7acfb9-e881-46c2-a571-b89664a5e7e2" containerName="collect-profiles"
Jan 28 18:37:40 crc kubenswrapper[4811]: I0128 18:37:40.473226 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b7acfb9-e881-46c2-a571-b89664a5e7e2" containerName="collect-profiles"
Jan 28 18:37:40 crc kubenswrapper[4811]: E0128 18:37:40.473290 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40ce1205-d2e2-4470-9fda-dfcdaef4b274" containerName="gather"
Jan 28 18:37:40 crc kubenswrapper[4811]: I0128 18:37:40.473343 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="40ce1205-d2e2-4470-9fda-dfcdaef4b274" containerName="gather"
Jan 28 18:37:40 crc kubenswrapper[4811]: I0128 18:37:40.473628 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="40ce1205-d2e2-4470-9fda-dfcdaef4b274" containerName="copy"
Jan 28 18:37:40 crc kubenswrapper[4811]: I0128 18:37:40.473708 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b7acfb9-e881-46c2-a571-b89664a5e7e2" containerName="collect-profiles"
Jan 28 18:37:40 crc kubenswrapper[4811]: I0128 18:37:40.473780 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="40ce1205-d2e2-4470-9fda-dfcdaef4b274" containerName="gather"
Jan 28 18:37:40 crc kubenswrapper[4811]: I0128 18:37:40.475659 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s7j98"
Jan 28 18:37:40 crc kubenswrapper[4811]: I0128 18:37:40.488133 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s7j98"]
Jan 28 18:37:40 crc kubenswrapper[4811]: I0128 18:37:40.657207 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mc5hw\" (UniqueName: \"kubernetes.io/projected/d12f5dd4-50cb-4e79-835c-6de17a68982c-kube-api-access-mc5hw\") pod \"certified-operators-s7j98\" (UID: \"d12f5dd4-50cb-4e79-835c-6de17a68982c\") " pod="openshift-marketplace/certified-operators-s7j98"
Jan 28 18:37:40 crc kubenswrapper[4811]: I0128 18:37:40.657666 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d12f5dd4-50cb-4e79-835c-6de17a68982c-catalog-content\") pod \"certified-operators-s7j98\" (UID: \"d12f5dd4-50cb-4e79-835c-6de17a68982c\") " pod="openshift-marketplace/certified-operators-s7j98"
Jan 28 18:37:40 crc kubenswrapper[4811]: I0128 18:37:40.657712 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d12f5dd4-50cb-4e79-835c-6de17a68982c-utilities\") pod \"certified-operators-s7j98\" (UID: \"d12f5dd4-50cb-4e79-835c-6de17a68982c\") " pod="openshift-marketplace/certified-operators-s7j98"
Jan 28 18:37:40 crc kubenswrapper[4811]: I0128 18:37:40.760200 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mc5hw\" (UniqueName: \"kubernetes.io/projected/d12f5dd4-50cb-4e79-835c-6de17a68982c-kube-api-access-mc5hw\") pod \"certified-operators-s7j98\" (UID: \"d12f5dd4-50cb-4e79-835c-6de17a68982c\") " pod="openshift-marketplace/certified-operators-s7j98"
Jan 28 18:37:40 crc kubenswrapper[4811]: I0128 18:37:40.760305 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d12f5dd4-50cb-4e79-835c-6de17a68982c-catalog-content\") pod \"certified-operators-s7j98\" (UID: \"d12f5dd4-50cb-4e79-835c-6de17a68982c\") " pod="openshift-marketplace/certified-operators-s7j98"
Jan 28 18:37:40 crc kubenswrapper[4811]: I0128 18:37:40.760347 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d12f5dd4-50cb-4e79-835c-6de17a68982c-utilities\") pod \"certified-operators-s7j98\" (UID: \"d12f5dd4-50cb-4e79-835c-6de17a68982c\") " pod="openshift-marketplace/certified-operators-s7j98"
Jan 28 18:37:40 crc kubenswrapper[4811]: I0128 18:37:40.760961 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d12f5dd4-50cb-4e79-835c-6de17a68982c-catalog-content\") pod \"certified-operators-s7j98\" (UID: \"d12f5dd4-50cb-4e79-835c-6de17a68982c\") " pod="openshift-marketplace/certified-operators-s7j98"
Jan 28 18:37:40 crc kubenswrapper[4811]: I0128 18:37:40.761038 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d12f5dd4-50cb-4e79-835c-6de17a68982c-utilities\") pod \"certified-operators-s7j98\" (UID: \"d12f5dd4-50cb-4e79-835c-6de17a68982c\") " pod="openshift-marketplace/certified-operators-s7j98"
Jan 28 18:37:40 crc kubenswrapper[4811]: I0128 18:37:40.784098 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mc5hw\" (UniqueName: \"kubernetes.io/projected/d12f5dd4-50cb-4e79-835c-6de17a68982c-kube-api-access-mc5hw\") pod \"certified-operators-s7j98\" (UID: \"d12f5dd4-50cb-4e79-835c-6de17a68982c\") " pod="openshift-marketplace/certified-operators-s7j98"
Jan 28 18:37:40 crc kubenswrapper[4811]: I0128 18:37:40.803134 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s7j98"
Jan 28 18:37:41 crc kubenswrapper[4811]: I0128 18:37:41.414221 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s7j98"]
Jan 28 18:37:42 crc kubenswrapper[4811]: I0128 18:37:42.731717 4811 generic.go:334] "Generic (PLEG): container finished" podID="d12f5dd4-50cb-4e79-835c-6de17a68982c" containerID="97f026bcfd5d68e308b09917ba7884ebbcebe16f49d4bc00636cb367f78c6e82" exitCode=0
Jan 28 18:37:42 crc kubenswrapper[4811]: I0128 18:37:42.731783 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s7j98" event={"ID":"d12f5dd4-50cb-4e79-835c-6de17a68982c","Type":"ContainerDied","Data":"97f026bcfd5d68e308b09917ba7884ebbcebe16f49d4bc00636cb367f78c6e82"}
Jan 28 18:37:42 crc kubenswrapper[4811]: I0128 18:37:42.732292 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s7j98" event={"ID":"d12f5dd4-50cb-4e79-835c-6de17a68982c","Type":"ContainerStarted","Data":"278257f0286a4c037425860116b911f4d96765f9dcf95778fa00e1d1213fbd54"}
Jan 28 18:37:42 crc kubenswrapper[4811]: I0128 18:37:42.734156 4811 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 28 18:37:45 crc kubenswrapper[4811]: I0128 18:37:45.769223 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s7j98" event={"ID":"d12f5dd4-50cb-4e79-835c-6de17a68982c","Type":"ContainerStarted","Data":"9c4c9c86d524a3de5c003adb9744c1930057b520703e05075d5b2e31c0af389a"}
Jan 28 18:37:46 crc kubenswrapper[4811]: I0128 18:37:46.779698 4811 generic.go:334] "Generic (PLEG): container finished" podID="d12f5dd4-50cb-4e79-835c-6de17a68982c" containerID="9c4c9c86d524a3de5c003adb9744c1930057b520703e05075d5b2e31c0af389a" exitCode=0
Jan 28 18:37:46 crc kubenswrapper[4811]: I0128 18:37:46.779941 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s7j98" event={"ID":"d12f5dd4-50cb-4e79-835c-6de17a68982c","Type":"ContainerDied","Data":"9c4c9c86d524a3de5c003adb9744c1930057b520703e05075d5b2e31c0af389a"}
Jan 28 18:37:47 crc kubenswrapper[4811]: I0128 18:37:47.790768 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s7j98" event={"ID":"d12f5dd4-50cb-4e79-835c-6de17a68982c","Type":"ContainerStarted","Data":"f869c0842f08fb7976a63a469450f69e441b153b1d3f778bcee0a85602e57df0"}
Jan 28 18:37:47 crc kubenswrapper[4811]: I0128 18:37:47.811191 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-s7j98" podStartSLOduration=3.326875024 podStartE2EDuration="7.811176394s" podCreationTimestamp="2026-01-28 18:37:40 +0000 UTC" firstStartedPulling="2026-01-28 18:37:42.733907739 +0000 UTC m=+10355.488271322" lastFinishedPulling="2026-01-28 18:37:47.218209109 +0000 UTC m=+10359.972572692" observedRunningTime="2026-01-28 18:37:47.809704824 +0000 UTC m=+10360.564068427" watchObservedRunningTime="2026-01-28 18:37:47.811176394 +0000 UTC m=+10360.565539967"
Jan 28 18:37:50 crc kubenswrapper[4811]: I0128 18:37:50.804009 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-s7j98"
Jan 28 18:37:50 crc kubenswrapper[4811]: I0128 18:37:50.804494 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-s7j98"
Jan 28 18:37:50 crc kubenswrapper[4811]: I0128 18:37:50.938770 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-s7j98"
Jan 28 18:37:54 crc kubenswrapper[4811]: I0128 18:37:54.498528 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ndlfs"]
Jan 28 18:37:54 crc kubenswrapper[4811]: I0128 18:37:54.532267 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ndlfs"
Jan 28 18:37:54 crc kubenswrapper[4811]: I0128 18:37:54.540633 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ndlfs"]
Jan 28 18:37:54 crc kubenswrapper[4811]: I0128 18:37:54.566246 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c47293df-a0fa-499e-a5a3-a544ec1d3517-catalog-content\") pod \"redhat-marketplace-ndlfs\" (UID: \"c47293df-a0fa-499e-a5a3-a544ec1d3517\") " pod="openshift-marketplace/redhat-marketplace-ndlfs"
Jan 28 18:37:54 crc kubenswrapper[4811]: I0128 18:37:54.566382 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c47293df-a0fa-499e-a5a3-a544ec1d3517-utilities\") pod \"redhat-marketplace-ndlfs\" (UID: \"c47293df-a0fa-499e-a5a3-a544ec1d3517\") " pod="openshift-marketplace/redhat-marketplace-ndlfs"
Jan 28 18:37:54 crc kubenswrapper[4811]: I0128 18:37:54.566556 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8pkp\" (UniqueName: \"kubernetes.io/projected/c47293df-a0fa-499e-a5a3-a544ec1d3517-kube-api-access-d8pkp\") pod \"redhat-marketplace-ndlfs\" (UID: \"c47293df-a0fa-499e-a5a3-a544ec1d3517\") " pod="openshift-marketplace/redhat-marketplace-ndlfs"
Jan 28 18:37:54 crc kubenswrapper[4811]: I0128 18:37:54.668359 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8pkp\" (UniqueName: \"kubernetes.io/projected/c47293df-a0fa-499e-a5a3-a544ec1d3517-kube-api-access-d8pkp\") pod \"redhat-marketplace-ndlfs\" (UID: \"c47293df-a0fa-499e-a5a3-a544ec1d3517\") " pod="openshift-marketplace/redhat-marketplace-ndlfs"
Jan 28 18:37:54 crc kubenswrapper[4811]: I0128 18:37:54.668501 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c47293df-a0fa-499e-a5a3-a544ec1d3517-catalog-content\") pod \"redhat-marketplace-ndlfs\" (UID: \"c47293df-a0fa-499e-a5a3-a544ec1d3517\") " pod="openshift-marketplace/redhat-marketplace-ndlfs"
Jan 28 18:37:54 crc kubenswrapper[4811]: I0128 18:37:54.668564 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c47293df-a0fa-499e-a5a3-a544ec1d3517-utilities\") pod \"redhat-marketplace-ndlfs\" (UID: \"c47293df-a0fa-499e-a5a3-a544ec1d3517\") " pod="openshift-marketplace/redhat-marketplace-ndlfs"
Jan 28 18:37:54 crc kubenswrapper[4811]: I0128 18:37:54.669190 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c47293df-a0fa-499e-a5a3-a544ec1d3517-utilities\") pod \"redhat-marketplace-ndlfs\" (UID: \"c47293df-a0fa-499e-a5a3-a544ec1d3517\") " pod="openshift-marketplace/redhat-marketplace-ndlfs"
Jan 28 18:37:54 crc kubenswrapper[4811]: I0128 18:37:54.669361 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c47293df-a0fa-499e-a5a3-a544ec1d3517-catalog-content\") pod \"redhat-marketplace-ndlfs\" (UID: \"c47293df-a0fa-499e-a5a3-a544ec1d3517\") " pod="openshift-marketplace/redhat-marketplace-ndlfs"
Jan 28 18:37:54 crc kubenswrapper[4811]: I0128 18:37:54.689590 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d8pkp\" (UniqueName: \"kubernetes.io/projected/c47293df-a0fa-499e-a5a3-a544ec1d3517-kube-api-access-d8pkp\") pod \"redhat-marketplace-ndlfs\" (UID: \"c47293df-a0fa-499e-a5a3-a544ec1d3517\") " pod="openshift-marketplace/redhat-marketplace-ndlfs"
Jan 28 18:37:54 crc kubenswrapper[4811]: I0128 18:37:54.867521 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ndlfs"
Jan 28 18:37:55 crc kubenswrapper[4811]: I0128 18:37:55.389001 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ndlfs"]
Jan 28 18:37:55 crc kubenswrapper[4811]: I0128 18:37:55.870191 4811 generic.go:334] "Generic (PLEG): container finished" podID="c47293df-a0fa-499e-a5a3-a544ec1d3517" containerID="8f1c8f4d9366e7efdc7fda87f4d3345a2b6e93637cd18918e5a58a5c27ff294e" exitCode=0
Jan 28 18:37:55 crc kubenswrapper[4811]: I0128 18:37:55.870235 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ndlfs" event={"ID":"c47293df-a0fa-499e-a5a3-a544ec1d3517","Type":"ContainerDied","Data":"8f1c8f4d9366e7efdc7fda87f4d3345a2b6e93637cd18918e5a58a5c27ff294e"}
Jan 28 18:37:55 crc kubenswrapper[4811]: I0128 18:37:55.870260 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ndlfs" event={"ID":"c47293df-a0fa-499e-a5a3-a544ec1d3517","Type":"ContainerStarted","Data":"0d66f793a57fe662d0d0ec154de226f64b09a2e2dc3769804feafeaea00fd466"}
Jan 28 18:37:57 crc kubenswrapper[4811]: I0128 18:37:57.887093 4811 generic.go:334] "Generic (PLEG): container finished" podID="c47293df-a0fa-499e-a5a3-a544ec1d3517" containerID="7360b673c8c1564abaa310040624cda7da4966b6ff51836470682292c01fdd3f" exitCode=0
Jan 28 18:37:57 crc kubenswrapper[4811]: I0128 18:37:57.887164 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ndlfs" event={"ID":"c47293df-a0fa-499e-a5a3-a544ec1d3517","Type":"ContainerDied","Data":"7360b673c8c1564abaa310040624cda7da4966b6ff51836470682292c01fdd3f"}
Jan 28 18:37:58 crc kubenswrapper[4811]: I0128 18:37:58.901410 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ndlfs" event={"ID":"c47293df-a0fa-499e-a5a3-a544ec1d3517","Type":"ContainerStarted","Data":"b6717e500c140bb9293f9dd066606213794a33987213288bac81ffb79f393afb"}
Jan 28 18:37:58 crc kubenswrapper[4811]: I0128 18:37:58.927118 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ndlfs" podStartSLOduration=2.426372938 podStartE2EDuration="4.927094967s" podCreationTimestamp="2026-01-28 18:37:54 +0000 UTC" firstStartedPulling="2026-01-28 18:37:55.872467393 +0000 UTC m=+10368.626830976" lastFinishedPulling="2026-01-28 18:37:58.373189422 +0000 UTC m=+10371.127553005" observedRunningTime="2026-01-28 18:37:58.922034139 +0000 UTC m=+10371.676397732" watchObservedRunningTime="2026-01-28 18:37:58.927094967 +0000 UTC m=+10371.681458580"
Jan 28 18:38:00 crc kubenswrapper[4811]: I0128 18:38:00.861842 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-s7j98"
Jan 28 18:38:03 crc kubenswrapper[4811]: I0128 18:38:03.087765 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 18:38:03 crc kubenswrapper[4811]: I0128 18:38:03.088125 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 18:38:04 crc kubenswrapper[4811]: I0128 18:38:04.868807 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ndlfs"
Jan 28 18:38:04 crc kubenswrapper[4811]: I0128 18:38:04.870124 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ndlfs"
Jan 28 18:38:05 crc kubenswrapper[4811]: I0128 18:38:05.040884 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ndlfs"
Jan 28 18:38:05 crc kubenswrapper[4811]: I0128 18:38:05.476946 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s7j98"]
Jan 28 18:38:05 crc kubenswrapper[4811]: I0128 18:38:05.477184 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-s7j98" podUID="d12f5dd4-50cb-4e79-835c-6de17a68982c" containerName="registry-server" containerID="cri-o://f869c0842f08fb7976a63a469450f69e441b153b1d3f778bcee0a85602e57df0" gracePeriod=2
Jan 28 18:38:05 crc kubenswrapper[4811]: I0128 18:38:05.971395 4811 generic.go:334] "Generic (PLEG): container finished" podID="d12f5dd4-50cb-4e79-835c-6de17a68982c" containerID="f869c0842f08fb7976a63a469450f69e441b153b1d3f778bcee0a85602e57df0" exitCode=0
Jan 28 18:38:05 crc kubenswrapper[4811]: I0128 18:38:05.971471 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s7j98" event={"ID":"d12f5dd4-50cb-4e79-835c-6de17a68982c","Type":"ContainerDied","Data":"f869c0842f08fb7976a63a469450f69e441b153b1d3f778bcee0a85602e57df0"}
Jan 28 18:38:05 crc kubenswrapper[4811]: I0128 18:38:05.971801 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s7j98" event={"ID":"d12f5dd4-50cb-4e79-835c-6de17a68982c","Type":"ContainerDied","Data":"278257f0286a4c037425860116b911f4d96765f9dcf95778fa00e1d1213fbd54"}
Jan 28 18:38:05 crc kubenswrapper[4811]: I0128 18:38:05.971829 4811 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="278257f0286a4c037425860116b911f4d96765f9dcf95778fa00e1d1213fbd54"
Jan 28 18:38:06 crc kubenswrapper[4811]: I0128 18:38:06.004948 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s7j98"
Jan 28 18:38:06 crc kubenswrapper[4811]: I0128 18:38:06.071144 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ndlfs"
Jan 28 18:38:06 crc kubenswrapper[4811]: I0128 18:38:06.117413 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d12f5dd4-50cb-4e79-835c-6de17a68982c-utilities\") pod \"d12f5dd4-50cb-4e79-835c-6de17a68982c\" (UID: \"d12f5dd4-50cb-4e79-835c-6de17a68982c\") "
Jan 28 18:38:06 crc kubenswrapper[4811]: I0128 18:38:06.117566 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d12f5dd4-50cb-4e79-835c-6de17a68982c-catalog-content\") pod \"d12f5dd4-50cb-4e79-835c-6de17a68982c\" (UID: \"d12f5dd4-50cb-4e79-835c-6de17a68982c\") "
Jan 28 18:38:06 crc kubenswrapper[4811]: I0128 18:38:06.117861 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mc5hw\" (UniqueName: \"kubernetes.io/projected/d12f5dd4-50cb-4e79-835c-6de17a68982c-kube-api-access-mc5hw\") pod \"d12f5dd4-50cb-4e79-835c-6de17a68982c\" (UID: \"d12f5dd4-50cb-4e79-835c-6de17a68982c\") "
Jan 28 18:38:06 crc kubenswrapper[4811]: I0128 18:38:06.118364 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d12f5dd4-50cb-4e79-835c-6de17a68982c-utilities" (OuterVolumeSpecName: "utilities") pod "d12f5dd4-50cb-4e79-835c-6de17a68982c" (UID: "d12f5dd4-50cb-4e79-835c-6de17a68982c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 18:38:06 crc kubenswrapper[4811]: I0128 18:38:06.118509 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d12f5dd4-50cb-4e79-835c-6de17a68982c-utilities\") on node \"crc\" DevicePath \"\""
Jan 28 18:38:06 crc kubenswrapper[4811]: I0128 18:38:06.129684 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d12f5dd4-50cb-4e79-835c-6de17a68982c-kube-api-access-mc5hw" (OuterVolumeSpecName: "kube-api-access-mc5hw") pod "d12f5dd4-50cb-4e79-835c-6de17a68982c" (UID: "d12f5dd4-50cb-4e79-835c-6de17a68982c"). InnerVolumeSpecName "kube-api-access-mc5hw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 18:38:06 crc kubenswrapper[4811]: I0128 18:38:06.176480 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d12f5dd4-50cb-4e79-835c-6de17a68982c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d12f5dd4-50cb-4e79-835c-6de17a68982c" (UID: "d12f5dd4-50cb-4e79-835c-6de17a68982c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 18:38:06 crc kubenswrapper[4811]: I0128 18:38:06.220528 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d12f5dd4-50cb-4e79-835c-6de17a68982c-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 28 18:38:06 crc kubenswrapper[4811]: I0128 18:38:06.220565 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mc5hw\" (UniqueName: \"kubernetes.io/projected/d12f5dd4-50cb-4e79-835c-6de17a68982c-kube-api-access-mc5hw\") on node \"crc\" DevicePath \"\""
Jan 28 18:38:06 crc kubenswrapper[4811]: I0128 18:38:06.981812 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s7j98"
Jan 28 18:38:07 crc kubenswrapper[4811]: I0128 18:38:07.018999 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s7j98"]
Jan 28 18:38:07 crc kubenswrapper[4811]: I0128 18:38:07.031579 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-s7j98"]
Jan 28 18:38:08 crc kubenswrapper[4811]: I0128 18:38:08.356467 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d12f5dd4-50cb-4e79-835c-6de17a68982c" path="/var/lib/kubelet/pods/d12f5dd4-50cb-4e79-835c-6de17a68982c/volumes"
Jan 28 18:38:09 crc kubenswrapper[4811]: I0128 18:38:09.676179 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ndlfs"]
Jan 28 18:38:09 crc kubenswrapper[4811]: I0128 18:38:09.676726 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-ndlfs" podUID="c47293df-a0fa-499e-a5a3-a544ec1d3517" containerName="registry-server" containerID="cri-o://b6717e500c140bb9293f9dd066606213794a33987213288bac81ffb79f393afb" gracePeriod=2
Jan 28 18:38:10 crc kubenswrapper[4811]: I0128 18:38:10.016469 4811 generic.go:334] "Generic (PLEG): container finished" podID="c47293df-a0fa-499e-a5a3-a544ec1d3517" containerID="b6717e500c140bb9293f9dd066606213794a33987213288bac81ffb79f393afb" exitCode=0
Jan 28 18:38:10 crc kubenswrapper[4811]: I0128 18:38:10.016926 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ndlfs" event={"ID":"c47293df-a0fa-499e-a5a3-a544ec1d3517","Type":"ContainerDied","Data":"b6717e500c140bb9293f9dd066606213794a33987213288bac81ffb79f393afb"}
Jan 28 18:38:10 crc kubenswrapper[4811]: I0128 18:38:10.166391 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ndlfs"
Jan 28 18:38:10 crc kubenswrapper[4811]: I0128 18:38:10.219466 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c47293df-a0fa-499e-a5a3-a544ec1d3517-catalog-content\") pod \"c47293df-a0fa-499e-a5a3-a544ec1d3517\" (UID: \"c47293df-a0fa-499e-a5a3-a544ec1d3517\") "
Jan 28 18:38:10 crc kubenswrapper[4811]: I0128 18:38:10.219535 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d8pkp\" (UniqueName: \"kubernetes.io/projected/c47293df-a0fa-499e-a5a3-a544ec1d3517-kube-api-access-d8pkp\") pod \"c47293df-a0fa-499e-a5a3-a544ec1d3517\" (UID: \"c47293df-a0fa-499e-a5a3-a544ec1d3517\") "
Jan 28 18:38:10 crc kubenswrapper[4811]: I0128 18:38:10.219595 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c47293df-a0fa-499e-a5a3-a544ec1d3517-utilities\") pod \"c47293df-a0fa-499e-a5a3-a544ec1d3517\" (UID: \"c47293df-a0fa-499e-a5a3-a544ec1d3517\") "
Jan 28 18:38:10 crc kubenswrapper[4811]: I0128 18:38:10.221009 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c47293df-a0fa-499e-a5a3-a544ec1d3517-utilities" (OuterVolumeSpecName: "utilities") pod "c47293df-a0fa-499e-a5a3-a544ec1d3517" (UID: "c47293df-a0fa-499e-a5a3-a544ec1d3517"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 18:38:10 crc kubenswrapper[4811]: I0128 18:38:10.229718 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c47293df-a0fa-499e-a5a3-a544ec1d3517-kube-api-access-d8pkp" (OuterVolumeSpecName: "kube-api-access-d8pkp") pod "c47293df-a0fa-499e-a5a3-a544ec1d3517" (UID: "c47293df-a0fa-499e-a5a3-a544ec1d3517"). InnerVolumeSpecName "kube-api-access-d8pkp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 18:38:10 crc kubenswrapper[4811]: I0128 18:38:10.245500 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c47293df-a0fa-499e-a5a3-a544ec1d3517-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c47293df-a0fa-499e-a5a3-a544ec1d3517" (UID: "c47293df-a0fa-499e-a5a3-a544ec1d3517"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 18:38:10 crc kubenswrapper[4811]: I0128 18:38:10.322212 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c47293df-a0fa-499e-a5a3-a544ec1d3517-utilities\") on node \"crc\" DevicePath \"\""
Jan 28 18:38:10 crc kubenswrapper[4811]: I0128 18:38:10.322251 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c47293df-a0fa-499e-a5a3-a544ec1d3517-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 28 18:38:10 crc kubenswrapper[4811]: I0128 18:38:10.322267 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d8pkp\" (UniqueName: \"kubernetes.io/projected/c47293df-a0fa-499e-a5a3-a544ec1d3517-kube-api-access-d8pkp\") on node \"crc\" DevicePath \"\""
Jan 28 18:38:11 crc kubenswrapper[4811]: I0128 18:38:11.031803 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ndlfs" event={"ID":"c47293df-a0fa-499e-a5a3-a544ec1d3517","Type":"ContainerDied","Data":"0d66f793a57fe662d0d0ec154de226f64b09a2e2dc3769804feafeaea00fd466"}
Jan 28 18:38:11 crc kubenswrapper[4811]: I0128 18:38:11.031910 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ndlfs"
Jan 28 18:38:11 crc kubenswrapper[4811]: I0128 18:38:11.032354 4811 scope.go:117] "RemoveContainer" containerID="b6717e500c140bb9293f9dd066606213794a33987213288bac81ffb79f393afb"
Jan 28 18:38:11 crc kubenswrapper[4811]: I0128 18:38:11.056227 4811 scope.go:117] "RemoveContainer" containerID="7360b673c8c1564abaa310040624cda7da4966b6ff51836470682292c01fdd3f"
Jan 28 18:38:11 crc kubenswrapper[4811]: I0128 18:38:11.064480 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ndlfs"]
Jan 28 18:38:11 crc kubenswrapper[4811]: I0128 18:38:11.076788 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-ndlfs"]
Jan 28 18:38:11 crc kubenswrapper[4811]: I0128 18:38:11.095876 4811 scope.go:117] "RemoveContainer" containerID="8f1c8f4d9366e7efdc7fda87f4d3345a2b6e93637cd18918e5a58a5c27ff294e"
Jan 28 18:38:12 crc kubenswrapper[4811]: I0128 18:38:12.352249 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c47293df-a0fa-499e-a5a3-a544ec1d3517" path="/var/lib/kubelet/pods/c47293df-a0fa-499e-a5a3-a544ec1d3517/volumes"
Jan 28 18:38:33 crc kubenswrapper[4811]: I0128 18:38:33.088633 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 18:38:33 crc kubenswrapper[4811]: I0128 18:38:33.089542 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 18:39:03 crc kubenswrapper[4811]: I0128 18:39:03.088031 4811 patch_prober.go:28] interesting pod/machine-config-daemon-4vcr6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 28 18:39:03 crc kubenswrapper[4811]: I0128 18:39:03.088922 4811 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 28 18:39:03 crc kubenswrapper[4811]: I0128 18:39:03.089001 4811 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6"
Jan 28 18:39:03 crc kubenswrapper[4811]: I0128 18:39:03.090223 4811 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2614fd2dc0751f3b25df88580d36f9c7c35346fa9a91a1ea8a893c234a1b228e"} pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 28 18:39:03 crc kubenswrapper[4811]: I0128 18:39:03.090331 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerName="machine-config-daemon" containerID="cri-o://2614fd2dc0751f3b25df88580d36f9c7c35346fa9a91a1ea8a893c234a1b228e" gracePeriod=600
Jan 28 18:39:03 crc kubenswrapper[4811]: E0128 18:39:03.221825 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 18:39:03 crc kubenswrapper[4811]: I0128 18:39:03.572620 4811 generic.go:334] "Generic (PLEG): container finished" podID="d2a0331a-ea7c-4888-a47e-e73015ab42e0" containerID="2614fd2dc0751f3b25df88580d36f9c7c35346fa9a91a1ea8a893c234a1b228e" exitCode=0
Jan 28 18:39:03 crc kubenswrapper[4811]: I0128 18:39:03.572677 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" event={"ID":"d2a0331a-ea7c-4888-a47e-e73015ab42e0","Type":"ContainerDied","Data":"2614fd2dc0751f3b25df88580d36f9c7c35346fa9a91a1ea8a893c234a1b228e"}
Jan 28 18:39:03 crc kubenswrapper[4811]: I0128 18:39:03.572990 4811 scope.go:117] "RemoveContainer" containerID="5de93d3b64bb728d8830015da463664bfdb31100434662da7cfd020c44890fc2"
Jan 28 18:39:03 crc kubenswrapper[4811]: I0128 18:39:03.573810 4811 scope.go:117] "RemoveContainer" containerID="2614fd2dc0751f3b25df88580d36f9c7c35346fa9a91a1ea8a893c234a1b228e"
Jan 28 18:39:03 crc kubenswrapper[4811]: E0128 18:39:03.574304 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 18:39:14 crc kubenswrapper[4811]: I0128 18:39:14.339517 4811 scope.go:117] "RemoveContainer" containerID="2614fd2dc0751f3b25df88580d36f9c7c35346fa9a91a1ea8a893c234a1b228e"
Jan 28 18:39:14 crc kubenswrapper[4811]: E0128 18:39:14.340269 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 18:39:28 crc kubenswrapper[4811]: I0128 18:39:28.347968 4811 scope.go:117] "RemoveContainer" containerID="2614fd2dc0751f3b25df88580d36f9c7c35346fa9a91a1ea8a893c234a1b228e"
Jan 28 18:39:28 crc kubenswrapper[4811]: E0128 18:39:28.348828 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 18:39:29 crc kubenswrapper[4811]: I0128 18:39:29.386971 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-lcfcr"]
Jan 28 18:39:29 crc kubenswrapper[4811]: E0128 18:39:29.387414 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c47293df-a0fa-499e-a5a3-a544ec1d3517" containerName="extract-content"
Jan 28 18:39:29 crc kubenswrapper[4811]: I0128 18:39:29.387451 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="c47293df-a0fa-499e-a5a3-a544ec1d3517" containerName="extract-content"
Jan 28 18:39:29 crc kubenswrapper[4811]: E0128 18:39:29.387471 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d12f5dd4-50cb-4e79-835c-6de17a68982c" containerName="registry-server"
Jan 28 18:39:29 crc kubenswrapper[4811]: I0128 18:39:29.387477 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d12f5dd4-50cb-4e79-835c-6de17a68982c" containerName="registry-server"
Jan 28 18:39:29 crc kubenswrapper[4811]: E0128 18:39:29.387492 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c47293df-a0fa-499e-a5a3-a544ec1d3517" containerName="registry-server"
Jan 28 18:39:29 crc kubenswrapper[4811]: I0128 18:39:29.387499 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="c47293df-a0fa-499e-a5a3-a544ec1d3517" containerName="registry-server"
Jan 28 18:39:29 crc kubenswrapper[4811]: E0128 18:39:29.387515 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d12f5dd4-50cb-4e79-835c-6de17a68982c" containerName="extract-utilities"
Jan 28 18:39:29 crc kubenswrapper[4811]: I0128 18:39:29.387521 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d12f5dd4-50cb-4e79-835c-6de17a68982c" containerName="extract-utilities"
Jan 28 18:39:29 crc kubenswrapper[4811]: E0128 18:39:29.387531 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d12f5dd4-50cb-4e79-835c-6de17a68982c" containerName="extract-content"
Jan 28 18:39:29 crc kubenswrapper[4811]: I0128 18:39:29.387538 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="d12f5dd4-50cb-4e79-835c-6de17a68982c" containerName="extract-content"
Jan 28 18:39:29 crc kubenswrapper[4811]: E0128 18:39:29.387563 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c47293df-a0fa-499e-a5a3-a544ec1d3517" containerName="extract-utilities"
Jan 28 18:39:29 crc kubenswrapper[4811]: I0128 18:39:29.387569 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="c47293df-a0fa-499e-a5a3-a544ec1d3517" containerName="extract-utilities"
Jan 28 18:39:29 crc kubenswrapper[4811]: I0128 18:39:29.387750 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="d12f5dd4-50cb-4e79-835c-6de17a68982c" containerName="registry-server"
Jan 28 18:39:29 crc kubenswrapper[4811]: I0128 18:39:29.387763 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="c47293df-a0fa-499e-a5a3-a544ec1d3517" containerName="registry-server"
Jan 28 18:39:29 crc kubenswrapper[4811]: I0128 18:39:29.392146 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lcfcr"
Jan 28 18:39:29 crc kubenswrapper[4811]: I0128 18:39:29.402881 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lcfcr"]
Jan 28 18:39:29 crc kubenswrapper[4811]: I0128 18:39:29.488888 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jgc68\" (UniqueName: \"kubernetes.io/projected/55a8a252-a5f0-454a-8a0d-a3dff2314391-kube-api-access-jgc68\") pod \"redhat-operators-lcfcr\" (UID: \"55a8a252-a5f0-454a-8a0d-a3dff2314391\") " pod="openshift-marketplace/redhat-operators-lcfcr"
Jan 28 18:39:29 crc kubenswrapper[4811]: I0128 18:39:29.489234 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55a8a252-a5f0-454a-8a0d-a3dff2314391-catalog-content\") pod \"redhat-operators-lcfcr\" (UID: \"55a8a252-a5f0-454a-8a0d-a3dff2314391\") " pod="openshift-marketplace/redhat-operators-lcfcr"
Jan 28 18:39:29 crc kubenswrapper[4811]: I0128 18:39:29.489288 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55a8a252-a5f0-454a-8a0d-a3dff2314391-utilities\") pod \"redhat-operators-lcfcr\" (UID: \"55a8a252-a5f0-454a-8a0d-a3dff2314391\") " pod="openshift-marketplace/redhat-operators-lcfcr"
Jan 28 18:39:29 crc kubenswrapper[4811]: I0128 18:39:29.591838 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jgc68\" (UniqueName: \"kubernetes.io/projected/55a8a252-a5f0-454a-8a0d-a3dff2314391-kube-api-access-jgc68\") pod \"redhat-operators-lcfcr\" (UID: \"55a8a252-a5f0-454a-8a0d-a3dff2314391\") " pod="openshift-marketplace/redhat-operators-lcfcr"
Jan 28 18:39:29 crc kubenswrapper[4811]: I0128 18:39:29.591930 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55a8a252-a5f0-454a-8a0d-a3dff2314391-catalog-content\") pod \"redhat-operators-lcfcr\" (UID: \"55a8a252-a5f0-454a-8a0d-a3dff2314391\") " pod="openshift-marketplace/redhat-operators-lcfcr"
Jan 28 18:39:29 crc kubenswrapper[4811]: I0128 18:39:29.591975 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55a8a252-a5f0-454a-8a0d-a3dff2314391-utilities\") pod \"redhat-operators-lcfcr\" (UID: \"55a8a252-a5f0-454a-8a0d-a3dff2314391\") " pod="openshift-marketplace/redhat-operators-lcfcr"
Jan 28 18:39:29 crc kubenswrapper[4811]: I0128 18:39:29.592663 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55a8a252-a5f0-454a-8a0d-a3dff2314391-utilities\") pod \"redhat-operators-lcfcr\" (UID: \"55a8a252-a5f0-454a-8a0d-a3dff2314391\") " pod="openshift-marketplace/redhat-operators-lcfcr"
Jan 28 18:39:29 crc kubenswrapper[4811]: I0128 18:39:29.592747 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55a8a252-a5f0-454a-8a0d-a3dff2314391-catalog-content\") pod \"redhat-operators-lcfcr\" (UID: \"55a8a252-a5f0-454a-8a0d-a3dff2314391\") " pod="openshift-marketplace/redhat-operators-lcfcr"
Jan 28 18:39:29 crc kubenswrapper[4811]: I0128 18:39:29.614739 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jgc68\" (UniqueName: \"kubernetes.io/projected/55a8a252-a5f0-454a-8a0d-a3dff2314391-kube-api-access-jgc68\") pod \"redhat-operators-lcfcr\" (UID: \"55a8a252-a5f0-454a-8a0d-a3dff2314391\") " pod="openshift-marketplace/redhat-operators-lcfcr"
Jan 28 18:39:29 crc kubenswrapper[4811]: I0128 18:39:29.721881 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lcfcr"
Jan 28 18:39:30 crc kubenswrapper[4811]: I0128 18:39:30.255951 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lcfcr"]
Jan 28 18:39:30 crc kubenswrapper[4811]: I0128 18:39:30.855397 4811 generic.go:334] "Generic (PLEG): container finished" podID="55a8a252-a5f0-454a-8a0d-a3dff2314391" containerID="892116fe12b80007c54fd98d6c051b01ede0c248b8bb406f42ba3b1078a8d6f5" exitCode=0
Jan 28 18:39:30 crc kubenswrapper[4811]: I0128 18:39:30.855488 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lcfcr" event={"ID":"55a8a252-a5f0-454a-8a0d-a3dff2314391","Type":"ContainerDied","Data":"892116fe12b80007c54fd98d6c051b01ede0c248b8bb406f42ba3b1078a8d6f5"}
Jan 28 18:39:30 crc kubenswrapper[4811]: I0128 18:39:30.855872 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lcfcr" event={"ID":"55a8a252-a5f0-454a-8a0d-a3dff2314391","Type":"ContainerStarted","Data":"14bbf18dfb0e3d66f0db970d741764ade428449b677220d4cd28638011d4249d"}
Jan 28 18:39:32 crc kubenswrapper[4811]: I0128 18:39:32.877377 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lcfcr" event={"ID":"55a8a252-a5f0-454a-8a0d-a3dff2314391","Type":"ContainerStarted","Data":"89605bece49de71563627dfb7ac352d5212bc73495fc75e881fe1dab649bfdd5"}
Jan 28 18:39:43 crc kubenswrapper[4811]: I0128 18:39:43.339651 4811 scope.go:117] "RemoveContainer" containerID="2614fd2dc0751f3b25df88580d36f9c7c35346fa9a91a1ea8a893c234a1b228e"
Jan 28 18:39:43 crc kubenswrapper[4811]: E0128 18:39:43.340450 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 18:39:45 crc kubenswrapper[4811]: I0128 18:39:45.009775 4811 generic.go:334] "Generic (PLEG): container finished" podID="55a8a252-a5f0-454a-8a0d-a3dff2314391" containerID="89605bece49de71563627dfb7ac352d5212bc73495fc75e881fe1dab649bfdd5" exitCode=0
Jan 28 18:39:45 crc kubenswrapper[4811]: I0128 18:39:45.009837 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lcfcr" event={"ID":"55a8a252-a5f0-454a-8a0d-a3dff2314391","Type":"ContainerDied","Data":"89605bece49de71563627dfb7ac352d5212bc73495fc75e881fe1dab649bfdd5"}
Jan 28 18:39:47 crc kubenswrapper[4811]: I0128 18:39:47.030894 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lcfcr" event={"ID":"55a8a252-a5f0-454a-8a0d-a3dff2314391","Type":"ContainerStarted","Data":"e899a25161507be7a7ce00628097d446855de155a5095669e9e9be3f7dac6b5c"}
Jan 28 18:39:47 crc kubenswrapper[4811]: I0128 18:39:47.056423 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-lcfcr" podStartSLOduration=2.490286884 podStartE2EDuration="18.05639943s" podCreationTimestamp="2026-01-28 18:39:29 +0000 UTC" firstStartedPulling="2026-01-28 18:39:30.858237417 +0000 UTC m=+10463.612601000" lastFinishedPulling="2026-01-28 18:39:46.424349963 +0000 UTC m=+10479.178713546" observedRunningTime="2026-01-28 18:39:47.047322043 +0000 UTC m=+10479.801685616" watchObservedRunningTime="2026-01-28 18:39:47.05639943 +0000 UTC m=+10479.810763013"
Jan 28 18:39:49 crc kubenswrapper[4811]: I0128 18:39:49.722486 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-lcfcr"
Jan 28 18:39:49 crc kubenswrapper[4811]: I0128 18:39:49.722910 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-lcfcr"
Jan 28 18:39:50 crc kubenswrapper[4811]: I0128 18:39:50.779210 4811 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-lcfcr" podUID="55a8a252-a5f0-454a-8a0d-a3dff2314391" containerName="registry-server" probeResult="failure" output=<
Jan 28 18:39:50 crc kubenswrapper[4811]: timeout: failed to connect service ":50051" within 1s
Jan 28 18:39:50 crc kubenswrapper[4811]: >
Jan 28 18:39:56 crc kubenswrapper[4811]: I0128 18:39:56.339331 4811 scope.go:117] "RemoveContainer" containerID="2614fd2dc0751f3b25df88580d36f9c7c35346fa9a91a1ea8a893c234a1b228e"
Jan 28 18:39:56 crc kubenswrapper[4811]: E0128 18:39:56.340146 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
Jan 28 18:39:59 crc kubenswrapper[4811]: I0128 18:39:59.775405 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-lcfcr"
Jan 28 18:39:59 crc kubenswrapper[4811]: I0128 18:39:59.828840 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-lcfcr"
Jan 28 18:40:00 crc kubenswrapper[4811]: I0128 18:40:00.590888 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lcfcr"]
Jan 28 18:40:01 crc kubenswrapper[4811]: I0128 18:40:01.172301 4811 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-lcfcr" podUID="55a8a252-a5f0-454a-8a0d-a3dff2314391" containerName="registry-server" containerID="cri-o://e899a25161507be7a7ce00628097d446855de155a5095669e9e9be3f7dac6b5c" gracePeriod=2
Jan 28 18:40:01 crc kubenswrapper[4811]: I0128 18:40:01.742456 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lcfcr"
Jan 28 18:40:01 crc kubenswrapper[4811]: I0128 18:40:01.837088 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55a8a252-a5f0-454a-8a0d-a3dff2314391-catalog-content\") pod \"55a8a252-a5f0-454a-8a0d-a3dff2314391\" (UID: \"55a8a252-a5f0-454a-8a0d-a3dff2314391\") "
Jan 28 18:40:01 crc kubenswrapper[4811]: I0128 18:40:01.837183 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jgc68\" (UniqueName: \"kubernetes.io/projected/55a8a252-a5f0-454a-8a0d-a3dff2314391-kube-api-access-jgc68\") pod \"55a8a252-a5f0-454a-8a0d-a3dff2314391\" (UID: \"55a8a252-a5f0-454a-8a0d-a3dff2314391\") "
Jan 28 18:40:01 crc kubenswrapper[4811]: I0128 18:40:01.837323 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55a8a252-a5f0-454a-8a0d-a3dff2314391-utilities\") pod \"55a8a252-a5f0-454a-8a0d-a3dff2314391\" (UID: \"55a8a252-a5f0-454a-8a0d-a3dff2314391\") "
Jan 28 18:40:01 crc kubenswrapper[4811]: I0128 18:40:01.838809 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55a8a252-a5f0-454a-8a0d-a3dff2314391-utilities" (OuterVolumeSpecName: "utilities") pod "55a8a252-a5f0-454a-8a0d-a3dff2314391" (UID: "55a8a252-a5f0-454a-8a0d-a3dff2314391"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 18:40:01 crc kubenswrapper[4811]: I0128 18:40:01.853155 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55a8a252-a5f0-454a-8a0d-a3dff2314391-kube-api-access-jgc68" (OuterVolumeSpecName: "kube-api-access-jgc68") pod "55a8a252-a5f0-454a-8a0d-a3dff2314391" (UID: "55a8a252-a5f0-454a-8a0d-a3dff2314391"). InnerVolumeSpecName "kube-api-access-jgc68". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 18:40:01 crc kubenswrapper[4811]: I0128 18:40:01.939581 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jgc68\" (UniqueName: \"kubernetes.io/projected/55a8a252-a5f0-454a-8a0d-a3dff2314391-kube-api-access-jgc68\") on node \"crc\" DevicePath \"\""
Jan 28 18:40:01 crc kubenswrapper[4811]: I0128 18:40:01.939609 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55a8a252-a5f0-454a-8a0d-a3dff2314391-utilities\") on node \"crc\" DevicePath \"\""
Jan 28 18:40:01 crc kubenswrapper[4811]: I0128 18:40:01.970612 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55a8a252-a5f0-454a-8a0d-a3dff2314391-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "55a8a252-a5f0-454a-8a0d-a3dff2314391" (UID: "55a8a252-a5f0-454a-8a0d-a3dff2314391"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 18:40:01 crc kubenswrapper[4811]: I0128 18:40:01.742456 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lcfcr"
Jan 28 18:40:01 crc kubenswrapper[4811]: I0128 18:40:01.837088 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55a8a252-a5f0-454a-8a0d-a3dff2314391-catalog-content\") pod \"55a8a252-a5f0-454a-8a0d-a3dff2314391\" (UID: \"55a8a252-a5f0-454a-8a0d-a3dff2314391\") "
Jan 28 18:40:01 crc kubenswrapper[4811]: I0128 18:40:01.837183 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jgc68\" (UniqueName: \"kubernetes.io/projected/55a8a252-a5f0-454a-8a0d-a3dff2314391-kube-api-access-jgc68\") pod \"55a8a252-a5f0-454a-8a0d-a3dff2314391\" (UID: \"55a8a252-a5f0-454a-8a0d-a3dff2314391\") "
Jan 28 18:40:01 crc kubenswrapper[4811]: I0128 18:40:01.837323 4811 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55a8a252-a5f0-454a-8a0d-a3dff2314391-utilities\") pod \"55a8a252-a5f0-454a-8a0d-a3dff2314391\" (UID: \"55a8a252-a5f0-454a-8a0d-a3dff2314391\") "
Jan 28 18:40:01 crc kubenswrapper[4811]: I0128 18:40:01.838809 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55a8a252-a5f0-454a-8a0d-a3dff2314391-utilities" (OuterVolumeSpecName: "utilities") pod "55a8a252-a5f0-454a-8a0d-a3dff2314391" (UID: "55a8a252-a5f0-454a-8a0d-a3dff2314391"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 18:40:01 crc kubenswrapper[4811]: I0128 18:40:01.853155 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55a8a252-a5f0-454a-8a0d-a3dff2314391-kube-api-access-jgc68" (OuterVolumeSpecName: "kube-api-access-jgc68") pod "55a8a252-a5f0-454a-8a0d-a3dff2314391" (UID: "55a8a252-a5f0-454a-8a0d-a3dff2314391"). InnerVolumeSpecName "kube-api-access-jgc68". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 28 18:40:01 crc kubenswrapper[4811]: I0128 18:40:01.939581 4811 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jgc68\" (UniqueName: \"kubernetes.io/projected/55a8a252-a5f0-454a-8a0d-a3dff2314391-kube-api-access-jgc68\") on node \"crc\" DevicePath \"\""
Jan 28 18:40:01 crc kubenswrapper[4811]: I0128 18:40:01.939609 4811 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55a8a252-a5f0-454a-8a0d-a3dff2314391-utilities\") on node \"crc\" DevicePath \"\""
Jan 28 18:40:01 crc kubenswrapper[4811]: I0128 18:40:01.970612 4811 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55a8a252-a5f0-454a-8a0d-a3dff2314391-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "55a8a252-a5f0-454a-8a0d-a3dff2314391" (UID: "55a8a252-a5f0-454a-8a0d-a3dff2314391"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 28 18:40:02 crc kubenswrapper[4811]: I0128 18:40:02.041977 4811 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55a8a252-a5f0-454a-8a0d-a3dff2314391-catalog-content\") on node \"crc\" DevicePath \"\""
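The UnmountVolume/TearDown/detach triplets above are the volume reconciler draining the pod's emptyDir and projected volumes after the DELETE. For emptyDir, TearDown boils down to clearing the per-pod directory whose root is logged later by "Cleaned up orphaned pod volumes dir". A sketch of the layout implied by that path (the helper name is invented; the directory structure matches the log):

```go
package main

import (
	"fmt"
	"path/filepath"
)

// emptyDirPath reconstructs where the kubelet keeps an emptyDir
// volume's contents; the layout matches the path in the
// "Cleaned up orphaned pod volumes dir" entry. The helper is
// illustrative, not a kubelet API.
func emptyDirPath(podUID, volume string) string {
	return filepath.Join(
		"/var/lib/kubelet/pods", podUID,
		"volumes", "kubernetes.io~empty-dir", volume,
	)
}

func main() {
	// UID and volume name taken from the log entries above.
	fmt.Println(emptyDirPath("55a8a252-a5f0-454a-8a0d-a3dff2314391", "catalog-content"))
}
```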
Jan 28 18:40:02 crc kubenswrapper[4811]: I0128 18:40:02.188903 4811 generic.go:334] "Generic (PLEG): container finished" podID="55a8a252-a5f0-454a-8a0d-a3dff2314391" containerID="e899a25161507be7a7ce00628097d446855de155a5095669e9e9be3f7dac6b5c" exitCode=0
Jan 28 18:40:02 crc kubenswrapper[4811]: I0128 18:40:02.188953 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lcfcr" event={"ID":"55a8a252-a5f0-454a-8a0d-a3dff2314391","Type":"ContainerDied","Data":"e899a25161507be7a7ce00628097d446855de155a5095669e9e9be3f7dac6b5c"}
Jan 28 18:40:02 crc kubenswrapper[4811]: I0128 18:40:02.188981 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lcfcr" event={"ID":"55a8a252-a5f0-454a-8a0d-a3dff2314391","Type":"ContainerDied","Data":"14bbf18dfb0e3d66f0db970d741764ade428449b677220d4cd28638011d4249d"}
Jan 28 18:40:02 crc kubenswrapper[4811]: I0128 18:40:02.189011 4811 scope.go:117] "RemoveContainer" containerID="e899a25161507be7a7ce00628097d446855de155a5095669e9e9be3f7dac6b5c"
Jan 28 18:40:02 crc kubenswrapper[4811]: I0128 18:40:02.189043 4811 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lcfcr"
Jan 28 18:40:02 crc kubenswrapper[4811]: I0128 18:40:02.212310 4811 scope.go:117] "RemoveContainer" containerID="89605bece49de71563627dfb7ac352d5212bc73495fc75e881fe1dab649bfdd5"
Jan 28 18:40:02 crc kubenswrapper[4811]: I0128 18:40:02.241861 4811 scope.go:117] "RemoveContainer" containerID="892116fe12b80007c54fd98d6c051b01ede0c248b8bb406f42ba3b1078a8d6f5"
Jan 28 18:40:02 crc kubenswrapper[4811]: I0128 18:40:02.242647 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lcfcr"]
Jan 28 18:40:02 crc kubenswrapper[4811]: I0128 18:40:02.255507 4811 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-lcfcr"]
Jan 28 18:40:02 crc kubenswrapper[4811]: I0128 18:40:02.293545 4811 scope.go:117] "RemoveContainer" containerID="e899a25161507be7a7ce00628097d446855de155a5095669e9e9be3f7dac6b5c"
Jan 28 18:40:02 crc kubenswrapper[4811]: E0128 18:40:02.293964 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e899a25161507be7a7ce00628097d446855de155a5095669e9e9be3f7dac6b5c\": container with ID starting with e899a25161507be7a7ce00628097d446855de155a5095669e9e9be3f7dac6b5c not found: ID does not exist" containerID="e899a25161507be7a7ce00628097d446855de155a5095669e9e9be3f7dac6b5c"
Jan 28 18:40:02 crc kubenswrapper[4811]: I0128 18:40:02.294078 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e899a25161507be7a7ce00628097d446855de155a5095669e9e9be3f7dac6b5c"} err="failed to get container status \"e899a25161507be7a7ce00628097d446855de155a5095669e9e9be3f7dac6b5c\": rpc error: code = NotFound desc = could not find container \"e899a25161507be7a7ce00628097d446855de155a5095669e9e9be3f7dac6b5c\": container with ID starting with e899a25161507be7a7ce00628097d446855de155a5095669e9e9be3f7dac6b5c not found: ID does not exist"
Jan 28 18:40:02 crc kubenswrapper[4811]: I0128 18:40:02.294159 4811 scope.go:117] "RemoveContainer" containerID="89605bece49de71563627dfb7ac352d5212bc73495fc75e881fe1dab649bfdd5"
Jan 28 18:40:02 crc kubenswrapper[4811]: E0128 18:40:02.295146 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89605bece49de71563627dfb7ac352d5212bc73495fc75e881fe1dab649bfdd5\": container with ID starting with 89605bece49de71563627dfb7ac352d5212bc73495fc75e881fe1dab649bfdd5 not found: ID does not exist" containerID="89605bece49de71563627dfb7ac352d5212bc73495fc75e881fe1dab649bfdd5"
Jan 28 18:40:02 crc kubenswrapper[4811]: I0128 18:40:02.295240 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89605bece49de71563627dfb7ac352d5212bc73495fc75e881fe1dab649bfdd5"} err="failed to get container status \"89605bece49de71563627dfb7ac352d5212bc73495fc75e881fe1dab649bfdd5\": rpc error: code = NotFound desc = could not find container \"89605bece49de71563627dfb7ac352d5212bc73495fc75e881fe1dab649bfdd5\": container with ID starting with 89605bece49de71563627dfb7ac352d5212bc73495fc75e881fe1dab649bfdd5 not found: ID does not exist"
Jan 28 18:40:02 crc kubenswrapper[4811]: I0128 18:40:02.295314 4811 scope.go:117] "RemoveContainer" containerID="892116fe12b80007c54fd98d6c051b01ede0c248b8bb406f42ba3b1078a8d6f5"
Jan 28 18:40:02 crc kubenswrapper[4811]: E0128 18:40:02.295702 4811 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"892116fe12b80007c54fd98d6c051b01ede0c248b8bb406f42ba3b1078a8d6f5\": container with ID starting with 892116fe12b80007c54fd98d6c051b01ede0c248b8bb406f42ba3b1078a8d6f5 not found: ID does not exist" containerID="892116fe12b80007c54fd98d6c051b01ede0c248b8bb406f42ba3b1078a8d6f5"
Jan 28 18:40:02 crc kubenswrapper[4811]: I0128 18:40:02.295804 4811 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"892116fe12b80007c54fd98d6c051b01ede0c248b8bb406f42ba3b1078a8d6f5"} err="failed to get container status \"892116fe12b80007c54fd98d6c051b01ede0c248b8bb406f42ba3b1078a8d6f5\": rpc error: code = NotFound desc = could not find container \"892116fe12b80007c54fd98d6c051b01ede0c248b8bb406f42ba3b1078a8d6f5\": container with ID starting with 892116fe12b80007c54fd98d6c051b01ede0c248b8bb406f42ba3b1078a8d6f5 not found: ID does not exist"
Jan 28 18:40:02 crc kubenswrapper[4811]: I0128 18:40:02.354575 4811 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55a8a252-a5f0-454a-8a0d-a3dff2314391" path="/var/lib/kubelet/pods/55a8a252-a5f0-454a-8a0d-a3dff2314391/volumes"
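The three NotFound error pairs above are benign noise: "RemoveContainer" fires once from the PLEG-driven sync and again after the API object is force-removed ("SyncLoop REMOVE"), so the second pass queries containers the first pass already deleted. Code driving a CRI runtime usually treats NotFound on deletion as success; a sketch under that assumption, with a hypothetical client interface:

```go
package cri

import (
	"context"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// runtimeClient is a hypothetical stand-in for a CRI RuntimeService
// client; only the single method this sketch needs.
type runtimeClient interface {
	RemoveContainer(ctx context.Context, id string) error
}

// removeIfPresent treats "already gone" as success, which is why the
// NotFound errors above never fail the pod teardown: the deletion
// simply raced with an earlier removal.
func removeIfPresent(ctx context.Context, rt runtimeClient, id string) error {
	err := rt.RemoveContainer(ctx, id)
	if status.Code(err) == codes.NotFound {
		return nil // container was already removed; nothing to do
	}
	return err
}
```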
Jan 28 18:40:04 crc kubenswrapper[4811]: I0128 18:40:04.595001 4811 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zklxw"]
Jan 28 18:40:04 crc kubenswrapper[4811]: E0128 18:40:04.595717 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55a8a252-a5f0-454a-8a0d-a3dff2314391" containerName="extract-utilities"
Jan 28 18:40:04 crc kubenswrapper[4811]: I0128 18:40:04.595729 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="55a8a252-a5f0-454a-8a0d-a3dff2314391" containerName="extract-utilities"
Jan 28 18:40:04 crc kubenswrapper[4811]: E0128 18:40:04.595766 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55a8a252-a5f0-454a-8a0d-a3dff2314391" containerName="extract-content"
Jan 28 18:40:04 crc kubenswrapper[4811]: I0128 18:40:04.595773 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="55a8a252-a5f0-454a-8a0d-a3dff2314391" containerName="extract-content"
Jan 28 18:40:04 crc kubenswrapper[4811]: E0128 18:40:04.595791 4811 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55a8a252-a5f0-454a-8a0d-a3dff2314391" containerName="registry-server"
Jan 28 18:40:04 crc kubenswrapper[4811]: I0128 18:40:04.595799 4811 state_mem.go:107] "Deleted CPUSet assignment" podUID="55a8a252-a5f0-454a-8a0d-a3dff2314391" containerName="registry-server"
Jan 28 18:40:04 crc kubenswrapper[4811]: I0128 18:40:04.595980 4811 memory_manager.go:354] "RemoveStaleState removing state" podUID="55a8a252-a5f0-454a-8a0d-a3dff2314391" containerName="registry-server"
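The cpu_manager/state_mem/memory_manager burst above runs at admission of community-operators-zklxw: before accepting the new pod, the kubelet purges resource-manager bookkeeping still keyed to the deleted redhat-operators pod's three containers. The "RemoveStaleState: removing container" lines are logged at error level but are routine cleanup. A simplified sketch of that bookkeeping (types and names are illustrative, not the kubelet's own):

```go
package cpumanager

// state is a simplified stand-in for the cpu_manager checkpoint
// state: one CPU-set string per (podUID, containerName). The real
// state also tracks a default CPU set and persists to a checkpoint.
type state struct {
	assignments map[string]map[string]string // podUID -> container -> cpuset
}

// removeStaleState drops assignments for pods the kubelet no longer
// tracks, mirroring the "RemoveStaleState: removing container" /
// "Deleted CPUSet assignment" pairs in the log.
func (s *state) removeStaleState(activePods map[string]bool) {
	for podUID, containers := range s.assignments {
		if activePods[podUID] {
			continue
		}
		for name := range containers {
			delete(containers, name) // one "Deleted CPUSet assignment" per container
		}
		delete(s.assignments, podUID)
	}
}
```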
Jan 28 18:40:04 crc kubenswrapper[4811]: I0128 18:40:04.597481 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zklxw"
Jan 28 18:40:04 crc kubenswrapper[4811]: I0128 18:40:04.614383 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zklxw"]
Jan 28 18:40:04 crc kubenswrapper[4811]: I0128 18:40:04.709973 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8r8gm\" (UniqueName: \"kubernetes.io/projected/ce572de0-c8ef-45b9-8777-640c7b59badc-kube-api-access-8r8gm\") pod \"community-operators-zklxw\" (UID: \"ce572de0-c8ef-45b9-8777-640c7b59badc\") " pod="openshift-marketplace/community-operators-zklxw"
Jan 28 18:40:04 crc kubenswrapper[4811]: I0128 18:40:04.710091 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce572de0-c8ef-45b9-8777-640c7b59badc-utilities\") pod \"community-operators-zklxw\" (UID: \"ce572de0-c8ef-45b9-8777-640c7b59badc\") " pod="openshift-marketplace/community-operators-zklxw"
Jan 28 18:40:04 crc kubenswrapper[4811]: I0128 18:40:04.710321 4811 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce572de0-c8ef-45b9-8777-640c7b59badc-catalog-content\") pod \"community-operators-zklxw\" (UID: \"ce572de0-c8ef-45b9-8777-640c7b59badc\") " pod="openshift-marketplace/community-operators-zklxw"
Jan 28 18:40:04 crc kubenswrapper[4811]: I0128 18:40:04.813363 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce572de0-c8ef-45b9-8777-640c7b59badc-catalog-content\") pod \"community-operators-zklxw\" (UID: \"ce572de0-c8ef-45b9-8777-640c7b59badc\") " pod="openshift-marketplace/community-operators-zklxw"
Jan 28 18:40:04 crc kubenswrapper[4811]: I0128 18:40:04.813588 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8r8gm\" (UniqueName: \"kubernetes.io/projected/ce572de0-c8ef-45b9-8777-640c7b59badc-kube-api-access-8r8gm\") pod \"community-operators-zklxw\" (UID: \"ce572de0-c8ef-45b9-8777-640c7b59badc\") " pod="openshift-marketplace/community-operators-zklxw"
Jan 28 18:40:04 crc kubenswrapper[4811]: I0128 18:40:04.813690 4811 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce572de0-c8ef-45b9-8777-640c7b59badc-utilities\") pod \"community-operators-zklxw\" (UID: \"ce572de0-c8ef-45b9-8777-640c7b59badc\") " pod="openshift-marketplace/community-operators-zklxw"
Jan 28 18:40:04 crc kubenswrapper[4811]: I0128 18:40:04.813870 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce572de0-c8ef-45b9-8777-640c7b59badc-catalog-content\") pod \"community-operators-zklxw\" (UID: \"ce572de0-c8ef-45b9-8777-640c7b59badc\") " pod="openshift-marketplace/community-operators-zklxw"
Jan 28 18:40:04 crc kubenswrapper[4811]: I0128 18:40:04.814340 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce572de0-c8ef-45b9-8777-640c7b59badc-utilities\") pod \"community-operators-zklxw\" (UID: \"ce572de0-c8ef-45b9-8777-640c7b59badc\") " pod="openshift-marketplace/community-operators-zklxw"
Jan 28 18:40:04 crc kubenswrapper[4811]: I0128 18:40:04.839729 4811 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8r8gm\" (UniqueName: \"kubernetes.io/projected/ce572de0-c8ef-45b9-8777-640c7b59badc-kube-api-access-8r8gm\") pod \"community-operators-zklxw\" (UID: \"ce572de0-c8ef-45b9-8777-640c7b59badc\") " pod="openshift-marketplace/community-operators-zklxw"
Jan 28 18:40:04 crc kubenswrapper[4811]: I0128 18:40:04.926345 4811 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zklxw"
Jan 28 18:40:05 crc kubenswrapper[4811]: I0128 18:40:05.522004 4811 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zklxw"]
Jan 28 18:40:06 crc kubenswrapper[4811]: I0128 18:40:06.261727 4811 generic.go:334] "Generic (PLEG): container finished" podID="ce572de0-c8ef-45b9-8777-640c7b59badc" containerID="34ec18af78bb2939032e430f472c29fae4e6c617f524bc9612054c3d02e625f6" exitCode=0
Jan 28 18:40:06 crc kubenswrapper[4811]: I0128 18:40:06.261779 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zklxw" event={"ID":"ce572de0-c8ef-45b9-8777-640c7b59badc","Type":"ContainerDied","Data":"34ec18af78bb2939032e430f472c29fae4e6c617f524bc9612054c3d02e625f6"}
Jan 28 18:40:06 crc kubenswrapper[4811]: I0128 18:40:06.261809 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zklxw" event={"ID":"ce572de0-c8ef-45b9-8777-640c7b59badc","Type":"ContainerStarted","Data":"12fa5e62fb4ad369df87bcb7fad5decf0f8fd8d6d8c3c92ab7e2f30f5219ec8e"}
Jan 28 18:40:08 crc kubenswrapper[4811]: I0128 18:40:08.284042 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zklxw" event={"ID":"ce572de0-c8ef-45b9-8777-640c7b59badc","Type":"ContainerStarted","Data":"2fb50244048aa16571c9207882a8416628a85d492da069555631498463461286"}
Jan 28 18:40:09 crc kubenswrapper[4811]: I0128 18:40:09.298188 4811 generic.go:334] "Generic (PLEG): container finished" podID="ce572de0-c8ef-45b9-8777-640c7b59badc" containerID="2fb50244048aa16571c9207882a8416628a85d492da069555631498463461286" exitCode=0
Jan 28 18:40:09 crc kubenswrapper[4811]: I0128 18:40:09.298480 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zklxw" event={"ID":"ce572de0-c8ef-45b9-8777-640c7b59badc","Type":"ContainerDied","Data":"2fb50244048aa16571c9207882a8416628a85d492da069555631498463461286"}
Jan 28 18:40:09 crc kubenswrapper[4811]: I0128 18:40:09.339385 4811 scope.go:117] "RemoveContainer" containerID="2614fd2dc0751f3b25df88580d36f9c7c35346fa9a91a1ea8a893c234a1b228e"
Jan 28 18:40:09 crc kubenswrapper[4811]: E0128 18:40:09.339711 4811 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vcr6_openshift-machine-config-operator(d2a0331a-ea7c-4888-a47e-e73015ab42e0)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vcr6" podUID="d2a0331a-ea7c-4888-a47e-e73015ab42e0"
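The machine-config-daemon entry above (and its twin at 18:39:56) shows restart backoff at its ceiling: the kubelet doubles the delay between restart attempts of a crashing container, starting from 10s and capping at 5m0s, the figure quoted in the error. A sketch of that schedule, assuming the default base and cap:

```go
package backoff

import "time"

// crashLoopDelay reproduces the kubelet's restart backoff curve:
// 10s doubled per consecutive crash, capped at 5m, which is the
// "back-off 5m0s" seen for machine-config-daemon above. The constants
// are the kubelet defaults; the helper itself is illustrative.
func crashLoopDelay(restarts int) time.Duration {
	const (
		base     = 10 * time.Second
		maxDelay = 5 * time.Minute
	)
	d := base
	for i := 0; i < restarts; i++ {
		d *= 2
		if d >= maxDelay {
			return maxDelay
		}
	}
	return d
}
```

After roughly six consecutive crashes the delay pins at the cap, which is why the same error keeps recurring on every sync until the container stays up.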
Jan 28 18:40:11 crc kubenswrapper[4811]: I0128 18:40:11.320220 4811 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zklxw" event={"ID":"ce572de0-c8ef-45b9-8777-640c7b59badc","Type":"ContainerStarted","Data":"4ec4cec9c959daa88036b0cf1d243dd72b6126ee0536ff6f87e2eab5950da120"}
Jan 28 18:40:11 crc kubenswrapper[4811]: I0128 18:40:11.350637 4811 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zklxw" podStartSLOduration=3.78511361 podStartE2EDuration="7.35061937s" podCreationTimestamp="2026-01-28 18:40:04 +0000 UTC" firstStartedPulling="2026-01-28 18:40:06.264382201 +0000 UTC m=+10499.018745784" lastFinishedPulling="2026-01-28 18:40:09.829887971 +0000 UTC m=+10502.584251544" observedRunningTime="2026-01-28 18:40:11.342961333 +0000 UTC m=+10504.097324916" watchObservedRunningTime="2026-01-28 18:40:11.35061937 +0000 UTC m=+10504.104982943"
Jan 28 18:40:14 crc kubenswrapper[4811]: I0128 18:40:14.926984 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zklxw"
Jan 28 18:40:14 crc kubenswrapper[4811]: I0128 18:40:14.927605 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zklxw"
Jan 28 18:40:14 crc kubenswrapper[4811]: I0128 18:40:14.994558 4811 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zklxw"
Jan 28 18:40:15 crc kubenswrapper[4811]: I0128 18:40:15.429212 4811 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zklxw"
Jan 28 18:40:15 crc kubenswrapper[4811]: I0128 18:40:15.499246 4811 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zklxw"]
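Both "Observed pod startup duration" entries in this section obey the same identity: podStartSLOduration equals podStartE2EDuration minus the image-pull window (lastFinishedPulling − firstStartedPulling, most easily read from the monotonic m=+ offsets). For community-operators-zklxw that is 7.35061937s − 3.56550576s = 3.78511361s, exactly the logged SLO value; the redhat-operators pod works out the same way (18.05639943s − 15.56611255s ≈ 2.49028688s). A quick check of the arithmetic:

```go
package main

import (
	"fmt"
	"math"
)

// Verifies the relationship between the fields of the
// pod_startup_latency_tracker entries above:
//   podStartSLOduration = podStartE2EDuration - image-pull window
// using the m=+ monotonic offsets copied from the log.
func main() {
	const (
		e2e          = 7.35061937      // podStartE2EDuration, seconds
		firstPulling = 10499.018745784 // firstStartedPulling, m=+ offset
		lastPulling  = 10502.584251544 // lastFinishedPulling, m=+ offset
		loggedSLO    = 3.78511361      // podStartSLOduration, seconds
	)
	pull := lastPulling - firstPulling
	fmt.Printf("image-pull window: %.9fs\n", pull)     // ~3.565505760s
	fmt.Printf("E2E minus pull:    %.9fs\n", e2e-pull) // ~3.785113610s
	fmt.Printf("matches SLO field: %v\n", math.Abs((e2e-pull)-loggedSLO) < 1e-9)
}
```

In other words, the SLO-facing duration deliberately excludes time spent pulling images, while the E2E duration charges it.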